Compare commits

..

5 Commits

Author SHA1 Message Date
Srikanth Chekuri
eea594af3b Merge branch 'main' into testing-feature 2026-03-02 23:49:14 +05:30
srikanthccv
036f362fa6 chore: update docs 2026-02-28 19:25:24 +05:30
Srikanth Chekuri
6a9a910eb6 Merge branch 'main' into testing-feature 2026-02-28 18:27:07 +05:30
Srikanth Chekuri
4cd7489280 Merge branch 'main' into testing-feature 2026-02-28 14:00:30 +05:30
srikanthccv
ff0736532d chore: add guidelines for authoring tests and high level guide for how to approach new features 2026-02-28 11:35:47 +05:30
91 changed files with 2498 additions and 2315 deletions

View File

@@ -1,4 +1,4 @@
FROM node:22-bookworm AS build
FROM node:18-bullseye AS build
WORKDIR /opt/
COPY ./frontend/ ./

View File

@@ -2040,6 +2040,31 @@ components:
required:
- id
type: object
TypesInvite:
properties:
createdAt:
format: date-time
type: string
email:
type: string
id:
type: string
inviteLink:
type: string
name:
type: string
orgId:
type: string
role:
type: string
token:
type: string
updatedAt:
format: date-time
type: string
required:
- id
type: object
TypesOrganization:
properties:
alias:
@@ -2072,6 +2097,17 @@ components:
role:
type: string
type: object
TypesPostableAcceptInvite:
properties:
displayName:
type: string
password:
type: string
sourceUrl:
type: string
token:
type: string
type: object
TypesPostableForgotPassword:
properties:
email:
@@ -2160,8 +2196,6 @@ components:
type: string
role:
type: string
status:
type: string
updatedAt:
format: date-time
type: string
@@ -3241,6 +3275,53 @@ paths:
tags:
- global
/api/v1/invite:
get:
deprecated: false
description: This endpoint lists all invites
operationId: ListInvite
responses:
"200":
content:
application/json:
schema:
properties:
data:
items:
$ref: '#/components/schemas/TypesInvite'
type: array
status:
type: string
required:
- status
- data
type: object
description: OK
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: List invites
tags:
- users
post:
deprecated: false
description: This endpoint creates an invite for a user
@@ -3257,7 +3338,7 @@ paths:
schema:
properties:
data:
$ref: '#/components/schemas/TypesUser'
$ref: '#/components/schemas/TypesInvite'
status:
type: string
required:
@@ -3303,6 +3384,151 @@ paths:
summary: Create invite
tags:
- users
/api/v1/invite/{id}:
delete:
deprecated: false
description: This endpoint deletes an invite by id
operationId: DeleteInvite
parameters:
- in: path
name: id
required: true
schema:
type: string
responses:
"204":
description: No Content
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- ADMIN
- tokenizer:
- ADMIN
summary: Delete invite
tags:
- users
/api/v1/invite/{token}:
get:
deprecated: false
description: This endpoint gets an invite by token
operationId: GetInvite
parameters:
- in: path
name: token
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TypesInvite'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Get invite
tags:
- users
/api/v1/invite/accept:
post:
deprecated: false
description: This endpoint accepts an invite by token
operationId: AcceptInvite
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/TypesPostableAcceptInvite'
responses:
"201":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/TypesUser'
status:
type: string
required:
- status
- data
type: object
description: Created
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"404":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Not Found
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
summary: Accept invite
tags:
- users
/api/v1/invite/bulk:
post:
deprecated: false

View File

@@ -80,32 +80,9 @@ Do not define an interface before you have at least two concrete implementations
The exception is interfaces required for testing (e.g., for mocking an external dependency). In that case, define the interface in the **consuming** package, not the providing package, following the Go convention of [accepting interfaces and returning structs](https://go.dev/wiki/CodeReviewComments#interfaces).
### 6. Wrappers must add semantics, not just rename
A wrapper type is justified when it adds meaning, validation, or invariants that the underlying type does not carry. It is not justified when it merely renames fields or reorganizes the same data into a different shape.
```go
// Justified: adds validation that the underlying string does not carry.
type OrgID struct{ value string }
func NewOrgID(s string) (OrgID, error) { /* validates format */ }
// Not justified: renames fields with no new invariant or behavior.
type UserInfo struct {
Name string // same as source.Name
Email string // same as source.Email
}
```
Ask: what does the wrapper guarantee that the underlying type does not? If the answer is nothing, use the underlying type directly.
## When a new type IS warranted
A new type earns its place when it meets **at least one** of these criteria:
- **Serialization boundary**: It must be persisted, sent over the wire, or written to config. The source type is unsuitable (unexported fields, function pointers, cycles).
- **Invariant enforcement**: The constructor or methods enforce constraints that raw data does not carry (e.g., non-empty, validated format, bounded range).
- **Multiple distinct consumers**: Three or more call sites use the type in meaningfully different ways. The type is the shared vocabulary between them.
- **Dependency firewall**: The type lives in a lightweight package so that consumers avoid importing a heavy dependency.
See [Types](types.md#when-a-new-type-is-warranted) for the criteria that justify introducing a new type.
## What should I remember?

View File

@@ -49,6 +49,43 @@ Follow these rules:
5. **Test files stay alongside source**: Unit tests go in `_test.go` files next to the code they test, in the same package.
## How should I order code within a file?
Within a single `.go` file, declarations should follow this order:
1. Constants
2. Variables
3. Types (structs, interfaces)
4. Constructor functions (`New...`)
5. Exported methods and functions
6. Unexported methods and functions
```go
// 1. Constants
const defaultTimeout = 30 * time.Second
// 2. Variables
var ErrNotFound = errors.New(errors.TypeNotFound, errors.CodeNotFound, "resource not found")
// 3. Types
type Store struct {
db *sql.DB
}
// 4. Constructors
func NewStore(db *sql.DB) *Store {
return &Store{db: db}
}
// 5. Exported methods
func (s *Store) Get(ctx context.Context, id string) (*Resource, error) { ... }
// 6. Unexported methods
func (s *Store) buildQuery(id string) string { ... }
```
This ordering makes files predictable. A reader scanning from top to bottom sees the contract (constants, types, constructors) before the implementation (methods), and exported behavior before internal helpers.
## How should I name symbols?
### Exported symbols
@@ -90,9 +127,7 @@ Never introduce circular imports. If package A needs package B and B needs A, ex
## Where do shared types go?
Most types belong in `pkg/types/` under a domain-specific sub-package (e.g., `pkg/types/ruletypes`, `pkg/types/authtypes`).
Do not put domain logic in `pkg/types/`. Only data structures, constants, and simple methods.
See [Types](types.md) for full conventions on type placement, naming variants, composition, and constructors.
## How do I merge or move packages?
@@ -105,6 +140,10 @@ When two packages are tightly coupled (one imports the other's constants, they c
5. Delete the old packages. Do not leave behind re-export shims.
6. Verify with `go build ./...`, `go test ./<new-pkg>/...`, and `go vet ./...`.
## When should I use valuer types?
See [Types](types.md#typed-domain-values-pkgvaluer) for valuer types, when to use them, and the enum pattern using `valuer.String`.
## When should I add documentation?
Add a `doc.go` with a package-level comment for any package that is non-trivial or has multiple consumers. Keep it to 1-3 sentences:
@@ -119,6 +158,10 @@ package cache
- Package names are domain-specific and lowercase. Never generic names like `util` or `common`.
- The file matching the package name (e.g., `cache.go`) defines the public interface. Implementation details go elsewhere.
- Within a file, order declarations: constants, variables, types, constructors, exported functions, unexported functions.
- Segregate types across files by responsibility. A file with 5 unrelated types is harder to navigate than 5 files with one type each.
- Use valuer types (`valuer.String`, `valuer.Email`, `valuer.UUID`, `valuer.TextDuration`) for domain values that need validation, normalization, or cross-boundary serialization. See [Types](types.md#typed-domain-values-pkgvaluer) for details.
- Avoid `init()` functions. If you need to initialize a variable, use a package-level `var` with a function call or a `sync.Once`. `init()` hides execution order, makes testing harder, and has caused subtle bugs in large codebases.
- Never introduce circular imports. Extract shared types into `pkg/types/` when needed.
- Watch for symbol name collisions when merging packages, prefix to disambiguate.
- Put test helpers in a `{pkg}test/` sub-package, not in the main package.

View File

@@ -8,7 +8,31 @@ We adhere to three primary style guides as our foundation:
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package:
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development.
**Discover before inventing.** Before writing new code, search the codebase for existing solutions. SigNoz has established patterns for common problems: `pkg/valuer` for typed domain values, `pkg/errors` for structured errors, `pkg/factory` for provider wiring, `{pkg}test/` sub-packages for test helpers, and shared fixtures for integration tests. Duplicating what already exists creates drift and maintenance burden. When you find an existing pattern, use it. When you don't find one, check with the maintainers before building your own.
## How to approach a feature
Building a feature is not one task; it is a sequence of concerns that build on each other. Work through them in this order:
1. **Domain design (types).** Define the types that represent your domain. What are the entities, what are their relationships, what are the constraints? This is where you decide your data model. Get this right first because everything else depends on it. See [Packages](packages.md) and [Abstractions](abstractions.md).
2. **Structure (services / modules / handlers).** Place your code in the right layer given the current infrastructure. If the current structure does not work for your feature, that is the time to open a discussion and write a technical document, not to silently reshape things in the same PR. See [Handler](handler.md) and [Provider](provider.md).
3. **HTTP endpoints (paths, status codes, errors).** Pay close attention to detail here. Paths, methods, request/response shapes, status codes, error codes. These are the contract with consumers and are expensive to change after release. See [Endpoint](endpoint.md) and [Handler](handler.md).
4. **Database constraints (org_id, foreign keys, migrations).** Ensure org scoping, schema consistency, and migration correctness. See [SQL](sql.md).
5. **Business logic (module layer).** With the types, structure, endpoints, and storage in place, the focus narrows to the actual logic. This is where review should concentrate on correctness, edge cases, and error handling.
This ordering also gives you a natural way to split PRs. Each layer affects a different area and requires a different lens for review. A PR that mixes refactoring with new feature logic is hard to review and risky to ship. Separate them.
For large refactors or features that touch multiple subsystems, write a short technical document outlining the design and get relevant stakeholders aligned before starting implementation. This saves significant back-and-forth during review.
## Area-specific guides
In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package:
- [Abstractions](abstractions.md) - When to introduce new types and intermediate representations
- [Errors](errors.md) - Structured error handling
@@ -20,3 +44,5 @@ We **recommend** (almost enforce) reviewing these guides before contributing to
- [Packages](packages.md) - Naming, layout, and conventions for `pkg/` packages
- [Service](service.md) - Managed service lifecycle with `factory.Service`
- [SQL](sql.md) - Database and SQL patterns
- [Testing](testing.md) - Writing tests that catch bugs without becoming a maintenance burden
- [Types](types.md) - Type placement, naming variants, composition, and constructors

View File

@@ -0,0 +1,260 @@
# Testing
This document provides rules for writing tests that catch real bugs and do not become a maintenance burden. It covers both how to write good tests and how to recognize bad ones.
## Why we write tests
Tests exist to give confidence that the system behaves correctly. A good test suite lets you change code and know immediately (or in a reasonable time) whether you broke something. A bad test suite lets you change code (and then spend hours figuring out whether the failures are real) and still lets the bugs slip in.
Every test should be written to answer one question: **if this test fails, does that mean a user-visible behavior is broken?** If the answer is no, reconsider whether the test should exist.
Not all tests are equal. Different scopes serve different purposes, and the balance matters.
- **Unit tests**: Fast, focused, test a single function or type in isolation. These form the foundation. They should run in milliseconds, have no I/O, and be fully deterministic.
- **Integration tests**: Verify that components work together against real dependencies (ClickHouse, PostgreSQL, etc.). Slower, but catch problems that unit tests cannot: real query behavior, configuration issues, serialization mismatches.
- **End-to-end tests**: Validate full system behavior from the outside. Expensive to write and maintain, but necessary for critical user flows.
When a test can be written at a smaller scope, prefer the smaller scope. But do not force a unit test where an integration test is the natural fit.
## What to test
### Test behaviors, not implementations
A test should verify what the code does, not how it does it (unless the goal of the test is specifically how something happens). If you can refactor the internals of a function (e.g., change a query, rename a variable, or restructure the logic) and no user-visible behavior changes, no test should break.
```go
// Good: tests the behavior "given this input, expect this output."
func TestDiscountApplied(t *testing.T) {
order := NewOrder(item("widget", 100))
order.ApplyDiscount(10)
assert.Equal(t, 90, order.Total())
}
// Bad: tests the implementation "did it call the right internal method?"
func TestDiscountApplied(t *testing.T) {
mockPricer := new(MockPricer)
mockPricer.On("CalculateDiscount", 100, 10).Return(90)
order := NewOrder(item("widget", 100), WithPricer(mockPricer))
order.ApplyDiscount(10)
mockPricer.AssertCalled(t, "CalculateDiscount", 100, 10)
}
```
The first test survives a refactoring of how discounts are calculated. The second test breaks the moment you rename the method, change its signature, or inline the logic.
**The refactoring test**: before committing a test, ask: if someone refactors the internals tomorrow without changing any behavior, will this test break? If yes, reconsider the test.
### Output format as behavior
Some functions exist specifically to produce a formatted output: a query builder generates SQL, a serializer generates JSON, a code generator produces source code. In these cases, the output string *is* the behavior and asserting on it is valid and necessary. The function's contract is the exact output it produces.
This is different from testing a function that *uses* a query internally. If a function's job is to fetch data from a database, the query it sends is an implementation detail and the returned data is the behavior. If its job is to *build* a query for someone else to execute, the query string is the behavior.
The distinction: **is the formatted output the function's product, or the function's mechanism?** Test the product, not the mechanism.
### Test at the public API boundary
Write tests against the exported functions and methods that consumers actually call. Do not test unexported helpers directly. If an unexported function has complex logic worth testing, that is a signal it should be extracted into its own package with its own public API.
### Test edge cases and error paths
The most valuable tests cover the cases that are easy to get wrong:
- Empty inputs, nil inputs, zero values.
- Boundary conditions (off-by-one, first element, last element).
- Error conditions (what happens when the dependency fails?).
- Concurrent access, if the code is designed for it.
A test for the happy path of a trivial function adds little value. A test for the error path of a complex function prevents real bugs.
### The Beyonce Rule
"If you liked it, then you should have put a test on it." Any behavior you want to preserve — such as correctness, performance characteristics, security constraints, or error handling — should be covered by a test. If it breaks and there is no test, that is not a regression; it is an untested assumption.
## How to write a test
### Structure: arrange, act, assert
Every test should have three clearly separated sections:
```go
func TestTransferInsufficientFunds(t *testing.T) {
// Arrange: set up the preconditions.
from := NewAccount(50)
to := NewAccount(0)
// Act: perform the operation being tested.
err := Transfer(from, to, 100)
// Assert: verify the outcome.
require.Error(t, err)
assert.Equal(t, 50, from.Balance())
assert.Equal(t, 0, to.Balance())
}
```
Do not interleave setup and assertions. Do not put assertions in helper functions that also perform setup. Keep the three sections visually distinct.
### One behavior per test
Each test function should verify one behavior. If a test name needs "and" in it, split it into two tests.
```go
// Good: one behavior per test.
func TestParseValidInput(t *testing.T) { ... }
func TestParseEmptyInput(t *testing.T) { ... }
func TestParseMalformedInput(t *testing.T) { ... }
// Bad: multiple behaviors in one test.
func TestParse(t *testing.T) {
// test valid input
// test empty input
// test malformed input
}
```
Table-driven tests are fine when the behavior is the same and only the inputs/outputs vary.
### Name tests after behaviors
Test names should describe the scenario and the expected outcome, not the function being tested.
```go
// Good: describes the behavior.
func TestWithdrawal_InsufficientFunds_ReturnsError(t *testing.T)
func TestWithdrawal_ZeroBalance_ReturnsError(t *testing.T)
// Bad: describes the function.
func TestWithdraw(t *testing.T)
func TestWithdrawError(t *testing.T)
```
### Eliminate logic in tests
Tests should be straight-line code. No `if`, no `for`, no `switch`. If you feel the need to add control flow to a test, either split it into multiple tests or restructure the test data.
A test with logic in it needs its own tests. That is a sign something has gone wrong.
### Write clear failure messages
When a test fails, the failure message should tell you what went wrong without reading the test source.
```go
// Good: failure message explains the context.
assert.Equal(t, expected, actual, "discount should be applied to order total")
// Bad: failure message is just the default.
assert.Equal(t, expected, actual)
```
Use `require` for preconditions that must hold for the rest of the test to make sense. Use `assert` for the actual verifications. This avoids cascading failures from a single root cause.
## How to recognize a bad test
A bad test costs more to maintain than the bugs it prevents. Learning to identify bad tests is as important as learning to write good ones. Always evaluate a test critically before committing it.
### Tests that duplicate the implementation
If a test contains the same logic as the code it tests, it verifies nothing. It will pass when the code is wrong in the same way the test is wrong, and it will break whenever the code changes even if the change is correct.
A common form: mocking a database, setting up canned rows, calling a function that queries and scans those rows, then asserting that the function returned exactly those rows. The test encodes the query, the row structure, and the scan logic — the same things the production code does. If the function has no branching logic beyond "query and scan," this test is a mirror of the implementation, not a check on it. An integration test against a real database verifies the actual behavior; the mock-based test verifies that the code matches the test author's expectations of the code.
### Tests for functions with no interesting logic
Not every function needs a test. A function that prepares a query, sends it, and scans the result has no branching, no edge cases, and no logic that could be wrong independently of the query being correct. Unit-testing it means mocking the database, which means the test does not verify the query works. It only verifies the function calls the mock in the expected way.
Ask: **what bug would this test catch that would not be caught by the integration test or by the tests of the calling code?** If the answer is nothing, skip the unit test. A missing test is better than a test that provides false confidence.
### Tests that rebuild the dependency boundary
When a test creates an in-package mock of an external interface (database driver, HTTP client, file system) and that mock contains non-trivial logic (reflection-based scanning, response simulation, state machines), the test is now testing its own mock as much as the production code. Bugs in the mock produce false passes or false failures, and the mock must be maintained alongside the real dependency.
If the mock is complex enough to have its own bugs, you have rebuilt the dependency boundary rather than testing against it. Use the real dependency (via integration test) or use a well-maintained fake provided by the dependency's authors.
### Tests that exist for coverage
A test that exercises a function without meaningfully verifying its output adds coverage without adding confidence. Calling a type-conversion function with every numeric type and asserting it does not panic covers lines but does not catch regressions. The function would need to be rewritten to fail, and any such rewrite would be caught by the callers' tests.
Before writing a test, identify the specific failure mode it guards against. If you cannot name one, the test is not worth writing.
### Tests that test the language
Do not test that the language's type system, standard library, or well-known third-party libraries work correctly. Testing that `reflect.Kind` returns the right value for each type, that pointer dereferencing works, or that a type switch dispatches correctly adds maintenance burden without catching any plausible bug in your code.
## Brittle tests
A brittle test is one that fails when production code changes without an actual bug being introduced. Brittle tests are expensive: they slow down development, train people to ignore failures, and provide no real safety net. Common sources of brittleness:
- **Asserting on implementation details**: Verifying which internal methods were called, in what order, or with what intermediate values. If the method is renamed or the order changes but the output is the same, the test breaks for no reason.
- **Asserting on serialized representations when the format is not the contract**: Matching exact SQL strings, JSON output, or log messages produced by a function whose job is not to produce that format.
- **Over-constrained mocks**: Setting up a mock that expects specific arguments in a specific sequence. Any refactoring of the call pattern breaks the mock setup even if behavior is preserved.
- **Shared mutable state**: Tests that depend on data left behind by other tests. A change in execution order or a new test case causes unrelated failures.
- **Time-dependence**: Tests that use `time.Now()`, `time.Sleep()`, or real timers. These produce flaky results and break under load.
When you encounter a brittle test, fix or delete it. Do not work around it.
## DAMP
Test code should prioritize clarity (DAMP: Descriptive And Meaningful Phrases).
```go
// DAMP: each test is self-contained and readable.
func TestCreateUser(t *testing.T) {
user := User{Name: "Alice", Email: "alice@example.com"}
err := store.Create(ctx, user)
require.NoError(t, err)
}
func TestCreateDuplicateUser(t *testing.T) {
user := User{Name: "Alice", Email: "alice@example.com"}
_ = store.Create(ctx, user)
err := store.Create(ctx, user)
assert.ErrorIs(t, err, ErrAlreadyExists)
}
```
Shared setup helpers are fine for constructing objects with sensible defaults. But each test should explicitly set the values it depends on rather than relying on hidden defaults in a shared fixture.
## Flaky tests
A flaky test is one that sometimes passes and sometimes fails without any code change. Flaky tests erode trust in the entire suite. Once people learn to re-run and ignore failures, real bugs slip through.
Common causes and fixes:
- **Timing and sleeps**: Replace `time.Sleep` with channels, condition variables, or polling with a timeout.
- **Uncontrolled concurrency**: Use deterministic synchronization rather than relying on goroutine scheduling.
- **Shared state between tests**: Each test should set up and tear down its own state.
If a test is flaky and you cannot fix the root cause quickly, skip or delete it. A skipped test with an explanation is better than a flaky test that trains everyone to ignore red builds.
## Code coverage
Code coverage measures which lines were executed, not whether the code is correct. A function that is called but whose output is never checked has 100% coverage and 0% verification.
Do not use coverage as a target to hit. Use it as a tool to find gaps such as untested error paths, unreachable branches, dead code. A codebase with 60% meaningful coverage is better than one with 95% coverage achieved by testing trivial getters.
## Tests are code
Tests must be maintained; they are not second-class citizens. You should apply the same standards for readability, naming, and structure that you apply to production code. We do not tolerate complexity in tests just because they are tests.
However, tests should be simpler than production code. If a test requires its own helper library, complex setup, or nested control flow, step back and ask whether you are testing the right thing at the right level. This is not a blanket rule but a prompt to pause, assess the situation, and check whether the complexity is justified.
## What should I remember?
- If refactoring internals breaks your test but no behavior changed, the test is likely bad. Delete it or consider updating it.
- Test what the code does, not how it does it. Verify outputs and state, not method calls.
- Output format is behavior when the function's job is to produce that format. It is not behavior when the function uses it internally.
- Ask what specific bug this test catches. If you cannot name one, do not write it.
- Always evaluate whether the test adds confidence, not just lines.
- One behavior per test. Name it after the scenario, not the function.
- No logic in tests. Straight-line code only.
- Flaky tests are not acceptable. Fix the root cause or nuke the test code.
- Coverage measures execution, not correctness.
## Mandatory reading
- What to look for in a code review: Tests - https://google.github.io/eng-practices/review/reviewer/looking-for.html#tests
- Testing Overview - https://abseil.io/resources/swe-book/html/ch11.html
- Unit Testing - https://abseil.io/resources/swe-book/html/ch12.html
- Test Doubles - https://abseil.io/resources/swe-book/html/ch13.html
- Larger Testing - https://abseil.io/resources/swe-book/html/ch14.html

View File

@@ -0,0 +1,272 @@
# Types
This guide covers how types are organised, named, constructed, and composed so you can add new ones consistently.
## Where do types live?
Types live in `pkg/types/` and its sub-packages:
```
pkg/types/
├── auditable.go # TimeAuditable, UserAuditable
├── identity.go # Identifiable (UUID primary key)
├── user.go # User, PostableRegisterOrgAndAdmin, UserStore
├── alertmanagertypes/ # Alert manager domain types
│ ├── channel.go
│ ├── receiver.go
│ └── config.go
├── authtypes/ # Auth domain types
└── ruletypes/ # Rule domain types
└── maintenance.go
```
Follow these rules:
1. **Embeddable building blocks** go in `pkg/types/` directly: `Identifiable`, `TimeAuditable`, `UserAuditable`.
2. **Domain-specific types** go in a sub-package named `pkg/types/<domain>types/` (e.g., `alertmanagertypes`, `ruletypes`, `authtypes`).
3. **No domain logic** in type packages. Only data structures, constants, and simple methods. Domain services import from type packages, not the other way around.
4. **Domain services import types, not vice versa.** If a type needs a service, the design is likely wrong and you should restructure so the service operates on the type.
## Type variants
A domain entity often has multiple representations depending on where it appears in the system. We use naming prefixes to distinguish them:
| Prefix | Purpose | Example |
|---|---|---|
| `Postable<Type>` | API request input | `PostableRegisterOrgAndAdmin` |
| `Gettable<Type>` | API response output | `GettablePlannedMaintenance` |
| `Storable<Type>` | Database model (embeds `bun.BaseModel`) | `StorablePlannedMaintenance` |
| Plain `<Type>` | Domain logic type | `User` |
Not every entity needs all four variants. Start with the plain type and add variants only when the API or database representation genuinely differs.
Here is a concrete example from `pkg/types/ruletypes/maintenance.go`:
```go
// Database model embeds bun.BaseModel and composition types
type StorablePlannedMaintenance struct {
bun.BaseModel `bun:"table:planned_maintenance"`
types.Identifiable
types.TimeAuditable
types.UserAuditable
Name string `bun:"name,type:text,notnull"`
Description string `bun:"description,type:text"`
Schedule *Schedule `bun:"schedule,type:text,notnull"`
OrgID string `bun:"org_id,type:text"`
}
// API response: flat struct with JSON tags, computed fields like Status
type GettablePlannedMaintenance struct {
Id string `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
Schedule *Schedule `json:"schedule"`
RuleIDs []string `json:"alertIds"`
CreatedAt time.Time `json:"createdAt"`
CreatedBy string `json:"createdBy"`
UpdatedAt time.Time `json:"updatedAt"`
UpdatedBy string `json:"updatedBy"`
Status string `json:"status"`
Kind string `json:"kind"`
}
```
When the API shape exactly matches the domain type, use a type alias instead of duplicating fields:
```go
// From pkg/types/user.go
type GettableUser = User
```
## Composition via embedding
`pkg/types/` provides small, reusable structs that you embed into your domain types:
```go
// pkg/types/identity.go
type Identifiable struct {
ID valuer.UUID `json:"id" bun:"id,pk,type:text"`
}
// pkg/types/auditable.go
type TimeAuditable struct {
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
}
type UserAuditable struct {
CreatedBy string `bun:"created_by,type:text" json:"createdBy"`
UpdatedBy string `bun:"updated_by,type:text" json:"updatedBy"`
}
```
Compose them in a database model:
```go
type StorablePlannedMaintenance struct {
bun.BaseModel `bun:"table:planned_maintenance"`
types.Identifiable // adds ID (UUID primary key)
types.TimeAuditable // adds CreatedAt, UpdatedAt
types.UserAuditable // adds CreatedBy, UpdatedBy
Name string `bun:"name,type:text,notnull"`
Description string `bun:"description,type:text"`
}
```
See [SQL](sql.md) for full database patterns including migrations and queries.
## Constructors
Constructors validate inputs and return a ready-to-use value:
```go
// New<Type> validates and returns a pointer + error
func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUID) (*User, error) {
if email.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
}
if role == "" {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "role is required")
}
if orgID.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
}
return &User{
Identifiable: Identifiable{ID: valuer.GenerateUUID()},
DisplayName: displayName,
Email: email,
Role: role,
OrgID: orgID,
TimeAuditable: TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
}, nil
}
```
Follow these conventions:
- **`New<Type>(args) (*Type, error)`**: validates inputs, returns an error on failure. Use this in production code.
- **Validation at construction**: check required fields, format constraints, and invariants in the constructor. Callers should not need to validate after construction.
- **Generate IDs internally**: constructors call `valuer.GenerateUUID()`; callers do not pass IDs in.
- **Set timestamps internally**: constructors set `CreatedAt` and `UpdatedAt` to `time.Now()`.
## Typed domain values (`pkg/valuer/`)
The `pkg/valuer` package provides typed wrappers for common domain values. These types carry validation, normalization, and consistent serialization (JSON, SQL, text) that raw Go primitives do not.
| Type | Wraps | Invariant |
|---|---|---|
| `valuer.UUID` | `google/uuid.UUID` | Valid UUIDv7, generated via `GenerateUUID()` |
| `valuer.Email` | `string` | Valid email format, lowercased and trimmed |
| `valuer.String` | `string` | Lowercased and trimmed |
| `valuer.TextDuration` | `time.Duration` | Valid duration, text-serializable |
### When to use a valuer type
Use a valuer type instead of a raw primitive when the value represents a domain concept with any of:
- **Enums**: All enums in the codebase must be backed by `valuer.String`. Do not use raw `string` constants or `iota`-based `int` enums. A struct embedding `valuer.String` with predefined variables gives you normalization, serialization, and an `Enum()` method for OpenAPI schema generation in one place.
- **Validation**: emails must match a format, UUIDs must be parseable, durations must be valid.
- **Normalization**: `valuer.String` lowercases and trims input, so comparisons are consistent throughout the system.
- **Serialization boundary**: the value is stored in a database, sent over the wire, or bound from an HTTP parameter. Valuer types implement `Scan`, `Value`, `MarshalJSON`, `UnmarshalJSON`, and `UnmarshalParam` consistently.
```go
// Wrong: raw string constant with no validation or normalization.
const SignalTraces = "traces"
// Right: valuer-backed type that normalizes and serializes consistently.
type Signal struct {
valuer.String
}
var SignalTraces = Signal{valuer.NewString("traces")}
```
Only primitive domain types that serve as shared infrastructure belong in `pkg/valuer`. If you need a new base type (like `Email` or `TextDuration`) that multiple packages will embed for validation and serialization, add it there. Domain-specific types that build on top of a valuer (like `Signal` embedding `valuer.String`) belong in their own domain package, not in `pkg/valuer`.
### The `Valuer` interface
Every valuer type implements the `Valuer` interface, which gives you serialization for free:
```go
type Valuer interface {
IsZero() bool // check for zero value
StringValue() string // raw string representation
fmt.Stringer // String() for printing
json.Marshaler / json.Unmarshaler // JSON
sql.Scanner / driver.Valuer // database
encoding.TextMarshaler / TextUnmarshaler // text
ginbinding.BindUnmarshaler // HTTP query/path params
}
```
Use them in struct fields:
```go
type User struct {
Identifiable
Email valuer.Email `bun:"email" json:"email"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
}
```
## Wrappers must add semantics, not just rename
A wrapper type is justified when it adds meaning, validation, or invariants that the underlying type does not carry. It is not justified when it merely renames fields or reorganizes the same data into a different shape.
```go
// Justified: adds validation that the underlying string does not carry.
type OrgID struct{ value string }
func NewOrgID(s string) (OrgID, error) { /* validates format */ }
// Not justified: renames fields with no new invariant or behavior.
type UserInfo struct {
Name string // same as source.Name
Email string // same as source.Email
}
```
Ask: what does the wrapper guarantee that the underlying type does not? If the answer is nothing, use the underlying type directly.
## When a new type IS warranted
A new type earns its place when it meets **at least one** of these criteria:
- **Serialization boundary**: It must be persisted, sent over the wire, or written to config. The source type is unsuitable (unexported fields, function pointers, cycles).
- **Invariant enforcement**: The constructor or methods enforce constraints that raw data does not carry (e.g., non-empty, validated format, bounded range).
- **Multiple distinct consumers**: Three or more call sites use the type in meaningfully different ways. The type is the shared vocabulary between them.
- **Dependency firewall**: The type lives in a lightweight package so that consumers avoid importing a heavy dependency.
See [Abstractions](abstractions.md) for the full set of rules on when abstractions are and aren't justified.
## Store interfaces
Each domain type package defines a store interface for persistence. The store interface lives alongside the types it operates on:
```go
// From pkg/types/ruletypes/maintenance.go
type MaintenanceStore interface {
CreatePlannedMaintenance(context.Context, GettablePlannedMaintenance) (valuer.UUID, error)
DeletePlannedMaintenance(context.Context, valuer.UUID) error
GetPlannedMaintenanceByID(context.Context, valuer.UUID) (*GettablePlannedMaintenance, error)
EditPlannedMaintenance(context.Context, GettablePlannedMaintenance, valuer.UUID) error
GetAllPlannedMaintenance(context.Context, string) ([]*GettablePlannedMaintenance, error)
}
```
Conventions:
- Name the interface `<Domain>Store` (e.g., `UserStore`, `MaintenanceStore`).
- Accept `context.Context` as the first parameter.
- Use typed values (`valuer.UUID`, `valuer.Email`) instead of raw strings for identifiers.
- Implementations go in separate packages (e.g., `sqlstore/`), see [SQL](sql.md) for details.
## What should I remember?
- Shared types live in `pkg/types/`, domain types in `pkg/types/<domain>types/`.
- No domain logic in type packages — only data structures, constants, and simple methods.
- Use `Storable`, `Gettable`, `Postable` prefixes when API or database representation differs from the domain type.
- Embed `Identifiable`, `TimeAuditable`, and `UserAuditable` for standard fields instead of repeating them.
- Constructors (`New<Type>`) validate, generate IDs, and set timestamps.
- Use `pkg/valuer/` types instead of raw strings for domain identifiers like UUIDs and emails.
- Store interfaces live alongside the types they persist and use `context.Context` as the first parameter.

View File

@@ -170,7 +170,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId), types.UserStatusActive)
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}

View File

@@ -2,7 +2,7 @@ import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import APIError from 'types/api/error';
// @deprecated Use convertToApiError instead
// Handles errors from generated API hooks (which use RenderErrorResponseDTO)
export function ErrorResponseHandlerForGeneratedAPIs(
error: AxiosError<RenderErrorResponseDTO>,
): never {
@@ -46,34 +46,3 @@ export function ErrorResponseHandlerForGeneratedAPIs(
},
});
}
/**
 * convertToApiError converts an AxiosError from generated API
 * hooks into an APIError. Returns undefined when no error was given,
 * and falls back to sensible defaults for every missing field.
 */
export function convertToApiError(
	error: AxiosError<RenderErrorResponseDTO> | null,
): APIError | undefined {
	if (!error) {
		return undefined;
	}

	const { response } = error;
	const apiErrorBody = response?.data?.error;

	// `||` (not `??`) is intentional: empty strings also trigger the fallback.
	const httpStatusCode = response?.status || error.status || 500;
	const code =
		apiErrorBody?.code ||
		String(response?.status || error.code || 'unknown_error');
	const message =
		apiErrorBody?.message ||
		response?.statusText ||
		error.message ||
		'Something went wrong';
	const nestedErrors = (apiErrorBody?.errors ?? []).map((entry) => ({
		message: entry.message ?? '',
	}));

	return new APIError({
		httpStatusCode,
		error: {
			code,
			message,
			url: apiErrorBody?.url ?? '',
			errors: nestedErrors,
		},
	});
}

View File

@@ -2415,6 +2415,47 @@ export interface TypesIdentifiableDTO {
id: string;
}
/**
 * Wire representation of a pending user invite as returned by the
 * `/api/v1/invite` endpoints. Mirrors the backend `TypesInvite` schema;
 * only `id` is required by the generated OpenAPI definition.
 */
export interface TypesInviteDTO {
	/**
	 * @type string
	 * @format date-time
	 */
	createdAt?: Date;
	/**
	 * @type string
	 */
	email?: string;
	/**
	 * @type string
	 */
	id: string;
	/**
	 * @type string
	 */
	inviteLink?: string;
	/**
	 * @type string
	 */
	name?: string;
	/**
	 * @type string
	 */
	orgId?: string;
	/**
	 * @type string
	 */
	role?: string;
	/**
	 * @type string
	 */
	token?: string;
	/**
	 * @type string
	 * @format date-time
	 */
	updatedAt?: Date;
}
export interface TypesOrganizationDTO {
/**
* @type string
@@ -2465,6 +2506,25 @@ export interface TypesPostableAPIKeyDTO {
role?: string;
}
/**
 * Request body for POST `/api/v1/invite/accept`. Mirrors the backend
 * `TypesPostableAcceptInvite` schema; all fields are optional in the
 * generated OpenAPI definition.
 */
export interface TypesPostableAcceptInviteDTO {
	/**
	 * @type string
	 */
	displayName?: string;
	/**
	 * @type string
	 */
	password?: string;
	/**
	 * @type string
	 */
	sourceUrl?: string;
	/**
	 * @type string
	 */
	token?: string;
}
export interface TypesPostableForgotPasswordDTO {
/**
* @type string
@@ -2605,10 +2665,6 @@ export interface TypesUserDTO {
* @type string
*/
role?: string;
/**
* @type string
*/
status?: string;
/**
* @type string
* @format date-time
@@ -2961,7 +3017,40 @@ export type GetGlobalConfig200 = {
status: string;
};
export type ListInvite200 = {
/**
* @type array
*/
data: TypesInviteDTO[];
/**
* @type string
*/
status: string;
};
export type CreateInvite201 = {
data: TypesInviteDTO;
/**
* @type string
*/
status: string;
};
export type DeleteInvitePathParameters = {
id: string;
};
export type GetInvitePathParameters = {
token: string;
};
export type GetInvite200 = {
data: TypesInviteDTO;
/**
* @type string
*/
status: string;
};
export type AcceptInvite201 = {
data: TypesUserDTO;
/**
* @type string

View File

@@ -20,20 +20,26 @@ import { useMutation, useQuery } from 'react-query';
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
AcceptInvite201,
ChangePasswordPathParameters,
CreateAPIKey201,
CreateInvite201,
DeleteInvitePathParameters,
DeleteUserPathParameters,
GetInvite200,
GetInvitePathParameters,
GetMyUser200,
GetResetPasswordToken200,
GetResetPasswordTokenPathParameters,
GetUser200,
GetUserPathParameters,
ListAPIKeys200,
ListInvite200,
ListUsers200,
RenderErrorResponseDTO,
RevokeAPIKeyPathParameters,
TypesChangePasswordRequestDTO,
TypesPostableAcceptInviteDTO,
TypesPostableAPIKeyDTO,
TypesPostableForgotPasswordDTO,
TypesPostableInviteDTO,
@@ -252,6 +258,84 @@ export const invalidateGetResetPasswordToken = async (
return queryClient;
};
/**
 * This endpoint lists all invites
 * @summary List invites
 * @param signal - optional AbortSignal used to cancel the in-flight request
 */
export const listInvite = (signal?: AbortSignal) => {
	return GeneratedAPIInstance<ListInvite200>({
		url: `/api/v1/invite`,
		method: 'GET',
		signal,
	});
};
export const getListInviteQueryKey = () => {
return [`/api/v1/invite`] as const;
};
export const getListInviteQueryOptions = <
TData = Awaited<ReturnType<typeof listInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getListInviteQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof listInvite>>> = ({
signal,
}) => listInvite(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof listInvite>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ListInviteQueryResult = NonNullable<
Awaited<ReturnType<typeof listInvite>>
>;
export type ListInviteQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary List invites
*/
export function useListInvite<
TData = Awaited<ReturnType<typeof listInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getListInviteQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary List invites
*/
export const invalidateListInvite = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getListInviteQueryKey() },
options,
);
return queryClient;
};
/**
* This endpoint creates an invite for a user
* @summary Create invite
@@ -335,6 +419,257 @@ export const useCreateInvite = <
return useMutation(mutationOptions);
};
/**
 * This endpoint deletes an invite by id
 * @summary Delete invite
 * @param id - invite identifier, interpolated into the request path
 */
export const deleteInvite = ({ id }: DeleteInvitePathParameters) => {
	return GeneratedAPIInstance<void>({
		url: `/api/v1/invite/${id}`,
		method: 'DELETE',
	});
};
export const getDeleteInviteMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
> => {
const mutationKey = ['deleteInvite'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteInvite>>,
{ pathParams: DeleteInvitePathParameters }
> = (props) => {
const { pathParams } = props ?? {};
return deleteInvite(pathParams);
};
return { mutationFn, ...mutationOptions };
};
export type DeleteInviteMutationResult = NonNullable<
Awaited<ReturnType<typeof deleteInvite>>
>;
export type DeleteInviteMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Delete invite
*/
export const useDeleteInvite = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
> => {
const mutationOptions = getDeleteInviteMutationOptions(options);
return useMutation(mutationOptions);
};
/**
 * This endpoint gets an invite by token
 * @summary Get invite
 * @param token - invite token, interpolated into the request path
 * @param signal - optional AbortSignal used to cancel the in-flight request
 */
export const getInvite = (
	{ token }: GetInvitePathParameters,
	signal?: AbortSignal,
) => {
	return GeneratedAPIInstance<GetInvite200>({
		url: `/api/v1/invite/${token}`,
		method: 'GET',
		signal,
	});
};
export const getGetInviteQueryKey = ({ token }: GetInvitePathParameters) => {
return [`/api/v1/invite/${token}`] as const;
};
export const getGetInviteQueryOptions = <
TData = Awaited<ReturnType<typeof getInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ token }: GetInvitePathParameters,
options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetInviteQueryKey({ token });
const queryFn: QueryFunction<Awaited<ReturnType<typeof getInvite>>> = ({
signal,
}) => getInvite({ token }, signal);
return {
queryKey,
queryFn,
enabled: !!token,
...queryOptions,
} as UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData> & {
queryKey: QueryKey;
};
};
export type GetInviteQueryResult = NonNullable<
Awaited<ReturnType<typeof getInvite>>
>;
export type GetInviteQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get invite
*/
export function useGetInvite<
TData = Awaited<ReturnType<typeof getInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ token }: GetInvitePathParameters,
options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetInviteQueryOptions({ token }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get invite
*/
export const invalidateGetInvite = async (
queryClient: QueryClient,
{ token }: GetInvitePathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetInviteQueryKey({ token }) },
options,
);
return queryClient;
};
/**
 * This endpoint accepts an invite by token
 * @summary Accept invite
 * @param typesPostableAcceptInviteDTO - invite token plus the new user's
 *   display name and password, sent as a JSON body
 * @param signal - optional AbortSignal used to cancel the in-flight request
 */
export const acceptInvite = (
	typesPostableAcceptInviteDTO: BodyType<TypesPostableAcceptInviteDTO>,
	signal?: AbortSignal,
) => {
	return GeneratedAPIInstance<AcceptInvite201>({
		url: `/api/v1/invite/accept`,
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		data: typesPostableAcceptInviteDTO,
		signal,
	});
};
export const getAcceptInviteMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
> => {
const mutationKey = ['acceptInvite'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof acceptInvite>>,
{ data: BodyType<TypesPostableAcceptInviteDTO> }
> = (props) => {
const { data } = props ?? {};
return acceptInvite(data);
};
return { mutationFn, ...mutationOptions };
};
export type AcceptInviteMutationResult = NonNullable<
Awaited<ReturnType<typeof acceptInvite>>
>;
export type AcceptInviteMutationBody = BodyType<TypesPostableAcceptInviteDTO>;
export type AcceptInviteMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Accept invite
*/
export const useAcceptInvite = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
> => {
const mutationOptions = getAcceptInviteMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint creates a bulk invite for a user
* @summary Create bulk invite

View File

@@ -0,0 +1,54 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { TreemapViewType } from 'container/MetricsExplorer/Summary/types';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
// Request body for POST /metrics/treemap.
export interface MetricsTreeMapPayload {
	filters: TagFilter;
	limit?: number;
	treemap?: TreemapViewType;
}

// Response envelope: treemap tiles keyed by view type.
export interface MetricsTreeMapResponse {
	status: string;
	data: {
		[TreemapViewType.TIMESERIES]: TimeseriesData[];
		[TreemapViewType.SAMPLES]: SamplesData[];
	};
}

// One treemap tile for the "timeseries" view.
export interface TimeseriesData {
	percentage: number;
	total_value: number;
	metric_name: string;
}

// One treemap tile for the "samples" view.
export interface SamplesData {
	percentage: number;
	metric_name: string;
}
/**
 * Fetches metrics treemap data.
 *
 * POSTs the filter/limit/view-type payload to `/metrics/treemap` and wraps
 * the result in the app-wide SuccessResponse/ErrorResponse envelope.
 *
 * @param props - filters, optional limit, and treemap view type
 * @param signal - optional AbortSignal to cancel the in-flight request
 * @param headers - optional extra request headers
 * @returns SuccessResponse with the treemap payload, or an ErrorResponse
 *   produced by ErrorResponseHandler when the request fails
 */
export const getMetricsTreeMap = async (
	props: MetricsTreeMapPayload,
	signal?: AbortSignal,
	headers?: Record<string, string>,
): Promise<SuccessResponse<MetricsTreeMapResponse> | ErrorResponse> => {
	try {
		const response = await axios.post('/metrics/treemap', props, {
			signal,
			headers,
		});
		return {
			// Report the actual HTTP status instead of hard-coding 200,
			// so callers see 201/204 etc. when the backend returns them.
			statusCode: response.status,
			error: null,
			message: response.data.status,
			payload: response.data,
			params: props,
		};
	} catch (error) {
		return ErrorResponseHandler(error as AxiosError);
	}
};

View File

@@ -0,0 +1,36 @@
import axios from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { Temporality } from './getMetricDetails';
import { MetricType } from './getMetricsList';
// Request body for POST /metrics/{metricName}/metadata.
export interface UpdateMetricMetadataProps {
	description: string;
	metricType: MetricType;
	temporality?: Temporality;
	isMonotonic?: boolean;
	unit?: string;
}

// Backend acknowledgement for a metadata update.
export interface UpdateMetricMetadataResponse {
	success: boolean;
	message: string;
}
/**
 * Updates metadata (type, description, temporality, unit, monotonicity)
 * for a single metric via POST /metrics/{metricName}/metadata.
 *
 * @param metricName - name of the metric; URL-encoded before being placed
 *   in the request path so names containing path-unsafe characters
 *   (e.g. '/', '#', spaces) do not break the route
 * @param props - metadata fields to persist
 * @returns SuccessResponse with the backend acknowledgement; axios errors
 *   propagate to the caller (there is no catch here by design — callers
 *   such as react-query mutations handle rejection)
 */
const updateMetricMetadata = async (
	metricName: string,
	props: UpdateMetricMetadataProps,
): Promise<SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse> => {
	const response = await axios.post(
		`/metrics/${encodeURIComponent(metricName)}/metadata`,
		// Pass props directly — spreading into a fresh object sent an
		// identical payload at the cost of a needless copy.
		props,
	);
	return {
		statusCode: 200,
		error: null,
		message: response.data.status,
		payload: response.data.data,
	};
};

export default updateMetricMetadata;

View File

@@ -49,6 +49,7 @@ export const REACT_QUERY_KEY = {
// Metrics Explorer Query Keys
GET_METRICS_LIST: 'GET_METRICS_LIST',
GET_METRICS_TREE_MAP: 'GET_METRICS_TREE_MAP',
GET_METRICS_LIST_FILTER_KEYS: 'GET_METRICS_LIST_FILTER_KEYS',
GET_METRICS_LIST_FILTER_VALUES: 'GET_METRICS_LIST_FILTER_VALUES',
GET_METRIC_DETAILS: 'GET_METRIC_DETAILS',

View File

@@ -1,4 +1,3 @@
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PrecisionOption } from 'components/Graph/types';
import { LegendConfig, TooltipRenderArgs } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
@@ -9,7 +8,7 @@ interface BaseChartProps {
height: number;
showTooltip?: boolean;
showLegend?: boolean;
timezone?: Timezone;
timezone: string;
canPinTooltip?: boolean;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;

View File

@@ -129,12 +129,12 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
onDestroy={onPlotDestroy}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
isStackedBarChart={widget.stackedBarChart ?? false}
timezone={timezone}
>
<ContextMenu
coordinates={coordinates}

View File

@@ -5,7 +5,12 @@ import { getInitialStackedBands } from 'container/DashboardContainer/visualizati
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { DrawStyle } from 'lib/uPlotV2/config/types';
import {
DrawStyle,
LineInterpolation,
LineStyle,
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
@@ -58,12 +63,7 @@ export function prepareBarPanelConfig({
const minStepInterval = Math.min(...Object.values(stepIntervals));
const builder = buildBaseConfig({
id: widget.id,
thresholds: widget.thresholds,
yAxisUnit: widget.yAxisUnit,
softMin: widget.softMin ?? undefined,
softMax: widget.softMax ?? undefined,
isLogScale: widget.isLogScale,
widget,
isDarkMode,
onClick,
onDragSelect,
@@ -98,8 +98,14 @@ export function prepareBarPanelConfig({
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.BAR,
label: label,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
lineStyle: LineStyle.Solid,
lineInterpolation: LineInterpolation.Spline,
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
stepInterval: currentStepInterval,
});

View File

@@ -100,7 +100,7 @@ function HistogramPanel(props: PanelWrapperProps): JSX.Element {
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
syncMode={DashboardCursorSync.Crosshair}
timezone={timezone}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}

View File

@@ -154,12 +154,7 @@ export function prepareHistogramPanelConfig({
isDarkMode: boolean;
}): UPlotConfigBuilder {
const builder = buildBaseConfig({
id: widget.id,
thresholds: widget.thresholds,
yAxisUnit: widget.yAxisUnit,
softMin: widget.softMin ?? undefined,
softMax: widget.softMax ?? undefined,
isLogScale: widget.isLogScale,
widget,
isDarkMode,
apiResponse,
panelMode,
@@ -196,8 +191,10 @@ export function prepareHistogramPanelConfig({
builder.addSeries({
label: '',
scaleKey: 'y',
drawStyle: DrawStyle.Histogram,
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
lineColor: '#3f5ecc',
@@ -219,8 +216,10 @@ export function prepareHistogramPanelConfig({
builder.addSeries({
label: label,
scaleKey: 'y',
drawStyle: DrawStyle.Histogram,
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
isDarkMode,

View File

@@ -118,7 +118,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
}}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
timezone={timezone}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}

View File

@@ -82,12 +82,7 @@ export const prepareUPlotConfig = ({
const minStepInterval = Math.min(...Object.values(stepIntervals));
const builder = buildBaseConfig({
id: widget.id,
thresholds: widget.thresholds,
yAxisUnit: widget.yAxisUnit,
softMin: widget.softMin ?? undefined,
softMax: widget.softMax ?? undefined,
isLogScale: widget.isLogScale,
widget,
isDarkMode,
onClick,
onDragSelect,
@@ -125,6 +120,7 @@ export const prepareUPlotConfig = ({
: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
panelType: PANEL_TYPES.TIME_SERIES,
});
});

View File

@@ -1,11 +1,11 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import { STEP_INTERVAL_MULTIPLIER } from 'lib/uPlotV2/constants';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { PanelMode } from '../../types';
import { BaseConfigBuilderProps, buildBaseConfig } from '../baseConfigBuilder';
import { buildBaseConfig } from '../baseConfigBuilder';
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
@@ -27,25 +27,16 @@ jest.mock('lib/uPlotLib/plugins/onClickPlugin', () => ({
default: jest.fn().mockReturnValue({ name: 'onClickPlugin' }),
}));
const createBaseConfigBuilderProps = (
overrides: Partial<
Pick<
BaseConfigBuilderProps,
'id' | 'yAxisUnit' | 'isLogScale' | 'softMin' | 'softMax' | 'thresholds'
>
> = {},
): Pick<
BaseConfigBuilderProps,
'id' | 'yAxisUnit' | 'isLogScale' | 'softMin' | 'softMax' | 'thresholds'
> => ({
id: 'widget-1',
yAxisUnit: 'ms',
isLogScale: false,
softMin: undefined,
softMax: undefined,
thresholds: [],
...overrides,
});
const createWidget = (overrides: Partial<Widgets> = {}): Widgets =>
({
id: 'widget-1',
yAxisUnit: 'ms',
isLogScale: false,
softMin: undefined,
softMax: undefined,
thresholds: [],
...overrides,
} as Widgets);
const createApiResponse = (
overrides: Partial<MetricRangePayloadProps> = {},
@@ -56,7 +47,7 @@ const createApiResponse = (
} as MetricRangePayloadProps);
const baseProps = {
...createBaseConfigBuilderProps(),
widget: createWidget(),
apiResponse: createApiResponse(),
isDarkMode: true,
panelMode: PanelMode.DASHBOARD_VIEW,
@@ -72,14 +63,14 @@ describe('buildBaseConfig', () => {
expect(typeof builder.getLegendItems).toBe('function');
});
it('configures builder with id and DASHBOARD_VIEW preferences', () => {
it('configures builder with widgetId and DASHBOARD_VIEW preferences', () => {
const builder = buildBaseConfig({
...baseProps,
panelMode: PanelMode.DASHBOARD_VIEW,
...createBaseConfigBuilderProps({ id: 'my-widget' }),
widget: createWidget({ id: 'my-widget' }),
});
expect(builder.getId()).toBe('my-widget');
expect(builder.getWidgetId()).toBe('my-widget');
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
@@ -136,7 +127,7 @@ describe('buildBaseConfig', () => {
it('configures log scale on y axis when widget.isLogScale is true', () => {
const builder = buildBaseConfig({
...baseProps,
...createBaseConfigBuilderProps({ isLogScale: true }),
widget: createWidget({ isLogScale: true }),
});
const config = builder.getConfig();
@@ -180,7 +171,7 @@ describe('buildBaseConfig', () => {
it('adds thresholds from widget', () => {
const builder = buildBaseConfig({
...baseProps,
...createBaseConfigBuilderProps({
widget: createWidget({
thresholds: [
{
thresholdValue: 80,
@@ -188,7 +179,7 @@ describe('buildBaseConfig', () => {
thresholdUnit: 'ms',
thresholdLabel: 'High',
},
] as ThresholdProps[],
] as Widgets['thresholds'],
}),
});

View File

@@ -1,6 +1,5 @@
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { ThresholdProps } from 'container/NewWidget/RightContainer/Threshold/types';
import onClickPlugin, {
OnClickPluginOpts,
} from 'lib/uPlotLib/plugins/onClickPlugin';
@@ -10,32 +9,28 @@ import {
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import { PanelMode } from '../types';
export interface BaseConfigBuilderProps {
id: string;
thresholds?: ThresholdProps[];
widget: Widgets;
apiResponse: MetricRangePayloadProps;
isDarkMode: boolean;
onClick?: OnClickPluginOpts['onClick'];
onDragSelect?: (startTime: number, endTime: number) => void;
timezone?: Timezone;
panelMode?: PanelMode;
panelMode: PanelMode;
panelType: PANEL_TYPES;
minTimeScale?: number;
maxTimeScale?: number;
stepInterval?: number;
isLogScale?: boolean;
yAxisUnit?: string;
softMin?: number;
softMax?: number;
}
export function buildBaseConfig({
id,
widget,
isDarkMode,
onClick,
onDragSelect,
@@ -43,14 +38,9 @@ export function buildBaseConfig({
timezone,
panelMode,
panelType,
thresholds,
minTimeScale,
maxTimeScale,
stepInterval,
isLogScale,
yAxisUnit,
softMin,
softMax,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = timezone
? (timestamp: number): Date =>
@@ -58,27 +48,28 @@ export function buildBaseConfig({
: undefined;
const builder = new UPlotConfigBuilder({
id,
onDragSelect,
widgetId: widget.id,
tzDate,
shouldSaveSelectionPreference: panelMode === PanelMode.DASHBOARD_VIEW,
selectionPreferencesSource: panelMode
? [PanelMode.DASHBOARD_VIEW, PanelMode.STANDALONE_VIEW].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY
selectionPreferencesSource: [
PanelMode.DASHBOARD_VIEW,
PanelMode.STANDALONE_VIEW,
].includes(panelMode)
? SelectionPreferencesSource.LOCAL_STORAGE
: SelectionPreferencesSource.IN_MEMORY,
stepInterval,
});
const thresholdOptions: ThresholdsDrawHookOptions = {
scaleKey: 'y',
thresholds: (thresholds || []).map((threshold) => ({
thresholds: (widget.thresholds || []).map((threshold) => ({
thresholdValue: threshold.thresholdValue ?? 0,
thresholdColor: threshold.thresholdColor,
thresholdUnit: threshold.thresholdUnit,
thresholdLabel: threshold.thresholdLabel,
})),
yAxisUnit: yAxisUnit,
yAxisUnit: widget.yAxisUnit,
};
builder.addThresholds(thresholdOptions);
@@ -88,8 +79,8 @@ export function buildBaseConfig({
time: true,
min: minTimeScale,
max: maxTimeScale,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
logBase: widget.isLogScale ? 10 : undefined,
distribution: widget.isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
@@ -100,11 +91,11 @@ export function buildBaseConfig({
time: false,
min: undefined,
max: undefined,
softMin: softMin,
softMax: softMax,
softMin: widget.softMin ?? undefined,
softMax: widget.softMax ?? undefined,
thresholds: thresholdOptions,
logBase: isLogScale ? 10 : undefined,
distribution: isLogScale
logBase: widget.isLogScale ? 10 : undefined,
distribution: widget.isLogScale
? DistributionType.Logarithmic
: DistributionType.Linear,
});
@@ -123,7 +114,7 @@ export function buildBaseConfig({
show: true,
side: 2,
isDarkMode,
isLogScale,
isLogScale: widget.isLogScale,
panelType,
});
@@ -132,8 +123,8 @@ export function buildBaseConfig({
show: true,
side: 3,
isDarkMode,
isLogScale,
yAxisUnit,
isLogScale: widget.isLogScale,
yAxisUnit: widget.yAxisUnit,
panelType,
});

View File

@@ -15,7 +15,7 @@ export const getRandomColor = (): string => {
};
export const DATASOURCE_VS_ROUTES: Record<DataSource, string> = {
[DataSource.METRICS]: ROUTES.METRICS_EXPLORER_EXPLORER,
[DataSource.METRICS]: ROUTES.METRICS_EXPLORER,
[DataSource.TRACES]: ROUTES.TRACES_EXPLORER,
[DataSource.LOGS]: ROUTES.LOGS_EXPLORER,
};

View File

@@ -190,11 +190,6 @@
.ant-table-cell:nth-child(n + 3) {
padding-right: 24px;
}
.status-header {
display: flex;
align-items: center;
gap: 4px;
}
.memory-usage-header {
display: flex;
align-items: center;

View File

@@ -146,14 +146,7 @@ export const getHostsListColumns = (): ColumnType<HostRowData>[] => [
),
},
{
title: (
<div className="status-header">
Status
<Tooltip title="Sent system metrics in last 10 mins">
<InfoCircleOutlined />
</Tooltip>
</div>
),
title: 'Status',
dataIndex: 'active',
key: 'active',
width: 100,

View File

@@ -12,21 +12,14 @@ import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interface
import DateTimeSelector from 'container/TopNav/DateTimeSelectionV2';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
import {
ICurrentQueryData,
useHandleExplorerTabChange,
} from 'hooks/useHandleExplorerTabChange';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { ExplorerViews } from 'pages/LogsExplorer/utils';
import { Warning } from 'types/api';
import { Dashboard } from 'types/api/dashboard/getAll';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { MetricAggregation } from 'types/api/v5/queryRange';
import { DataSource } from 'types/common/queryBuilder';
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
import { explorerViewToPanelType } from 'utils/explorerUtils';
import { v4 as uuid } from 'uuid';
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
@@ -49,20 +42,15 @@ function Explorer(): JSX.Element {
stagedQuery,
updateAllQueriesOperators,
currentQuery,
handleSetConfig,
} = useQueryBuilder();
const { safeNavigate } = useSafeNavigate();
const { handleExplorerTabChange } = useHandleExplorerTabChange();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(false);
const metricNames = useMemo(() => {
const currentMetricNames: string[] = [];
stagedQuery?.builder.queryData.forEach((query) => {
const metricName =
query.aggregateAttribute?.key ||
(query.aggregations?.[0] as MetricAggregation | undefined)?.metricName;
if (metricName) {
currentMetricNames.push(metricName);
if (query.aggregateAttribute?.key) {
currentMetricNames.push(query.aggregateAttribute?.key);
}
});
return currentMetricNames;
@@ -188,16 +176,6 @@ function Explorer(): JSX.Element {
useShareBuilderUrl({ defaultValue: defaultQuery });
const handleChangeSelectedView = useCallback(
(view: ExplorerViews, querySearchParameters?: ICurrentQueryData): void => {
const nextPanelType =
explorerViewToPanelType[view] || PANEL_TYPES.TIME_SERIES;
handleSetConfig(nextPanelType, DataSource.METRICS);
handleExplorerTabChange(nextPanelType, querySearchParameters);
},
[handleSetConfig, handleExplorerTabChange],
);
const handleExport = useCallback(
(
dashboard: Dashboard | null,
@@ -370,7 +348,6 @@ function Explorer(): JSX.Element {
onExport={handleExport}
isOneChartPerQuery={showOneChartPerQuery}
splitedQueries={splitedQueries}
handleChangeSelectedView={handleChangeSelectedView}
/>
{isMetricDetailsOpen && selectedMetricName && (
<MetricDetails

View File

@@ -12,7 +12,6 @@ import { initialQueriesMap } from 'constants/queryBuilder';
import * as useOptionsMenuHooks from 'container/OptionsMenu';
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
import * as useHandleExplorerTabChangeHooks from 'hooks/useHandleExplorerTabChange';
import * as appContextHooks from 'providers/App/App';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import * as timezoneHooks from 'providers/Timezone';
@@ -30,8 +29,6 @@ const queryClient = new QueryClient();
const mockUpdateAllQueriesOperators = jest
.fn()
.mockReturnValue(initialQueriesMap[DataSource.METRICS]);
const mockHandleSetConfig = jest.fn();
const mockHandleExplorerTabChange = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
@@ -43,7 +40,7 @@ const mockUseQueryBuilderData = {
handleSetQueryData: jest.fn(),
handleSetFormulaData: jest.fn(),
handleSetQueryItemData: jest.fn(),
handleSetConfig: mockHandleSetConfig,
handleSetConfig: jest.fn(),
removeQueryBuilderEntityByIndex: jest.fn(),
removeQueryTypeItemByIndex: jest.fn(),
isDefaultQuery: jest.fn(),
@@ -138,11 +135,6 @@ jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
jest
.spyOn(useHandleExplorerTabChangeHooks, 'useHandleExplorerTabChange')
.mockReturnValue({
handleExplorerTabChange: mockHandleExplorerTabChange,
});
const Y_AXIS_UNIT_SELECTOR_TEST_ID = 'y-axis-unit-selector';
@@ -390,109 +382,4 @@ describe('Explorer', () => {
expect(oneChartPerQueryToggle).toBeEnabled();
expect(oneChartPerQueryToggle).not.toBeChecked();
});
describe('loading saved views with v5 query format', () => {
const EMPTY_STATE_TEXT = 'Select a metric and run a query to see the results';
it('should show empty state when no metric is selected', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({}),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [],
});
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
...mockUseQueryBuilderData,
} as any);
renderExplorer();
expect(screen.getByText(EMPTY_STATE_TEXT)).toBeInTheDocument();
});
it('should not show empty state when saved view has v5 aggregations format', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({}),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_METADATA],
});
// saved view loaded back from v5 format
// aggregateAttribute.key is empty (lost in v3/v4 -> v5 -> v3/v4 round trip)
// but aggregations[0].metricName has metric name
// TODO(srikanthccv): remove this mess
const mockQueryData = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: '',
},
aggregations: [
{
metricName: 'http_requests_total',
temporality: 'cumulative',
timeAggregation: 'rate',
spaceAggregation: 'sum',
},
],
};
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [mockQueryData],
},
},
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderExplorer();
expect(screen.queryByText(EMPTY_STATE_TEXT)).not.toBeInTheDocument();
});
it('should not show empty state when query uses v3 aggregateAttribute format', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({}),
mockSetSearchParams,
]);
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
isLoading: false,
isError: false,
metrics: [MOCK_METRIC_METADATA],
});
const mockQueryData = {
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
aggregateAttribute: {
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
.aggregateAttribute as BaseAutocompleteData),
key: 'system_cpu_usage',
},
};
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [mockQueryData],
},
},
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
renderExplorer();
expect(screen.queryByText(EMPTY_STATE_TEXT)).not.toBeInTheDocument();
});
});
});

View File

@@ -9,6 +9,7 @@ import {
} from 'api/generated/services/metrics';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import useUrlQuery from 'hooks/useUrlQuery';
import { Bell, Grid } from 'lucide-react';
import { pluralize } from 'utils/pluralize';
@@ -17,6 +18,8 @@ import { DashboardsAndAlertsPopoverProps } from './types';
function DashboardsAndAlertsPopover({
metricName,
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
const params = useUrlQuery();
const {
data: alertsData,
isLoading: isLoadingAlerts,
@@ -68,10 +71,8 @@ function DashboardsAndAlertsPopover({
<Typography.Link
key={alert.alertId}
onClick={(): void => {
window.open(
`${ROUTES.ALERT_OVERVIEW}?${QueryParams.ruleId}=${alert.alertId}`,
'_blank',
);
params.set(QueryParams.ruleId, alert.alertId);
window.open(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`, '_blank');
}}
className="dashboards-popover-content-item"
>
@@ -81,7 +82,7 @@ function DashboardsAndAlertsPopover({
}));
}
return null;
}, [alerts]);
}, [alerts, params]);
const dashboardsPopoverContent = useMemo(() => {
if (dashboards && dashboards.length > 0) {

View File

@@ -16,6 +16,15 @@ import {
const mockWindowOpen = jest.fn();
Object.defineProperty(window, 'open', { value: mockWindowOpen });
const mockSetQuery = jest.fn();
const mockUrlQuery = {
set: mockSetQuery,
toString: jest.fn(),
};
jest.mock('hooks/useUrlQuery', () => ({
__esModule: true,
default: jest.fn(() => mockUrlQuery),
}));
const useGetMetricAlertsMock = jest.spyOn(
metricsExplorerHooks,
@@ -147,10 +156,12 @@ describe('DashboardsAndAlertsPopover', () => {
// Click on the first alert rule
await userEvent.click(screen.getByText(MOCK_ALERT_1.alertName));
expect(mockWindowOpen).toHaveBeenCalledWith(
`/alerts/overview?${QueryParams.ruleId}=${MOCK_ALERT_1.alertId}`,
'_blank',
// Should open alert in new tab
expect(mockSetQuery).toHaveBeenCalledWith(
QueryParams.ruleId,
MOCK_ALERT_1.alertId,
);
expect(mockWindowOpen).toHaveBeenCalled();
});
it('renders unique dashboards even when there are duplicates', async () => {

View File

@@ -0,0 +1,76 @@
import { useMemo } from 'react';
import { Color } from '@signozhq/design-tokens';
import { Typography } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import {
BarChart,
BarChart2,
BarChartHorizontal,
Diff,
Gauge,
} from 'lucide-react';
import { METRIC_TYPE_LABEL_MAP } from './constants';
// TODO: @amlannandy Delete this component after API migration is complete
/**
 * Renders a small colored badge (icon + label) for a metric type.
 * Unknown types render with no icon and an empty accent color.
 */
function MetricTypeRenderer({ type }: { type: MetricType }): JSX.Element {
	// Resolve the icon element and accent color for the given type via a
	// lookup table; only the matched entry is ever rendered.
	const [icon, color] = useMemo((): [JSX.Element | null, string] => {
		const badgeByType: Partial<Record<MetricType, [JSX.Element, string]>> = {
			[MetricType.SUM]: [
				<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
				Color.BG_ROBIN_500,
			],
			[MetricType.GAUGE]: [
				<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
				Color.BG_SAKURA_500,
			],
			[MetricType.HISTOGRAM]: [
				<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
				Color.BG_SIENNA_500,
			],
			[MetricType.SUMMARY]: [
				<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
				Color.BG_FOREST_500,
			],
			[MetricType.EXPONENTIAL_HISTOGRAM]: [
				<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
				Color.BG_AQUA_500,
			],
		};
		return badgeByType[type] ?? [null, ''];
	}, [type]);

	// Badge container: translucent fill (hex + '33' alpha suffix) with a
	// solid border in the accent color.
	const containerStyle = useMemo(
		() => ({
			backgroundColor: `${color}33`,
			border: `1px solid ${color}`,
			color,
		}),
		[color],
	);

	const labelStyle = useMemo(
		() => ({
			color,
			fontSize: 12,
		}),
		[color],
	);

	return (
		<div className="metric-type-renderer" style={containerStyle}>
			{icon}
			<Typography.Text style={labelStyle}>
				{METRIC_TYPE_LABEL_MAP[type]}
			</Typography.Text>
		</div>
	);
}

export default MetricTypeRenderer;

View File

@@ -47,7 +47,7 @@ function MetricsSearch({
}}
onRun={handleRunQuery}
showFilterSuggestionsWithoutMetric
placeholder="Search your metrics. Try service.name='api' to see all API service metrics, or http.client for HTTP client metrics."
placeholder="Try metric_name CONTAINS 'http.server' to view all HTTP Server metrics being sent"
/>
</div>
<RunQueryBtn

View File

@@ -10,7 +10,6 @@ import {
} from 'antd';
import { SorterResult } from 'antd/es/table/interface';
import { Querybuildertypesv5OrderDirectionDTO } from 'api/generated/services/sigNoz.schemas';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import { Info } from 'lucide-react';
import { MetricsListItemRowData, MetricsTableProps } from './types';
@@ -19,7 +18,6 @@ import { getMetricsTableColumns } from './utils';
function MetricsTable({
isLoading,
isError,
error,
data,
pageSize,
currentPage,
@@ -73,54 +71,54 @@ function MetricsTable({
<Info size={16} />
</Tooltip>
</div>
{isError && error ? (
<ErrorInPlace error={error} />
) : (
<Table
loading={{
spinning: isLoading,
indicator: (
<Spin
data-testid="metrics-table-loading-state"
indicator={<LoadingOutlined size={14} spin />}
<Table
loading={{
spinning: isLoading,
indicator: (
<Spin
data-testid="metrics-table-loading-state"
indicator={<LoadingOutlined size={14} spin />}
/>
),
}}
dataSource={data}
columns={getMetricsTableColumns(queryFilterExpression, onFilterChange)}
locale={{
emptyText: isLoading ? null : (
<div
className="no-metrics-message-container"
data-testid={
isError ? 'metrics-table-error-state' : 'metrics-table-empty-state'
}
>
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
),
}}
dataSource={data}
columns={getMetricsTableColumns(queryFilterExpression, onFilterChange)}
locale={{
emptyText: isLoading ? null : (
<div
className="no-metrics-message-container"
data-testid="metrics-table-empty-state"
>
<img
src="/Icons/emptyState.svg"
alt="thinking-emoji"
className="empty-state-svg"
/>
<Typography.Text className="no-metrics-message">
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
),
}}
tableLayout="fixed"
onChange={handleTableChange}
pagination={{
current: currentPage,
pageSize,
showSizeChanger: true,
hideOnSinglePage: false,
onChange: onPaginationChange,
total: totalCount,
}}
onRow={(record): { onClick: () => void; className: string } => ({
onClick: (): void => openMetricDetails(record.key, 'list'),
className: 'clickable-row',
})}
/>
)}
<Typography.Text className="no-metrics-message">
{isError
? 'Error fetching metrics. If the problem persists, please contact support.'
: 'This query had no results. Edit your query and try again!'}
</Typography.Text>
</div>
),
}}
tableLayout="fixed"
onChange={handleTableChange}
pagination={{
current: currentPage,
pageSize,
showSizeChanger: true,
hideOnSinglePage: false,
onChange: onPaginationChange,
total: totalCount,
}}
onRow={(record): { onClick: () => void; className: string } => ({
onClick: (): void => openMetricDetails(record.key, 'list'),
className: 'clickable-row',
})}
/>
</div>
);
}

View File

@@ -4,7 +4,6 @@ import { Group } from '@visx/group';
import { Treemap } from '@visx/hierarchy';
import { Empty, Select, Skeleton, Tooltip, Typography } from 'antd';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
import { HierarchyNode, stratify, treemapBinary } from 'd3-hierarchy';
import { Info } from 'lucide-react';
@@ -28,7 +27,6 @@ import {
function MetricsTreemapInternal({
isLoading,
isError,
error,
data,
viewType,
openMetricDetails,
@@ -93,10 +91,6 @@ function MetricsTreemapInternal({
);
}
if (isError && error) {
return <ErrorInPlace error={error} />;
}
if (isError) {
return (
<Empty
@@ -180,7 +174,6 @@ function MetricsTreemap({
data,
isLoading,
isError,
error,
openMetricDetails,
setHeatmapView,
}: MetricsTreemapProps): JSX.Element {
@@ -209,7 +202,6 @@ function MetricsTreemap({
<MetricsTreemapInternal
isLoading={isLoading}
isError={isError}
error={error}
data={data}
viewType={viewType}
openMetricDetails={openMetricDetails}

View File

@@ -4,7 +4,6 @@ import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import logEvent from 'api/common/logEvent';
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
import {
useGetMetricsStats,
useGetMetricsTreemap,
@@ -64,20 +63,13 @@ function Summary(): JSX.Element {
MetricsexplorertypesTreemapModeDTO.samples,
);
const {
currentQuery,
stagedQuery,
redirectWithQueryBuilderData,
} = useQueryBuilder();
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
useShareBuilderUrl({ defaultValue: initialQueriesMap[DataSource.METRICS] });
const query = useMemo(
() =>
stagedQuery?.builder?.queryData?.[0] ||
initialQueriesMap[DataSource.METRICS].builder.queryData[0],
[stagedQuery],
);
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
currentQuery,
]);
const [searchParams, setSearchParams] = useSearchParams();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(
@@ -94,16 +86,14 @@ function Summary(): JSX.Element {
(state) => state.globalTime,
);
const appliedFilterExpression = query?.filter?.expression || '';
const [
currentQueryFilterExpression,
setCurrentQueryFilterExpression,
] = useState<string>(appliedFilterExpression);
] = useState<string>(query?.filter?.expression || '');
useEffect(() => {
setCurrentQueryFilterExpression(appliedFilterExpression);
}, [appliedFilterExpression]);
const [appliedFilterExpression, setAppliedFilterExpression] = useState(
query?.filter?.expression || '',
);
const queryFilterExpression = useMemo(
() => ({ expression: appliedFilterExpression }),
@@ -160,7 +150,6 @@ function Summary(): JSX.Element {
mutate: getMetricsStats,
isLoading: isGetMetricsStatsLoading,
isError: isGetMetricsStatsError,
error: metricsStatsError,
} = useGetMetricsStats();
const {
@@ -168,19 +157,8 @@ function Summary(): JSX.Element {
mutate: getMetricsTreemap,
isLoading: isGetMetricsTreemapLoading,
isError: isGetMetricsTreemapError,
error: metricsTreemapError,
} = useGetMetricsTreemap();
const metricsStatsApiError = useMemo(
() => convertToApiError(metricsStatsError),
[metricsStatsError],
);
const metricsTreemapApiError = useMemo(
() => convertToApiError(metricsTreemapError),
[metricsTreemapError],
);
useEffect(() => {
getMetricsStats({
data: metricsListQuery,
@@ -214,6 +192,8 @@ function Summary(): JSX.Element {
],
},
});
setCurrentQueryFilterExpression(expression);
setAppliedFilterExpression(expression);
setCurrentPage(1);
if (expression) {
logEvent(MetricsExplorerEvents.FilterApplied, {
@@ -310,14 +290,10 @@ function Summary(): JSX.Element {
};
const isMetricsListDataEmpty =
formattedMetricsData.length === 0 &&
!isGetMetricsStatsLoading &&
!isGetMetricsStatsError;
formattedMetricsData.length === 0 && !isGetMetricsStatsLoading;
const isMetricsTreeMapDataEmpty =
!treeMapData?.data[heatmapView]?.length &&
!isGetMetricsTreemapLoading &&
!isGetMetricsTreemapError;
!treeMapData?.data[heatmapView]?.length && !isGetMetricsTreemapLoading;
const showFullScreenLoading =
(isGetMetricsStatsLoading || isGetMetricsTreemapLoading) &&
@@ -346,7 +322,6 @@ function Summary(): JSX.Element {
data={treeMapData?.data}
isLoading={isGetMetricsTreemapLoading}
isError={isGetMetricsTreemapError}
error={metricsTreemapApiError}
viewType={heatmapView}
openMetricDetails={openMetricDetails}
setHeatmapView={handleSetHeatmapView}
@@ -354,7 +329,6 @@ function Summary(): JSX.Element {
<MetricsTable
isLoading={isGetMetricsStatsLoading}
isError={isGetMetricsStatsError}
error={metricsStatsApiError}
data={formattedMetricsData}
pageSize={pageSize}
currentPage={currentPage}

View File

@@ -0,0 +1,63 @@
import { Color } from '@signozhq/design-tokens';
import { render, screen } from '@testing-library/react';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import MetricTypeRenderer from '../MetricTypeRenderer';
// Stub the lucide-react icon components with bare <svg> elements that carry
// data-testid attributes, so the tests below can assert which icon rendered
// without depending on the real SVG markup.
jest.mock('lucide-react', () => {
	return {
		__esModule: true,
		Diff: (): JSX.Element => <svg data-testid="diff-icon" />,
		Gauge: (): JSX.Element => <svg data-testid="gauge-icon" />,
		BarChart2: (): JSX.Element => <svg data-testid="bar-chart-2-icon" />,
		BarChartHorizontal: (): JSX.Element => (
			<svg data-testid="bar-chart-horizontal-icon" />
		),
		BarChart: (): JSX.Element => <svg data-testid="bar-chart-icon" />,
	};
});
describe('MetricTypeRenderer', () => {
	it('should render correct icon and color for each metric type', () => {
		// One case per metric type: expected accent color and stubbed icon id.
		const cases = [
			{ type: MetricType.SUM, color: Color.BG_ROBIN_500, iconTestId: 'diff-icon' },
			{
				type: MetricType.GAUGE,
				color: Color.BG_SAKURA_500,
				iconTestId: 'gauge-icon',
			},
			{
				type: MetricType.HISTOGRAM,
				color: Color.BG_SIENNA_500,
				iconTestId: 'bar-chart-2-icon',
			},
			{
				type: MetricType.SUMMARY,
				color: Color.BG_FOREST_500,
				iconTestId: 'bar-chart-horizontal-icon',
			},
			{
				type: MetricType.EXPONENTIAL_HISTOGRAM,
				color: Color.BG_AQUA_500,
				iconTestId: 'bar-chart-icon',
			},
		];

		for (const { type, color, iconTestId } of cases) {
			const { container } = render(<MetricTypeRenderer type={type} />);
			const badge = container.firstChild as HTMLElement;

			// The wrapper carries the translucent background, border, and text
			// color derived from the type's accent color.
			expect(badge).toHaveStyle({
				backgroundColor: `${color}33`,
				border: `1px solid ${color}`,
				color,
			});
			expect(screen.getByTestId(iconTestId)).toBeInTheDocument();
		}
	});
});

View File

@@ -6,7 +6,6 @@ import { Filter } from 'api/v5/v5';
import * as useGetMetricsListFilterValues from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import * as useQueryBuilderOperationsHooks from 'hooks/queryBuilder/useQueryBuilderOperations';
import store from 'store';
import APIError from 'types/api/error';
import MetricsTable from '../MetricsTable';
import { MetricsListItemRowData } from '../types';
@@ -120,23 +119,12 @@ describe('MetricsTable', () => {
});
it('shows error state', () => {
const mockError = new APIError({
httpStatusCode: 400,
error: {
code: '400',
message: 'invalid filter expression',
url: '',
errors: [],
},
});
render(
<MemoryRouter>
<Provider store={store}>
<MetricsTable
isLoading={false}
isError
error={mockError}
data={[]}
pageSize={10}
currentPage={1}
@@ -151,8 +139,12 @@ describe('MetricsTable', () => {
</MemoryRouter>,
);
expect(screen.getByText('400')).toBeInTheDocument();
expect(screen.getByText('invalid filter expression')).toBeInTheDocument();
expect(screen.getByTestId('metrics-table-error-state')).toBeInTheDocument();
expect(
screen.getByText(
'Error fetching metrics. If the problem persists, please contact support.',
),
).toBeInTheDocument();
});
it('shows empty state when no data', () => {

View File

@@ -1,12 +1,16 @@
import { QueryClient, QueryClientProvider } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { Provider } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as metricsHooks from 'api/generated/services/metrics';
import { initialQueriesMap } from 'constants/queryBuilder';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import ROUTES from 'constants/routes';
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
import { render, screen, waitFor } from 'tests/test-utils';
import { DataSource, QueryBuilderContextType } from 'types/common/queryBuilder';
import * as useGetMetricsListHooks from 'hooks/metricsExplorer/useGetMetricsList';
import * as useGetMetricsTreeMapHooks from 'hooks/metricsExplorer/useGetMetricsTreeMap';
import store from 'store';
import { render, screen } from 'tests/test-utils';
import Summary from '../Summary';
import { TreemapViewType } from '../types';
jest.mock('d3-hierarchy', () => ({
stratify: jest.fn().mockReturnValue({
@@ -40,135 +44,58 @@ jest.mock('react-router-dom', () => ({
pathname: `${ROUTES.METRICS_EXPLORER_BASE}`,
}),
}));
jest.mock('hooks/queryBuilder/useShareBuilderUrl', () => ({
useShareBuilderUrl: jest.fn(),
}));
// Mock MetricsSearch so that filter-expression assertions are easy to write
jest.mock('../MetricsSearch', () => {
return function MockMetricsSearch(props: {
currentQueryFilterExpression: string;
}): JSX.Element {
return (
<div data-testid="metrics-search-expression">
{props.currentQueryFilterExpression}
</div>
);
};
});
const mockSetSearchParams = jest.fn();
const mockGetMetricsStats = jest.fn();
const mockGetMetricsTreemap = jest.fn();
const mockUseQueryBuilderData = {
handleRunQuery: jest.fn(),
stagedQuery: initialQueriesMap[DataSource.METRICS],
updateAllQueriesOperators: jest.fn(),
currentQuery: initialQueriesMap[DataSource.METRICS],
resetQuery: jest.fn(),
redirectWithQueryBuilderData: jest.fn(),
isStagedQueryUpdated: jest.fn(),
handleSetQueryData: jest.fn(),
handleSetFormulaData: jest.fn(),
handleSetQueryItemData: jest.fn(),
handleSetConfig: jest.fn(),
removeQueryBuilderEntityByIndex: jest.fn(),
removeQueryTypeItemByIndex: jest.fn(),
isDefaultQuery: jest.fn(),
};
const useGetMetricsStatsSpy = jest.spyOn(metricsHooks, 'useGetMetricsStats');
const useGetMetricsTreemapSpy = jest.spyOn(
metricsHooks,
'useGetMetricsTreemap',
);
const useQueryBuilderSpy = jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder');
describe('Summary', () => {
beforeEach(() => {
jest.clearAllMocks();
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams(),
mockSetSearchParams,
]);
useGetMetricsStatsSpy.mockReturnValue({
data: null,
mutate: mockGetMetricsStats,
isLoading: true,
isError: false,
error: null,
isIdle: true,
isSuccess: false,
reset: jest.fn(),
status: 'idle',
} as any);
useGetMetricsTreemapSpy.mockReturnValue({
data: null,
mutate: mockGetMetricsTreemap,
isLoading: true,
isError: false,
error: null,
isIdle: true,
isSuccess: false,
reset: jest.fn(),
status: 'idle',
} as any);
useQueryBuilderSpy.mockReturnValue(({
...mockUseQueryBuilderData,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
});
it('does not carry filter expression from a previous page', async () => {
const staleFilterExpression = "service.name = 'redis'";
// prev filter from logs explorer
const staleQuery = {
...initialQueriesMap[DataSource.METRICS],
builder: {
...initialQueriesMap[DataSource.METRICS].builder,
queryData: [
const queryClient = new QueryClient();
const mockMetricName = 'test-metric';
jest.spyOn(useGetMetricsListHooks, 'useGetMetricsList').mockReturnValue({
data: {
payload: {
status: 'success',
data: {
metrics: [
{
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
filter: { expression: staleFilterExpression },
metric_name: mockMetricName,
description: 'description for a test metric',
type: MetricType.GAUGE,
unit: 'count',
lastReceived: '1715702400',
[TreemapViewType.TIMESERIES]: 100,
[TreemapViewType.SAMPLES]: 100,
},
],
},
};
// stagedQuery has stale filter (before QueryBuilder resets it)
useQueryBuilderSpy.mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: staleQuery,
currentQuery: staleQuery,
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
const { rerender } = render(<Summary />);
expect(screen.getByTestId('metrics-search-expression')).toHaveTextContent(
staleFilterExpression,
);
// QB route change effect resets stagedQuery to null
useQueryBuilderSpy.mockReturnValue(({
...mockUseQueryBuilderData,
stagedQuery: null,
currentQuery: initialQueriesMap[DataSource.METRICS],
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
rerender(<Summary />);
await waitFor(() => {
expect(
screen.getByTestId('metrics-search-expression'),
).toBeEmptyDOMElement();
});
});
},
},
isError: false,
isLoading: false,
} as any);
jest.spyOn(useGetMetricsTreeMapHooks, 'useGetMetricsTreeMap').mockReturnValue({
data: {
payload: {
status: 'success',
data: {
[TreemapViewType.TIMESERIES]: [
{
metric_name: mockMetricName,
percentage: 100,
total_value: 100,
},
],
[TreemapViewType.SAMPLES]: [
{
metric_name: mockMetricName,
percentage: 100,
},
],
},
},
},
isError: false,
isLoading: false,
} as any);
const mockSetSearchParams = jest.fn();
describe('Summary', () => {
it('persists inspect modal open state across page refresh', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({
@@ -178,7 +105,13 @@ describe('Summary', () => {
mockSetSearchParams,
]);
render(<Summary />);
render(
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<Summary />
</Provider>
</QueryClientProvider>,
);
expect(screen.queryByText('Proportion View')).not.toBeInTheDocument();
});
@@ -187,12 +120,18 @@ describe('Summary', () => {
(useSearchParams as jest.Mock).mockReturnValue([
new URLSearchParams({
isMetricDetailsOpen: 'true',
selectedMetricName: 'test-metric',
selectedMetricName: mockMetricName,
}),
mockSetSearchParams,
]);
render(<Summary />);
render(
<QueryClientProvider client={queryClient}>
<Provider store={store}>
<Summary />
</Provider>
</QueryClientProvider>,
);
expect(screen.queryByText('Proportion View')).not.toBeInTheDocument();
});

View File

@@ -2,6 +2,7 @@ import {
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
export const METRICS_TABLE_PAGE_SIZE = 10;
@@ -18,6 +19,15 @@ export const TREEMAP_SQUARE_PADDING = 5;
export const TREEMAP_MARGINS = { TOP: 10, LEFT: 10, RIGHT: 10, BOTTOM: 10 };
// TODO: Remove this once API migration is complete
// Human-readable display labels for the legacy MetricType enum
// (note the abbreviated 'Exp. Histogram' — UI copy, not an API value).
export const METRIC_TYPE_LABEL_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
[MetricType.HISTOGRAM]: 'Histogram',
[MetricType.SUMMARY]: 'Summary',
[MetricType.EXPONENTIAL_HISTOGRAM]: 'Exp. Histogram',
};
export const METRIC_TYPE_VIEW_LABEL_MAP: Record<MetrictypesTypeDTO, string> = {
[MetrictypesTypeDTO.sum]: 'Sum',
[MetrictypesTypeDTO.gauge]: 'Gauge',
@@ -26,6 +36,15 @@ export const METRIC_TYPE_VIEW_LABEL_MAP: Record<MetrictypesTypeDTO, string> = {
[MetrictypesTypeDTO.exponentialhistogram]: 'Exp. Histogram',
};
// TODO(@amlannandy): To remove this once API migration is complete
// Maps legacy MetricType enum members to their wire/value strings
// (e.g. 'ExponentialHistogram' unabbreviated, unlike the label map).
export const METRIC_TYPE_VALUES_MAP: Record<MetricType, string> = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
[MetricType.HISTOGRAM]: 'Histogram',
[MetricType.SUMMARY]: 'Summary',
[MetricType.EXPONENTIAL_HISTOGRAM]: 'ExponentialHistogram',
};
export const METRIC_TYPE_VIEW_VALUES_MAP: Record<MetrictypesTypeDTO, string> = {
[MetrictypesTypeDTO.sum]: 'Sum',
[MetrictypesTypeDTO.gauge]: 'Gauge',

View File

@@ -5,13 +5,11 @@ import {
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import { Filter } from 'api/v5/v5';
import APIError from 'types/api/error';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export interface MetricsTableProps {
isLoading: boolean;
isError: boolean;
error?: APIError;
data: MetricsListItemRowData[];
pageSize: number;
currentPage: number;
@@ -35,7 +33,6 @@ export interface MetricsTreemapProps {
data: MetricsexplorertypesTreemapResponseDTO | undefined;
isLoading: boolean;
isError: boolean;
error?: APIError;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
setHeatmapView: (value: MetricsexplorertypesTreemapModeDTO) => void;
@@ -44,7 +41,6 @@ export interface MetricsTreemapProps {
export interface MetricsTreemapInternalProps {
isLoading: boolean;
isError: boolean;
error?: APIError;
data: MetricsexplorertypesTreemapResponseDTO | undefined;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;

View File

@@ -1,11 +1,5 @@
import { useEffect, useState } from 'react';
import {
fireEvent,
render,
screen,
waitFor,
within,
} from '@testing-library/react';
import { fireEvent, render, screen, within } from '@testing-library/react';
import {
MetricsexplorertypesListMetricDTO,
MetrictypesTypeDTO,
@@ -227,108 +221,6 @@ describe('MetricNameSelector', () => {
expect(container.querySelector('.ant-spin-spinning')).toBeInTheDocument();
});
it('preserves metric search text for signalSource normalization transition (undefined -> empty)', async () => {
returnMetrics([makeMetric({ metricName: 'http_requests_total' })]);
const query = makeQuery({
aggregateAttribute: {
key: 'http_requests_total',
type: '',
dataType: DataTypes.Float64,
},
aggregations: [
{
metricName: 'http_requests_total',
timeAggregation: 'rate',
spaceAggregation: 'sum',
temporality: '',
},
] as MetricAggregation[],
});
const { rerender } = render(
<MetricNameSelector
query={query}
onChange={jest.fn()}
signalSource={undefined}
/>,
);
rerender(
<MetricNameSelector query={query} onChange={jest.fn()} signalSource="" />,
);
await waitFor(() => {
const lastCall =
mockUseListMetrics.mock.calls[mockUseListMetrics.mock.calls.length - 1];
expect(lastCall?.[0]).toMatchObject({
searchText: 'http_requests_total',
limit: 100,
});
});
});
it('updates search text when metric name is hydrated after initial mount', async () => {
returnMetrics([makeMetric({ metricName: 'signoz_latency.bucket' })]);
const emptyQuery = makeQuery({
aggregateAttribute: {
key: '',
type: '',
dataType: DataTypes.Float64,
},
aggregations: [
{
metricName: '',
timeAggregation: 'rate',
spaceAggregation: 'sum',
temporality: '',
},
] as MetricAggregation[],
});
const hydratedQuery = makeQuery({
aggregateAttribute: {
key: '',
type: '',
dataType: DataTypes.Float64,
},
aggregations: [
{
metricName: 'signoz_latency.bucket',
timeAggregation: 'rate',
spaceAggregation: 'sum',
temporality: '',
},
] as MetricAggregation[],
});
const { rerender } = render(
<MetricNameSelector
query={emptyQuery}
onChange={jest.fn()}
signalSource=""
/>,
);
rerender(
<MetricNameSelector
query={hydratedQuery}
onChange={jest.fn()}
signalSource=""
/>,
);
await waitFor(() => {
const lastCall =
mockUseListMetrics.mock.calls[mockUseListMetrics.mock.calls.length - 1];
expect(lastCall?.[0]).toMatchObject({
searchText: 'signoz_latency.bucket',
limit: 100,
});
});
});
});
describe('selecting a metric type updates the aggregation options', () => {

View File

@@ -98,30 +98,15 @@ export const MetricNameSelector = memo(function MetricNameSelector({
useEffect(() => {
setInputValue(currentMetricName || defaultValue || '');
if (currentMetricName) {
setSearchText(currentMetricName);
}
}, [defaultValue, currentMetricName]);
useEffect(() => {
if (prevSignalSourceRef.current !== signalSource) {
const previousSignalSource = prevSignalSourceRef.current;
prevSignalSourceRef.current = signalSource;
const isNormalizationTransition =
(previousSignalSource === undefined && signalSource === '') ||
(previousSignalSource === '' && signalSource === undefined);
if (isNormalizationTransition && currentMetricName) {
setSearchText(currentMetricName);
setInputValue(currentMetricName || defaultValue || '');
return;
}
setSearchText('');
setInputValue('');
}
}, [signalSource, currentMetricName, defaultValue]);
}, [signalSource]);
const debouncedValue = useDebounce(searchText, DEBOUNCE_DELAY);
@@ -167,9 +152,7 @@ export const MetricNameSelector = memo(function MetricNameSelector({
}, [metrics]);
useEffect(() => {
const metricName =
(query.aggregations?.[0] as MetricAggregation)?.metricName ||
query.aggregateAttribute?.key;
const metricName = (query.aggregations?.[0] as MetricAggregation)?.metricName;
const hasAggregateAttributeType = query.aggregateAttribute?.type;
if (metricName && !hasAggregateAttributeType && metrics.length > 0) {
@@ -181,13 +164,7 @@ export const MetricNameSelector = memo(function MetricNameSelector({
);
}
}
}, [
metrics,
query.aggregations,
query.aggregateAttribute?.key,
query.aggregateAttribute?.type,
onChange,
]);
}, [metrics, query.aggregations, query.aggregateAttribute?.type, onChange]);
const resolveMetricFromText = useCallback(
(text: string): BaseAutocompleteData => {

View File

@@ -0,0 +1,50 @@
import { useMemo } from 'react';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
import {
getMetricsTreeMap,
MetricsTreeMapPayload,
MetricsTreeMapResponse,
} from 'api/metricsExplorer/getMetricsTreeMap';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { ErrorResponse, SuccessResponse } from 'types/api';
type UseGetMetricsTreeMap = (
  requestData: MetricsTreeMapPayload,
  options?: UseQueryOptions<
    SuccessResponse<MetricsTreeMapResponse> | ErrorResponse,
    Error
  >,
  headers?: Record<string, string>,
) => UseQueryResult<
  SuccessResponse<MetricsTreeMapResponse> | ErrorResponse,
  Error
>;

/**
 * Fetches the metrics treemap for the given payload via react-query.
 * A caller-supplied queryKey (array or string) takes precedence; otherwise
 * the key is derived from the request payload.
 */
export const useGetMetricsTreeMap: UseGetMetricsTreeMap = (
  requestData,
  options,
  headers,
) => {
  const resolvedQueryKey = useMemo(() => {
    const providedKey = options?.queryKey;
    if (providedKey) {
      if (Array.isArray(providedKey)) {
        return [...providedKey];
      }
      if (typeof providedKey === 'string') {
        return providedKey;
      }
    }
    // Default: key on the endpoint plus the full request payload.
    return [REACT_QUERY_KEY.GET_METRICS_TREE_MAP, requestData];
  }, [options?.queryKey, requestData]);

  return useQuery<
    SuccessResponse<MetricsTreeMapResponse> | ErrorResponse,
    Error
  >({
    queryFn: ({ signal }) => getMetricsTreeMap(requestData, signal, headers),
    ...options,
    // Placed after the spread so the resolved key always wins.
    queryKey: resolvedQueryKey,
  });
};

View File

@@ -0,0 +1,26 @@
import { useMutation, UseMutationResult } from 'react-query';
import updateMetricMetadata, {
UpdateMetricMetadataProps,
UpdateMetricMetadataResponse,
} from 'api/metricsExplorer/updateMetricMetadata';
import { ErrorResponse, SuccessResponse } from 'types/api';
export interface UseUpdateMetricMetadataProps {
metricName: string;
payload: UpdateMetricMetadataProps;
}
export function useUpdateMetricMetadata(): UseMutationResult<
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
Error,
UseUpdateMetricMetadataProps
> {
return useMutation<
SuccessResponse<UpdateMetricMetadataResponse> | ErrorResponse,
Error,
UseUpdateMetricMetadataProps
>({
mutationFn: ({ metricName, payload }) =>
updateMetricMetadata(metricName, payload),
});
}

View File

@@ -4,7 +4,6 @@ import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useTimezone } from 'providers/Timezone';
import { TooltipProps } from '../types';
@@ -23,14 +22,6 @@ export default function Tooltip({
const isDarkMode = useIsDarkMode();
const [listHeight, setListHeight] = useState(0);
const tooltipContent = content ?? [];
const { timezone: userTimezone } = useTimezone();
const resolvedTimezone = useMemo(() => {
if (!timezone) {
return userTimezone.value;
}
return timezone.value;
}, [timezone, userTimezone]);
const headerTitle = useMemo(() => {
if (!showTooltipHeader) {
@@ -42,10 +33,10 @@ export default function Tooltip({
return null;
}
return dayjs(data[0][cursorIdx] * 1000)
.tz(resolvedTimezone)
.tz(timezone)
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
}, [
resolvedTimezone,
timezone,
uPlotInstance.data,
uPlotInstance.cursor.idx,
showTooltipHeader,

View File

@@ -83,7 +83,7 @@ function createUPlotInstance(cursorIdx: number | null): uPlot {
function renderTooltip(props: Partial<TooltipTestProps> = {}): RenderResult {
const defaultProps: TooltipTestProps = {
uPlotInstance: createUPlotInstance(null),
timezone: { value: 'UTC', name: 'UTC', offset: '0', searchIndex: '0' },
timezone: 'UTC',
content: [],
showTooltipHeader: true,
// TooltipRenderArgs (not used directly in component but required by type)

View File

@@ -92,7 +92,7 @@ export default function UPlotChart({
setPlotContextInitialState({
uPlotInstance: plot,
id: config.getId(),
widgetId: config.getWidgetId(),
shouldSaveSelectionPreference: config.getShouldSaveSelectionPreference(),
});

View File

@@ -84,7 +84,7 @@ const createMockConfig = (): UPlotConfigBuilder => {
hooks: {},
cursor: {},
}),
getId: jest.fn().mockReturnValue(undefined),
getWidgetId: jest.fn().mockReturnValue(undefined),
getShouldSaveSelectionPreference: jest.fn().mockReturnValue(false),
} as unknown) as UPlotConfigBuilder;
};

View File

@@ -1,5 +1,4 @@
import { ReactNode } from 'react';
import { Timezone } from 'components/CustomTimePicker/timezoneUtils';
import { PrecisionOption } from 'components/Graph/types';
import uPlot from 'uplot';
@@ -62,7 +61,7 @@ export interface TooltipRenderArgs {
export interface BaseTooltipProps {
showTooltipHeader?: boolean;
timezone?: Timezone;
timezone: string;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
content?: TooltipContentItem[];

View File

@@ -74,21 +74,23 @@ export class UPlotConfigBuilder extends ConfigBuilder<
private tzDate: ((timestamp: number) => Date) | undefined;
private id: string;
private widgetId: string | undefined;
private onDragSelect: (startTime: number, endTime: number) => void;
constructor(args: ConfigBuilderProps) {
super(args);
constructor(args?: ConfigBuilderProps) {
super(args ?? {});
const {
id,
widgetId,
onDragSelect,
tzDate,
selectionPreferencesSource,
shouldSaveSelectionPreference,
stepInterval,
} = args ?? {};
this.id = id;
if (widgetId) {
this.widgetId = widgetId;
}
if (tzDate) {
this.tzDate = tzDate;
@@ -250,10 +252,10 @@ export class UPlotConfigBuilder extends ConfigBuilder<
*/
private getStoredVisibility(): SeriesVisibilityItem[] | null {
if (
this.id &&
this.widgetId &&
this.selectionPreferencesSource === SelectionPreferencesSource.LOCAL_STORAGE
) {
return getStoredSeriesVisibility(this.id);
return getStoredSeriesVisibility(this.widgetId);
}
return null;
}
@@ -376,10 +378,10 @@ export class UPlotConfigBuilder extends ConfigBuilder<
}
/**
* Get the id for the builder
* Get the widget id
*/
getId(): string {
return this.id;
getWidgetId(): string | undefined {
return this.widgetId;
}
/**

View File

@@ -1,3 +1,4 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { calculateWidthBasedOnStepInterval } from 'lib/uPlotV2/utils';
@@ -22,9 +23,6 @@ import {
* Path builders are static and shared across all instances of UPlotSeriesBuilder
*/
let builders: PathBuilders | null = null;
const DEFAULT_LINE_WIDTH = 2;
export const POINT_SIZE_FACTOR = 2.5;
export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
constructor(props: SeriesProps) {
super(props);
@@ -55,7 +53,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
const lineConfig: Partial<Series> = {
stroke: resolvedLineColor,
width: lineWidth ?? DEFAULT_LINE_WIDTH,
width: lineWidth ?? 2,
};
if (lineStyle === LineStyle.Dashed) {
@@ -68,9 +66,9 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
if (fillColor) {
lineConfig.fill = fillColor;
} else if (this.props.drawStyle === DrawStyle.Bar) {
} else if (this.props.panelType === PANEL_TYPES.BAR) {
lineConfig.fill = resolvedLineColor;
} else if (this.props.drawStyle === DrawStyle.Histogram) {
} else if (this.props.panelType === PANEL_TYPES.HISTOGRAM) {
lineConfig.fill = `${resolvedLineColor}40`;
}
@@ -139,19 +137,10 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
drawStyle,
showPoints,
} = this.props;
/**
* If pointSize is not provided, use the lineWidth * POINT_SIZE_FACTOR
* to determine the point size.
* POINT_SIZE_FACTOR is 2, so the point size will be 2x the line width.
*/
const resolvedPointSize =
pointSize ?? (lineWidth ?? DEFAULT_LINE_WIDTH) * POINT_SIZE_FACTOR;
const pointsConfig: Partial<Series.Points> = {
stroke: resolvedLineColor,
fill: resolvedLineColor,
size: resolvedPointSize,
size: !pointSize || pointSize < (lineWidth ?? 2) ? undefined : pointSize,
filter: pointsFilter || undefined,
};
@@ -242,7 +231,7 @@ function getPathBuilder({
throw new Error('Required uPlot path builders are not available');
}
if (drawStyle === DrawStyle.Bar || drawStyle === DrawStyle.Histogram) {
if (drawStyle === DrawStyle.Bar) {
const pathBuilders = uPlot.paths;
return getBarPathBuilder({
pathBuilders,

View File

@@ -1,3 +1,4 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import uPlot from 'uplot';
import {
@@ -42,27 +43,27 @@ describe('UPlotConfigBuilder', () => {
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
panelType: PANEL_TYPES.TIME_SERIES,
...overrides,
});
it('returns correct save selection preference flag from constructor args', () => {
const builder = new UPlotConfigBuilder({
id: 'widget-123',
shouldSaveSelectionPreference: true,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('returns id from constructor args', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
it('returns widgetId from constructor args', () => {
const builder = new UPlotConfigBuilder({ widgetId: 'widget-123' });
expect(builder.getId()).toBe('widget-123');
expect(builder.getWidgetId()).toBe('widget-123');
});
it('sets tzDate from constructor and includes it in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder({ id: 'widget-123', tzDate });
const builder = new UPlotConfigBuilder({ tzDate });
const config = builder.getConfig();
@@ -71,7 +72,7 @@ describe('UPlotConfigBuilder', () => {
it('does not call onDragSelect for click without drag (width === 0)', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ id: 'widget-123', onDragSelect });
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
@@ -84,15 +85,14 @@ describe('UPlotConfigBuilder', () => {
// Simulate uPlot calling the hook
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
setSelectHook?.(uplotInstance);
setSelectHook!(uplotInstance);
expect(onDragSelect).not.toHaveBeenCalled();
});
it('calls onDragSelect with start and end times in milliseconds for a drag selection', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ id: 'widget-123', onDragSelect });
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
@@ -111,8 +111,7 @@ describe('UPlotConfigBuilder', () => {
} as unknown) as uPlot;
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
setSelectHook?.(uplotInstance);
setSelectHook!(uplotInstance);
expect(onDragSelect).toHaveBeenCalledTimes(1);
// 100 and 110 seconds converted to milliseconds
@@ -120,7 +119,7 @@ describe('UPlotConfigBuilder', () => {
});
it('adds and removes hooks via addHook, and exposes them through getConfig', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const drawHook = jest.fn();
const remove = builder.addHook('draw', drawHook as uPlot.Hooks.Defs['draw']);
@@ -135,7 +134,7 @@ describe('UPlotConfigBuilder', () => {
});
it('adds axes, scales, and series and wires them into the final config', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
// Add axis and scale
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
@@ -171,7 +170,7 @@ describe('UPlotConfigBuilder', () => {
});
it('merges axis when addAxis is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addAxis({ scaleKey: 'y', label: 'Updated Label', show: false });
@@ -184,7 +183,7 @@ describe('UPlotConfigBuilder', () => {
});
it('merges scale when addScale is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
builder.addScale({ scaleKey: 'y', min: 0 });
builder.addScale({ scaleKey: 'y', max: 100 });
@@ -205,7 +204,7 @@ describe('UPlotConfigBuilder', () => {
]);
const builder = new UPlotConfigBuilder({
id: 'widget-1',
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
@@ -232,7 +231,7 @@ describe('UPlotConfigBuilder', () => {
]);
const builder = new UPlotConfigBuilder({
id: 'widget-1',
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
@@ -270,7 +269,7 @@ describe('UPlotConfigBuilder', () => {
]);
const builder = new UPlotConfigBuilder({
id: 'widget-1',
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
@@ -303,7 +302,7 @@ describe('UPlotConfigBuilder', () => {
]);
const builder = new UPlotConfigBuilder({
id: 'widget-dup',
widgetId: 'widget-dup',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
@@ -330,7 +329,7 @@ describe('UPlotConfigBuilder', () => {
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
id: 'widget-1',
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.IN_MEMORY,
});
@@ -345,7 +344,7 @@ describe('UPlotConfigBuilder', () => {
});
it('adds thresholds only once per scale key', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
@@ -363,7 +362,7 @@ describe('UPlotConfigBuilder', () => {
});
it('adds multiple thresholds when scale key is different', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
@@ -384,7 +383,7 @@ describe('UPlotConfigBuilder', () => {
});
it('merges cursor configuration with defaults instead of replacing them', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
@@ -399,7 +398,7 @@ describe('UPlotConfigBuilder', () => {
describe('getCursorConfig', () => {
it('returns default cursor merged with custom cursor when no stepInterval', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
@@ -413,7 +412,7 @@ describe('UPlotConfigBuilder', () => {
});
it('returns hover prox as DEFAULT_HOVER_PROXIMITY_VALUE when stepInterval is not set', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const cursorConfig = builder.getCursorConfig();
@@ -425,15 +424,15 @@ describe('UPlotConfigBuilder', () => {
const mockWidth = 100;
calculateWidthBasedOnStepIntervalMock.mockReturnValue(mockWidth);
const builder = new UPlotConfigBuilder({ id: 'widget-123', stepInterval });
const builder = new UPlotConfigBuilder({ stepInterval });
const cursorConfig = builder.getCursorConfig();
expect(typeof cursorConfig.hover?.prox).toBe('function');
const uPlotInstance = {} as uPlot;
const prox = cursorConfig.hover?.prox as ((u: uPlot) => number) | undefined;
expect(prox).toBeDefined();
const proxResult = prox ? prox(uPlotInstance) : NaN;
const proxResult = (cursorConfig.hover!.prox as (u: uPlot) => number)(
uPlotInstance,
);
expect(calculateWidthBasedOnStepIntervalMock).toHaveBeenCalledWith({
uPlotInstance,
@@ -444,7 +443,7 @@ describe('UPlotConfigBuilder', () => {
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {
opts: (): void => {},
hooks: {},
@@ -459,7 +458,7 @@ describe('UPlotConfigBuilder', () => {
it('sets padding, legend, focus, select, tzDate, bands and includes them in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const bands: uPlot.Band[] = [{ series: [1, 2], fill: (): string => '#000' }];
@@ -481,7 +480,7 @@ describe('UPlotConfigBuilder', () => {
});
it('does not include plugins when none added', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
@@ -489,7 +488,7 @@ describe('UPlotConfigBuilder', () => {
});
it('does not include bands when empty', () => {
const builder = new UPlotConfigBuilder({ id: 'widget-123' });
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();

View File

@@ -1,3 +1,4 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import uPlot from 'uplot';
@@ -8,7 +9,7 @@ import {
LineStyle,
VisibilityMode,
} from '../types';
import { POINT_SIZE_FACTOR, UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
import { UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
const createBaseProps = (
overrides: Partial<SeriesProps> = {},
@@ -18,6 +19,7 @@ const createBaseProps = (
colorMapping: {},
drawStyle: DrawStyle.Line,
isDarkMode: false,
panelType: PANEL_TYPES.TIME_SERIES,
...overrides,
});
@@ -135,6 +137,7 @@ describe('UPlotSeriesBuilder', () => {
const smallPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 4,
pointSize: 2,
}),
);
const largePointsBuilder = new UPlotSeriesBuilder(
@@ -147,7 +150,7 @@ describe('UPlotSeriesBuilder', () => {
const smallConfig = smallPointsBuilder.getConfig();
const largeConfig = largePointsBuilder.getConfig();
expect(smallConfig.points?.size).toBe(4 * POINT_SIZE_FACTOR); // should be lineWidth * POINT_SIZE_FACTOR, when pointSize is not provided
expect(smallConfig.points?.size).toBeUndefined();
expect(largeConfig.points?.size).toBe(4);
});

View File

@@ -34,7 +34,7 @@ export enum SelectionPreferencesSource {
* Props for configuring the uPlot config builder
*/
export interface ConfigBuilderProps {
id: string;
widgetId?: string;
onDragSelect?: (startTime: number, endTime: number) => void;
tzDate?: uPlot.LocalDateFromUnix;
selectionPreferencesSource?: SelectionPreferencesSource;
@@ -112,7 +112,6 @@ export enum DrawStyle {
Line = 'line',
Points = 'points',
Bar = 'bar',
Histogram = 'histogram',
}
export enum LineInterpolation {
@@ -169,6 +168,7 @@ export interface PointsConfig {
export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
scaleKey: string;
label?: string;
panelType: PANEL_TYPES;
colorMapping: Record<string, string>;
drawStyle: DrawStyle;
pathBuilder?: Series.PathBuilder;

View File

@@ -13,7 +13,7 @@ import { updateSeriesVisibilityToLocalStorage } from 'container/DashboardContain
import type uPlot from 'uplot';
export interface PlotContextInitialState {
uPlotInstance: uPlot | null;
id?: string;
widgetId?: string;
shouldSaveSelectionPreference?: boolean;
}
export interface IPlotContext {
@@ -31,17 +31,17 @@ export const PlotContextProvider = ({
}: PropsWithChildren): JSX.Element => {
const uPlotInstanceRef = useRef<uPlot | null>(null);
const activeSeriesIndex = useRef<number | undefined>(undefined);
const idRef = useRef<string | undefined>(undefined);
const widgetIdRef = useRef<string | undefined>(undefined);
const shouldSavePreferencesRef = useRef<boolean>(false);
const setPlotContextInitialState = useCallback(
({
uPlotInstance,
id,
widgetId,
shouldSaveSelectionPreference,
}: PlotContextInitialState): void => {
uPlotInstanceRef.current = uPlotInstance;
idRef.current = id;
widgetIdRef.current = widgetId;
activeSeriesIndex.current = undefined;
shouldSavePreferencesRef.current = !!shouldSaveSelectionPreference;
},
@@ -50,7 +50,7 @@ export const PlotContextProvider = ({
const syncSeriesVisibilityToLocalStorage = useCallback((): void => {
const plot = uPlotInstanceRef.current;
if (!plot || !idRef.current) {
if (!plot || !widgetIdRef.current) {
return;
}
@@ -61,7 +61,7 @@ export const PlotContextProvider = ({
}),
);
updateSeriesVisibilityToLocalStorage(idRef.current, seriesVisibility);
updateSeriesVisibilityToLocalStorage(widgetIdRef.current, seriesVisibility);
}, []);
const onToggleSeriesVisibility = useCallback(
@@ -84,7 +84,7 @@ export const PlotContextProvider = ({
show: isReset || currentSeriesIndex === seriesIndex,
});
});
if (idRef.current && shouldSavePreferencesRef.current) {
if (widgetIdRef.current && shouldSavePreferencesRef.current) {
syncSeriesVisibilityToLocalStorage();
}
});
@@ -104,7 +104,7 @@ export const PlotContextProvider = ({
return;
}
plot.setSeries(seriesIndex, { show: !series.show });
if (idRef.current && shouldSavePreferencesRef.current) {
if (widgetIdRef.current && shouldSavePreferencesRef.current) {
syncSeriesVisibilityToLocalStorage();
}
},

View File

@@ -32,13 +32,13 @@ const createMockPlot = (series: MockSeries[] = []): uPlot =>
interface TestComponentProps {
plot?: uPlot;
id?: string;
widgetId?: string;
shouldSaveSelectionPreference?: boolean;
}
const TestComponent = ({
plot,
id,
widgetId,
shouldSaveSelectionPreference,
}: TestComponentProps): JSX.Element => {
const {
@@ -49,13 +49,17 @@ const TestComponent = ({
onFocusSeries,
} = usePlotContext();
const handleInit = (): void => {
if (!plot || !id || typeof shouldSaveSelectionPreference !== 'boolean') {
if (
!plot ||
!widgetId ||
typeof shouldSaveSelectionPreference !== 'boolean'
) {
return;
}
setPlotContextInitialState({
uPlotInstance: plot,
id,
widgetId,
shouldSaveSelectionPreference,
});
};
@@ -144,7 +148,11 @@ describe('PlotContext', () => {
render(
<PlotContextProvider>
<TestComponent plot={plot} id="widget-123" shouldSaveSelectionPreference />
<TestComponent
plot={plot}
widgetId="widget-123"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
);
@@ -191,7 +199,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-visibility"
widgetId="widget-visibility"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
@@ -232,7 +240,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-reset"
widgetId="widget-reset"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
@@ -282,7 +290,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-toggle"
widgetId="widget-toggle"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
@@ -308,7 +316,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-missing-series"
widgetId="widget-missing-series"
shouldSaveSelectionPreference
/>
</PlotContextProvider>,
@@ -333,7 +341,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-no-persist"
widgetId="widget-no-persist"
shouldSaveSelectionPreference={false}
/>
</PlotContextProvider>,
@@ -371,7 +379,7 @@ describe('PlotContext', () => {
<PlotContextProvider>
<TestComponent
plot={plot}
id="widget-focus"
widgetId="widget-focus"
shouldSaveSelectionPreference={false}
/>
</PlotContextProvider>,

View File

@@ -40,7 +40,7 @@ class TestConfigBuilder extends UPlotConfigBuilder {
type ConfigMock = TestConfigBuilder;
function createConfigMock(): ConfigMock {
return new TestConfigBuilder({ id: 'test-widget' });
return new TestConfigBuilder();
}
function getHandler(config: ConfigMock, hookName: string): HookHandler {

View File

@@ -25,7 +25,7 @@ function resetStore(): void {
function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
getVariableDependencyContextSpy.mockReturnValue({
doAllQueryVariablesHaveValuesSelected: false,
doAllVariablesHaveValuesSelected: false,
variableTypes: {},
dynamicVariableOrder: [],
dependencyData: null,
@@ -175,9 +175,9 @@ describe('variableFetchStore', () => {
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set dynamic variables to waiting when not all query variables have values', () => {
it('should set dynamic variables to waiting when not all variables have values', () => {
mockContext({
doAllQueryVariablesHaveValuesSelected: false,
doAllVariablesHaveValuesSelected: false,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
@@ -190,9 +190,9 @@ describe('variableFetchStore', () => {
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should set dynamic variables to loading when all query variables have values', () => {
it('should set dynamic variables to loading when all variables have values', () => {
mockContext({
doAllQueryVariablesHaveValuesSelected: true,
doAllVariablesHaveValuesSelected: true,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
@@ -523,77 +523,5 @@ describe('variableFetchStore', () => {
expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
});
it('should enqueue dynamic variables immediately when all query variables are settled', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { customVar: [] },
parentDependencyGraph: {},
}),
variableTypes: { q1: 'QUERY', customVar: 'CUSTOM', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.q1 = 'idle';
d.states.customVar = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('customVar');
const snapshot = variableFetchStore.getSnapshot();
expect(snapshot.states.dyn1).toBe('loading');
expect(snapshot.cycleIds.dyn1).toBe(1);
});
it('should set dynamic variables to waiting when query variables are not yet settled', () => {
// a is a query variable still loading; changing customVar should queue dyn1 as waiting
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { customVar: [] },
parentDependencyGraph: {},
}),
variableTypes: { a: 'QUERY', customVar: 'CUSTOM', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.customVar = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('customVar');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
});
it('should set dynamic variables to waiting when a query descendant is now loading', () => {
// a -> b (QUERY), dyn1 (DYNAMIC). When a changes, b starts loading,
// so dyn1 should wait until b settles.
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('a');
const snapshot = variableFetchStore.getSnapshot();
// b's parent (a) is idle → b starts loading
expect(snapshot.states.b).toBe('loading');
// dyn1 must wait because b is now loading (not settled)
expect(snapshot.states.dyn1).toBe('waiting');
});
});
});

View File

@@ -134,7 +134,7 @@ describe('dashboardVariablesStore', () => {
expect(dependencyData).not.toBeNull();
});
it('should report doAllQueryVariablesHaveValuesSelected as true when all query variables have values', () => {
it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
@@ -146,46 +146,18 @@ describe('dashboardVariablesStore', () => {
}),
region: createVariable({
name: 'region',
type: 'QUERY',
type: 'CUSTOM',
order: 1,
selectedValue: 'us-east',
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(true);
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report doAllQueryVariablesHaveValuesSelected as false when a query variable lacks a selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'QUERY',
order: 1,
selectedValue: undefined,
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(false);
});
it('should ignore non-QUERY variables when computing doAllQueryVariablesHaveValuesSelected', () => {
// env (QUERY) has a value; region (CUSTOM) and dyn1 (DYNAMIC) do not — they are ignored
it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
@@ -201,22 +173,14 @@ describe('dashboardVariablesStore', () => {
order: 1,
selectedValue: undefined,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 2,
selectedValue: '',
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(true);
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
it('should return true for doAllQueryVariablesHaveValuesSelected when there are no query variables', () => {
it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
@@ -224,73 +188,110 @@ describe('dashboardVariablesStore', () => {
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
selectedValue: null as any,
allSelected: true,
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(true);
});
// Any non-nil, non-empty-array selectedValue is treated as selected
it.each([
{ label: 'numeric 0', selectedValue: 0 as number },
{ label: 'boolean false', selectedValue: false as boolean },
// ideally not possible but till we have concrete schema, we should not block dynamic variables
{ label: 'empty string', selectedValue: '' },
{
label: 'non-empty array',
selectedValue: ['a', 'b'] as (string | number | boolean)[],
},
])('should return true when selectedValue is $label', ({ selectedValue }) => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue,
order: 1,
selectedValue: 'prod',
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(true);
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
// null/undefined (tested above) and empty array are treated as not selected
it.each([
{
label: 'null',
selectedValue: null as IDashboardVariable['selectedValue'],
},
{ label: 'empty array', selectedValue: [] as (string | number | boolean)[] },
])(
'should return false when selectedValue is $label',
({ selectedValue }) => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue,
}),
},
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=undefined as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: undefined,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const {
doAllQueryVariablesHaveValuesSelected,
} = getVariableDependencyContext();
expect(doAllQueryVariablesHaveValuesSelected).toBe(false);
},
);
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty string selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty array selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: [] as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: false,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
});
});

View File

@@ -1,4 +1,4 @@
import { isNil } from 'lodash-es';
import { isEmpty, isUndefined } from 'lodash-es';
import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
@@ -68,28 +68,28 @@ export function updateDashboardVariablesStore({
*/
export function getVariableDependencyContext(): VariableFetchContext {
const state = dashboardVariablesStore.getSnapshot();
// Dynamic variables should only wait on query variables having values,
// not on CUSTOM, TEXTBOX, or other types.
const doAllQueryVariablesHaveValuesSelected = Object.values(
state.variables,
).every((variable) => {
if (variable.type !== 'QUERY') {
return true;
}
if (isNil(variable.selectedValue)) {
return false;
}
// If every variable already has a selectedValue (e.g. persisted from
// localStorage/URL), dynamic variables can start in parallel.
// Otherwise they wait for query vars to settle first.
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
(variable) => {
if (
variable.type === 'DYNAMIC' &&
(variable.selectedValue === null || isEmpty(variable.selectedValue)) &&
variable.allSelected === true
) {
return true;
}
if (Array.isArray(variable.selectedValue)) {
return variable.selectedValue.length > 0;
}
return true;
});
return (
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
);
},
);
return {
doAllQueryVariablesHaveValuesSelected,
doAllVariablesHaveValuesSelected,
variableTypes: state.variableTypes,
dynamicVariableOrder: state.dynamicVariableOrder,
dependencyData: state.dependencyData,

View File

@@ -36,7 +36,7 @@ export type VariableFetchContext = Pick<
IDashboardVariablesStoreState,
'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
doAllQueryVariablesHaveValuesSelected: boolean;
doAllVariablesHaveValuesSelected: boolean;
};
const initialState: IVariableFetchStoreState = {
@@ -88,7 +88,7 @@ export function initializeVariableFetchStore(variableNames: string[]): void {
*/
export function enqueueFetchOfAllVariables(): void {
const {
doAllQueryVariablesHaveValuesSelected,
doAllVariablesHaveValuesSelected,
dependencyData,
variableTypes,
dynamicVariableOrder,
@@ -116,7 +116,7 @@ export function enqueueFetchOfAllVariables(): void {
// otherwise wait for query variables to settle first
dynamicVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
draft.states[name] = doAllQueryVariablesHaveValuesSelected
draft.states[name] = doAllVariablesHaveValuesSelected
? resolveFetchState(draft, name)
: 'waiting';
});
@@ -208,11 +208,7 @@ export function onVariableFetchFailure(name: string): void {
* ensures parents are set before children within a single update).
*/
export function enqueueDescendantsOfVariable(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
const { dependencyData, variableTypes } = getVariableDependencyContext();
if (!dependencyData) {
return;
}
@@ -234,18 +230,5 @@ export function enqueueDescendantsOfVariable(name: string): void {
? resolveFetchState(draft, desc)
: 'waiting';
});
// Dynamic variables implicitly depend on all query variable values.
// If all query variables are currently settled, start them immediately;
// otherwise they wait until query vars finish (unlocked via onVariableFetchComplete).
dynamicVariableOrder.forEach((dynName) => {
draft.cycleIds[dynName] = (draft.cycleIds[dynName] || 0) + 1;
draft.states[dynName] = areAllQueryVariablesSettled(
draft.states,
variableTypes,
)
? resolveFetchState(draft, dynName)
: 'waiting';
});
});
}

View File

@@ -27,7 +27,22 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
return err
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
if err := router.Handle("/api/v1/invite/bulk", handler.New(provider.authZ.AdminAccess(provider.userHandler.CreateBulkInvite), handler.OpenAPIDef{
ID: "CreateBulkInvite",
Tags: []string{"users"},
Summary: "Create bulk invite",
Description: "This endpoint creates a bulk invite for a user",
Request: make([]*types.PostableInvite, 0),
RequestContentType: "application/json",
Response: nil,
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/invite/{token}", handler.New(provider.authZ.OpenAccess(provider.userHandler.GetInvite), handler.OpenAPIDef{
ID: "GetInvite",
Tags: []string{"users"},
@@ -45,7 +60,6 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
return err
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
if err := router.Handle("/api/v1/invite/{id}", handler.New(provider.authZ.AdminAccess(provider.userHandler.DeleteInvite), handler.OpenAPIDef{
ID: "DeleteInvite",
Tags: []string{"users"},
@@ -63,7 +77,6 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
return err
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
if err := router.Handle("/api/v1/invite", handler.New(provider.authZ.AdminAccess(provider.userHandler.ListInvite), handler.OpenAPIDef{
ID: "ListInvite",
Tags: []string{"users"},
@@ -81,7 +94,6 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
return err
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
if err := router.Handle("/api/v1/invite/accept", handler.New(provider.authZ.OpenAccess(provider.userHandler.AcceptInvite), handler.OpenAPIDef{
ID: "AcceptInvite",
Tags: []string{"users"},
@@ -99,22 +111,6 @@ func (provider *provider) addUserRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v1/invite/bulk", handler.New(provider.authZ.AdminAccess(provider.userHandler.CreateBulkInvite), handler.OpenAPIDef{
ID: "CreateBulkInvite",
Tags: []string{"users"},
Summary: "Create bulk invite",
Description: "This endpoint creates a bulk invite for a user",
Request: make([]*types.PostableInvite, 0),
RequestContentType: "application/json",
Response: nil,
SuccessStatusCode: http.StatusCreated,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v1/pats", handler.New(provider.authZ.AdminAccess(provider.userHandler.CreateAPIKey), handler.OpenAPIDef{
ID: "CreateAPIKey",
Tags: []string{"users"},

View File

@@ -17,7 +17,7 @@ func NewStore(sqlstore sqlstore.SQLStore) authtypes.AuthNStore {
return &store{sqlstore: sqlstore}
}
func (store *store) GetActiveUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error) {
func (store *store) GetUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error) {
user := new(types.User)
factorPassword := new(types.FactorPassword)
@@ -28,7 +28,6 @@ func (store *store) GetActiveUserAndFactorPasswordByEmailAndOrgID(ctx context.Co
Model(user).
Where("email = ?", email).
Where("org_id = ?", orgID).
Where("status = ?", types.UserStatusActive.StringValue()).
Scan(ctx)
if err != nil {
return nil, nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with email %s in org %s not found", email, orgID)

View File

@@ -21,7 +21,7 @@ func New(store authtypes.AuthNStore) *AuthN {
}
func (a *AuthN) Authenticate(ctx context.Context, email string, password string, orgID valuer.UUID) (*authtypes.Identity, error) {
user, factorPassword, err := a.store.GetActiveUserAndFactorPasswordByEmailAndOrgID(ctx, email, orgID)
user, factorPassword, err := a.store.GetUserAndFactorPasswordByEmailAndOrgID(ctx, email, orgID)
if err != nil {
return nil, err
}

View File

@@ -65,9 +65,6 @@ func (module *module) GetSessionContext(ctx context.Context, email valuer.Email,
return nil, err
}
// filter out deleted users
users = slices.DeleteFunc(users, func(user *types.User) bool { return user.Status == types.UserStatusDeleted })
// Since email is a valuer, we can be sure that it is a valid email and we can split it to get the domain name.
name := strings.Split(email.String(), "@")[1]
@@ -144,7 +141,7 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
roleMapping := authDomain.AuthDomainConfig().RoleMapping
role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID, types.UserStatusActive)
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
if err != nil {
return "", err
}

View File

@@ -77,8 +77,8 @@ func (module *getter) ListUsersByEmailAndOrgIDs(ctx context.Context, email value
return users, nil
}
func (module *getter) ActiveCountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
count, err := module.store.ActiveCountByOrgID(ctx, orgID)
func (module *getter) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
count, err := module.store.CountByOrgID(ctx, orgID)
if err != nil {
return 0, err
}

View File

@@ -27,7 +27,6 @@ func NewHandler(module root.Module, getter root.Getter) root.Handler {
return &handler{module: module, getter: getter}
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (h *handler) AcceptInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
@@ -71,11 +70,6 @@ func (h *handler) CreateInvite(rw http.ResponseWriter, r *http.Request) {
return
}
if len(invites) == 0 {
render.Error(rw, errors.New(errors.TypeInternal, errors.CodeInternal, "failed to create invite"))
return
}
render.Success(rw, http.StatusCreated, invites[0])
}
@@ -110,7 +104,6 @@ func (h *handler) CreateBulkInvite(rw http.ResponseWriter, r *http.Request) {
render.Success(rw, http.StatusCreated, nil)
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (h *handler) GetInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
@@ -125,7 +118,6 @@ func (h *handler) GetInvite(w http.ResponseWriter, r *http.Request) {
render.Success(w, http.StatusOK, invite)
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (h *handler) ListInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
@@ -145,7 +137,6 @@ func (h *handler) ListInvite(w http.ResponseWriter, r *http.Request) {
render.Success(w, http.StatusOK, invites)
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (h *handler) DeleteInvite(w http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()

View File

@@ -51,72 +51,40 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
}
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (m *Module) AcceptInvite(ctx context.Context, token string, password string) (*types.User, error) {
// token in this case is the reset password token
resetPasswordToken, err := m.store.GetResetPasswordToken(ctx, token)
invite, err := m.store.GetInviteByToken(ctx, token)
if err != nil {
return nil, err
}
// get the factor password
factorPassword, err := m.store.GetPassword(ctx, resetPasswordToken.PasswordID)
user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID)
if err != nil {
return nil, err
}
userID := valuer.MustNewUUID(factorPassword.UserID)
err = m.UpdatePasswordByResetPasswordToken(ctx, token, password)
factorPassword, err := types.NewFactorPassword(password, user.ID.StringValue())
if err != nil {
return nil, err
}
// get the user
user, err := m.store.GetUser(ctx, userID)
err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword))
if err != nil {
return nil, err
}
if err := m.DeleteInvite(ctx, invite.OrgID.String(), invite.ID); err != nil {
return nil, err
}
return user, nil
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (m *Module) GetInviteByToken(ctx context.Context, token string) (*types.Invite, error) {
// token in this case is the reset password token
resetPasswordToken, err := m.store.GetResetPasswordToken(ctx, token)
invite, err := m.store.GetInviteByToken(ctx, token)
if err != nil {
return nil, err
}
// get the factor password
factorPassword, err := m.store.GetPassword(ctx, resetPasswordToken.PasswordID)
if err != nil {
return nil, err
}
// get the user
user, err := m.store.GetUser(ctx, valuer.MustNewUUID(factorPassword.UserID))
if err != nil {
return nil, err
}
// create a dummy invite obj for backward compatibility
invite := &types.Invite{
Identifiable: types.Identifiable{
ID: resetPasswordToken.PasswordID,
},
Name: user.DisplayName,
Email: user.Email,
Token: resetPasswordToken.Token,
Role: user.Role,
OrgID: user.OrgID,
TimeAuditable: types.TimeAuditable{
CreatedAt: user.CreatedAt,
UpdatedAt: user.UpdatedAt, // dummy
},
}
return invite, nil
}
@@ -127,174 +95,80 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
return nil, err
}
invitedUsers := make([]*types.User, 0, len(bulkInvites.Invites))
var invites []*types.Invite // TODO(balanikaran) remove this and more to types.User as return
invites := make([]*types.Invite, 0, len(bulkInvites.Invites))
for _, invite := range bulkInvites.Invites {
// check and active user already exists with this email
existingUser, err := m.GetNonDeletedUserByEmailAndOrgID(ctx, invite.Email, orgID)
// check if user exists
existingUser, err := m.store.GetUserByEmailAndOrgID(ctx, invite.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
if existingUser != nil {
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "Cannot send invite to root user")
}
// check if a pending invite already exists
if existingUser.Status == types.UserStatusPendingInvite {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "An invite already exists for this email")
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
}
}
if existingUser != nil {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
}
// Check if an invite already exists
existingInvite, err := m.store.GetInviteByEmailAndOrgID(ctx, invite.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}
if existingInvite != nil {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "An invite already exists for this email")
}
role, err := types.NewRole(invite.Role.String())
if err != nil {
return nil, err
}
// create a new user with pending invite status
newUser, err := types.NewUser(invite.Name, invite.Email, role, orgID, types.UserStatusPendingInvite)
newInvite, err := types.NewInvite(invite.Name, role, orgID, invite.Email)
if err != nil {
return nil, err
}
// store the user and password in db
err = m.createUserWithoutGrant(ctx, newUser)
if err != nil {
return nil, err
}
invitedUsers = append(invitedUsers, newUser)
newInvite.InviteLink = fmt.Sprintf("%s/signup?token=%s", invite.FrontendBaseUrl, newInvite.Token)
invites = append(invites, newInvite)
}
// send password reset emails to all the invited users
for i, invitedUser := range invitedUsers {
m.analytics.TrackUser(ctx, orgID.String(), creator.ID.String(), "Invite Sent", map[string]any{
"invitee_email": invitedUser.Email,
"invitee_role": invitedUser.Role,
})
// generate reset password token
resetPasswordToken, err := m.GetOrCreateResetPasswordToken(ctx, invitedUser.ID)
if err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to create reset password token for invited user", "error", err)
continue
}
// TODO(balanikaran): deprecate this
invite := &types.Invite{
Identifiable: types.Identifiable{
ID: resetPasswordToken.PasswordID,
},
Name: invitedUser.DisplayName,
Email: invitedUser.Email,
Token: resetPasswordToken.Token,
Role: invitedUser.Role,
OrgID: invitedUser.OrgID,
TimeAuditable: types.TimeAuditable{
CreatedAt: invitedUser.CreatedAt,
UpdatedAt: invitedUser.UpdatedAt, // dummy
},
}
invites = append(invites, invite)
frontendBaseUrl := bulkInvites.Invites[i].FrontendBaseUrl
if frontendBaseUrl == "" {
m.settings.Logger().InfoContext(ctx, "frontend base url is not provided, skipping email", "invitee_email", invitedUser.Email)
continue
}
resetLink := m.resetLink(frontendBaseUrl, resetPasswordToken.Token)
tokenLifetime := m.config.Password.Reset.MaxTokenLifetime
humanizedTokenLifetime := strings.TrimSpace(humanize.RelTime(time.Now(), time.Now().Add(tokenLifetime), "", ""))
if err := m.emailing.SendHTML(ctx, invitedUser.Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"inviter_email": creator.Email,
"link": resetLink,
"Expiry": humanizedTokenLifetime,
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send invite email", "error", err)
}
}
return invites, nil // TODO(balanikaran) move to types.User
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (m *Module) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) {
// find all the users with pending_invite status
users, err := m.store.ListUsersByOrgID(ctx, valuer.MustNewUUID(orgID))
err = m.store.CreateBulkInvite(ctx, invites)
if err != nil {
return nil, err
}
pendingUsers := slices.DeleteFunc(users, func(user *types.User) bool { return user.Status != types.UserStatusPendingInvite })
for i := 0; i < len(invites); i++ {
m.analytics.TrackUser(ctx, orgID.String(), creator.ID.String(), "Invite Sent", map[string]any{"invitee_email": invites[i].Email, "invitee_role": invites[i].Role})
var invites []*types.Invite
for _, pUser := range pendingUsers {
// get the reset password token
resetPasswordToken, err := m.GetOrCreateResetPasswordToken(ctx, pUser.ID)
if err != nil {
return nil, err
// if the frontend base url is not provided, we don't send the email
if bulkInvites.Invites[i].FrontendBaseUrl == "" {
m.settings.Logger().InfoContext(ctx, "frontend base url is not provided, skipping email", "invitee_email", invites[i].Email)
continue
}
// create a dummy invite obj for backward compatibility
invite := &types.Invite{
Identifiable: types.Identifiable{
ID: resetPasswordToken.PasswordID,
},
Name: pUser.DisplayName,
Email: pUser.Email,
Token: resetPasswordToken.Token,
Role: pUser.Role,
OrgID: pUser.OrgID,
TimeAuditable: types.TimeAuditable{
CreatedAt: pUser.CreatedAt,
UpdatedAt: pUser.UpdatedAt, // dummy
},
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"inviter_email": creator.Email,
"link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
invites = append(invites, invite)
}
return invites, nil
}
// TODO(balanikaran): deprecate this one frontend changes are live with new invitation flow
func (m *Module) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) {
return m.store.ListInvite(ctx, orgID)
}
func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error {
// the id in this case is the password id
// get the factor password
factorPassword, err := m.store.GetPassword(ctx, id)
if err != nil {
return err
}
// get the user
user, err := m.store.GetUser(ctx, valuer.MustNewUUID(factorPassword.UserID))
if err != nil {
return err
}
// revoke grants
// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
err = m.authz.Revoke(ctx, user.OrgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)}, authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil))
if err != nil {
return err
}
// hard delete the user row
err = m.store.DeleteUser(ctx, user.OrgID.StringValue(), user.ID.StringValue())
if err != nil {
return err
}
return nil
return m.store.DeleteInvite(ctx, orgID, id)
}
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
@@ -426,9 +300,7 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
// for now we are only soft deleting users
user.UpdateStatus(types.UserStatusDeleted)
if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
if err := module.store.DeleteUser(ctx, orgID.String(), user.ID.StringValue()); err != nil {
return err
}
@@ -449,10 +321,6 @@ func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID
return nil, errors.WithAdditionalf(err, "cannot reset password for root user")
}
if user.Status == types.UserStatusDeleted {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "user has been deleted")
}
password, err := module.store.GetPasswordByUserID(ctx, userID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
@@ -507,7 +375,7 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
return errors.New(errors.TypeUnsupported, errors.CodeUnsupported, "Users are not allowed to reset their password themselves, please contact an admin to reset your password.")
}
user, err := module.GetNonDeletedUserByEmailAndOrgID(ctx, email, orgID)
user, err := module.store.GetUserByEmailAndOrgID(ctx, email, orgID)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
return nil // for security reasons
@@ -525,7 +393,7 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
return err
}
resetLink := module.resetLink(frontendBaseURL, token.Token)
resetLink := fmt.Sprintf("%s/password-reset?token=%s", frontendBaseURL, token.Token)
tokenLifetime := module.config.Password.Reset.MaxTokenLifetime
humanizedTokenLifetime := strings.TrimSpace(humanize.RelTime(time.Now(), time.Now().Add(tokenLifetime), "", ""))
@@ -571,21 +439,10 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
return errors.WithAdditionalf(err, "cannot reset password for root user")
}
if user.Status == types.UserStatusDeleted {
return errors.Newf(errors.TypeNotFound, types.ErrCodeUserNotFound, "user with id %s does not exist", user.ID)
}
if err := password.Update(passwd); err != nil {
return err
}
// update the status of user if this a newly invited user and also grant authz
if user.Status == types.UserStatusPendingInvite {
if err = module.activatePendingUser(ctx, user); err != nil {
return err
}
}
return module.store.UpdatePassword(ctx, password)
}
@@ -620,7 +477,7 @@ func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, ol
}
func (module *Module) GetOrCreateUser(ctx context.Context, user *types.User, opts ...root.CreateUserOption) (*types.User, error) {
existingUser, err := module.GetNonDeletedUserByEmailAndOrgID(ctx, user.Email, user.OrgID)
existingUser, err := module.store.GetUserByEmailAndOrgID(ctx, user.Email, user.OrgID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
@@ -628,14 +485,6 @@ func (module *Module) GetOrCreateUser(ctx context.Context, user *types.User, opt
}
if existingUser != nil {
// for users logging through SSO flow but are having status as pending_invite
if existingUser.Status == types.UserStatusPendingInvite {
// activate the user
if err = module.activatePendingUser(ctx, existingUser); err != nil {
return nil, err
}
}
return existingUser, nil
}
@@ -712,7 +561,7 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
func (module *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
stats := make(map[string]any)
count, err := module.store.ActiveCountByOrgID(ctx, orgID)
count, err := module.store.CountByOrgID(ctx, orgID)
if err == nil {
stats["user.count"] = count
}
@@ -749,49 +598,3 @@ func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.U
return nil
}
// resetLink builds the password-reset URL for the given frontend base URL
// and reset token.
func (module *Module) resetLink(frontendBaseUrl string, token string) string {
	return frontendBaseUrl + "/password-reset?token=" + token
}
// activatePendingUser promotes a pending-invite user to active: it first
// grants the authz role derived from the user's current role, then persists
// the status change through the user store. An authz failure returns before
// the user record is touched.
func (module *Module) activatePendingUser(ctx context.Context, user *types.User) error {
// Grant the SigNoz-managed role matching the user's existing role to the
// user subject within the user's org.
err := module.authz.Grant(
ctx,
user.OrgID,
[]string{roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role)},
authtypes.MustNewSubject(authtypes.TypeableUser, user.ID.StringValue(), user.OrgID, nil),
)
if err != nil {
return err
}
// Flip the in-memory status, then persist the updated user.
user.UpdateStatus(types.UserStatusActive)
err = module.store.UpdateUser(ctx, user.OrgID, user)
if err != nil {
return err
}
return nil
}
// GetNonDeletedUserByEmailAndOrgID enforces that at most one non-deleted user
// exists for a given email within an org: it returns an internal error when
// several non-deleted users match and a not-found error when none do.
func (module *Module) GetNonDeletedUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.User, error) {
	users, err := module.store.GetUsersByEmailAndOrgID(ctx, email, orgID)
	if err != nil {
		return nil, err
	}

	// Keep only users whose status is not "deleted".
	nonDeleted := users[:0]
	for _, u := range users {
		if u.Status != types.UserStatusDeleted {
			nonDeleted = append(nonDeleted, u)
		}
	}

	switch {
	case len(nonDeleted) > 1:
		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "Multiple non-deleted users found for email %s in org_id: %s", email.StringValue(), orgID.StringValue())
	case len(nonDeleted) == 1:
		return nonDeleted[0], nil
	default:
		return nil, errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "No non-deleted user found with email %s in org_id: %s", email.StringValue(), orgID.StringValue())
	}
}

View File

@@ -143,7 +143,7 @@ func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) erro
}
func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
existingUser, err := s.module.GetNonDeletedUserByEmailAndOrgID(ctx, s.config.Email, orgID)
existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}

View File

@@ -25,6 +25,77 @@ func NewStore(sqlstore sqlstore.SQLStore, settings factory.ProviderSettings) typ
return &store{sqlstore: sqlstore, settings: settings}
}
// CreateBulkInvite implements types.InviteStore.
//
// It inserts all invites in a single statement; an insert failure is wrapped
// as types.ErrInviteAlreadyExists.
func (store *store) CreateBulkInvite(ctx context.Context, invites []*types.Invite) error {
	// Nothing to insert. This also guards the invites[0] access in the error
	// path below, which would panic on an empty slice.
	if len(invites) == 0 {
		return nil
	}

	_, err := store.sqlstore.BunDB().NewInsert().
		Model(&invites).
		Exec(ctx)
	if err != nil {
		// NOTE(review): the message only reports the first invite's email/org,
		// even though any row in the batch may have caused the conflict.
		return store.sqlstore.WrapAlreadyExistsErrf(err, types.ErrInviteAlreadyExists, "invite with email: %s already exists in org: %s", invites[0].Email, invites[0].OrgID)
	}
	return nil
}
// Delete implements types.InviteStore.
// DeleteInvite removes the invite with the given id, scoped to orgID.
//
// NOTE(review): bun's delete does not return an error when no row matches,
// so the not-found wrap below presumably only fires on driver/query failures,
// not when the invite is absent — confirm whether a missing invite should
// surface types.ErrInviteNotFound.
func (store *store) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error {
_, err := store.sqlstore.BunDB().NewDelete().
Model(&types.Invite{}).
Where("org_id = ?", orgID).
Where("id = ?", id).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with id: %s does not exist in org: %s", id.StringValue(), orgID)
}
return nil
}
// GetInviteByEmailAndOrgID fetches the single invite matching email within
// the given org. Scan failures are wrapped as types.ErrInviteNotFound.
func (store *store) GetInviteByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.Invite, error) {
	invite := new(types.Invite)

	query := store.sqlstore.BunDBCtx(ctx).
		NewSelect().
		Model(invite).
		Where("email = ?", email).
		Where("org_id = ?", orgID)

	if err := query.Scan(ctx); err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with email %s does not exist in org %s", email, orgID)
	}

	return invite, nil
}
// GetInviteByToken fetches the invite carrying the given token.
// Scan failures are wrapped as types.ErrInviteNotFound.
func (store *store) GetInviteByToken(ctx context.Context, token string) (*types.GettableInvite, error) {
	invite := new(types.Invite)
	err := store.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model(invite).
		Where("token = ?", token).
		Scan(ctx)
	if err != nil {
		// Bug fix: the format string previously had no verb for the token
		// argument ("invite does not exist", token), which go vet's printf
		// analyzer flags and which dropped the token from the message.
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with token: %s does not exist", token)
	}
	return invite, nil
}
// ListInvite returns every invite recorded for the given org id.
func (store *store) ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) {
	var invites []*types.Invite

	err := store.sqlstore.BunDB().
		NewSelect().
		Model(&invites).
		Where("org_id = ?", orgID).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrInviteNotFound, "invite with org id: %s does not exist", orgID)
	}

	return invites, nil
}
func (store *store) CreatePassword(ctx context.Context, password *types.FactorPassword) error {
_, err := store.
sqlstore.
@@ -104,22 +175,21 @@ func (store *store) GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id v
return user, nil
}
func (store *store) GetUsersByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) ([]*types.User, error) {
var users []*types.User
func (store *store) GetUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.User, error) {
user := new(types.User)
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(&users).
Model(user).
Where("org_id = ?", orgID).
Where("email = ?", email).
Scan(ctx)
if err != nil {
return nil, err
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with email %s does not exist in org %s", email, orgID)
}
return users, nil
return user, nil
}
func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) ([]*types.User, error) {
@@ -132,7 +202,6 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
Model(&users).
Where("org_id = ?", orgID).
Where("role = ?", role).
Where("status != ?", types.UserStatusDeleted.StringValue()).
Scan(ctx)
if err != nil {
return nil, err
@@ -152,7 +221,6 @@ func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *typ
Column("role").
Column("is_root").
Column("updated_at").
Column("status").
Where("org_id = ?", orgID).
Where("id = ?", user.ID).
Exec(ctx)
@@ -497,7 +565,7 @@ func (store *store) GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*type
return flattenedAPIKeys[0], nil
}
func (store *store) ActiveCountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
func (store *store) CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error) {
user := new(types.User)
count, err := store.
@@ -506,7 +574,6 @@ func (store *store) ActiveCountByOrgID(ctx context.Context, orgID valuer.UUID) (
NewSelect().
Model(user).
Where("org_id = ?", orgID).
Where("status = ?", types.UserStatusActive.StringValue()).
Count(ctx)
if err != nil {
return 0, err

View File

@@ -41,10 +41,10 @@ type Module interface {
// invite
CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID valuer.UUID, bulkInvites *types.PostableBulkInviteRequest) ([]*types.Invite, error)
ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error) // TODO(balanikaran) - deprecate this
DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error // TODO(balanikaran) - deprecate this
AcceptInvite(ctx context.Context, token string, password string) (*types.User, error) // TODO(balanikaran) - deprecate this
GetInviteByToken(ctx context.Context, token string) (*types.Invite, error) // TODO(balanikaran) - deprecate this
ListInvite(ctx context.Context, orgID string) ([]*types.Invite, error)
DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error
AcceptInvite(ctx context.Context, token string, password string) (*types.User, error)
GetInviteByToken(ctx context.Context, token string) (*types.Invite, error)
// API KEY
CreateAPIKey(ctx context.Context, apiKey *types.StorableAPIKey) error
@@ -53,8 +53,6 @@ type Module interface {
RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UUID) error
GetAPIKey(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*types.StorableAPIKeyUser, error)
GetNonDeletedUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*types.User, error)
statsreporter.StatsCollector
}
@@ -77,8 +75,8 @@ type Getter interface {
// List users by email and org ids.
ListUsersByEmailAndOrgIDs(context.Context, valuer.Email, []valuer.UUID) ([]*types.User, error)
// Count active users by org id.
ActiveCountByOrgID(context.Context, valuer.UUID) (int64, error)
// Count users by org id.
CountByOrgID(context.Context, valuer.UUID) (int64, error)
// Get factor password by user id.
GetFactorPasswordByUserID(context.Context, valuer.UUID) (*types.FactorPassword, error)
@@ -87,10 +85,10 @@ type Getter interface {
type Handler interface {
// invite
CreateInvite(http.ResponseWriter, *http.Request)
AcceptInvite(http.ResponseWriter, *http.Request) // TODO(balanikaran) - deprecate this
GetInvite(http.ResponseWriter, *http.Request) // public function // TODO(balanikaran) - deprecate this
ListInvite(http.ResponseWriter, *http.Request) // TODO(balanikaran) - deprecate this
DeleteInvite(http.ResponseWriter, *http.Request) // TODO(balanikaran) - deprecate this
AcceptInvite(http.ResponseWriter, *http.Request)
GetInvite(http.ResponseWriter, *http.Request) // public function
ListInvite(http.ResponseWriter, *http.Request)
DeleteInvite(http.ResponseWriter, *http.Request)
CreateBulkInvite(http.ResponseWriter, *http.Request)
ListUsers(http.ResponseWriter, *http.Request)

View File

@@ -3541,45 +3541,6 @@ func (r *ClickHouseReader) GetCountOfThings(ctx context.Context, query string) (
return count, nil
}
// GetActiveHostsFromMetricMetadata returns the set of host names (values of
// the hostNameAttr attribute) that have reported any of metricNames since
// sinceUnixMilli, according to the metrics metadata table. Empty attribute
// values are skipped. All failures are wrapped as internal errors.
func (r *ClickHouseReader) GetActiveHostsFromMetricMetadata(ctx context.Context, metricNames []string, hostNameAttr string, sinceUnixMilli int64) (map[string]bool, error) {
activeHosts := map[string]bool{}
// Distinct attribute values for the given metrics/attribute, bounded by the
// last-reported timestamp; parameters are bound via ClickHouse named args.
query := fmt.Sprintf(
`SELECT DISTINCT attr_string_value
FROM %s.%s
WHERE metric_name IN @metricNames
AND attr_name = @attrName
AND last_reported_unix_milli >= @sinceUnixMilli`,
signozMetricDBName,
constants.SIGNOZ_METADATA_TABLENAME,
)
rows, err := r.db.Query(ctx, query,
clickhouse.Named("metricNames", metricNames),
clickhouse.Named("attrName", hostNameAttr),
clickhouse.Named("sinceUnixMilli", sinceUnixMilli),
)
if err != nil {
return nil, errorsV2.WrapInternalf(err, errorsV2.CodeInternal, "error querying active hosts")
}
defer rows.Close()
for rows.Next() {
var hostName string
if err := rows.Scan(&hostName); err != nil {
return nil, errorsV2.WrapInternalf(err, errorsV2.CodeInternal, "error scanning active host row")
}
// Ignore empty host names so they don't show up as a phantom "" host.
if hostName != "" {
activeHosts[hostName] = true
}
}
// Surface any iteration error that occurred after the last Next().
if err := rows.Err(); err != nil {
return nil, errorsV2.WrapInternalf(err, errorsV2.CodeInternal, "error iterating active host rows")
}
return activeHosts, nil
}
func (r *ClickHouseReader) GetLatestReceivedMetric(
ctx context.Context, metricNames []string, labelValues map[string]string,
) (*model.MetricStatus, *model.ApiError) {

View File

@@ -253,7 +253,7 @@ func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, erro
}
// if the first org with the first user is created then the setup is complete.
if len(orgs) == 1 {
count, err := opts.Signoz.Modules.UserGetter.ActiveCountByOrgID(context.Background(), orgs[0].ID)
count, err := opts.Signoz.Modules.UserGetter.CountByOrgID(context.Background(), orgs[0].ID)
if err != nil {
zap.L().Warn("unexpected error while fetch user count while initializing base api handler", zap.Error(err))
}

View File

@@ -10,110 +10,55 @@ import (
)
var dotMetricMap = map[string]string{
"system_uptime": "system.uptime",
"system_cpu_physical_count": "system.cpu.physical.count",
"system_cpu_logical_count": "system.cpu.logical.count",
"system_cpu_time": "system.cpu.time",
"system_cpu_frequency": "system.cpu.frequency",
"system_cpu_utilization": "system.cpu.utilization",
"system_cpu_load_average_15m": "system.cpu.load_average.15m",
"system_memory_usage": "system.memory.usage",
"system_memory_limit": "system.memory.limit",
"system_memory_utilization": "system.memory.utilization",
"system_memory_linux_available": "system.memory.linux.available",
"system_memory_linux_shared": "system.memory.linux.shared",
"system_memory_linux_slab_usage": "system.memory.linux.slab.usage",
"system_paging_usage": "system.paging.usage",
"system_paging_utilization": "system.paging.utilization",
"system_paging_faults": "system.paging.faults",
"system_paging_operations": "system.paging.operations",
"system_disk_io": "system.disk.io",
"system_disk_operations": "system.disk.operations",
"system_disk_io_time": "system.disk.io_time",
"system_disk_operation_time": "system.disk.operation_time",
"system_disk_merged": "system.disk.merged",
"system_disk_limit": "system.disk.limit",
"system_filesystem_usage": "system.filesystem.usage",
"system_filesystem_utilization": "system.filesystem.utilization",
"system_filesystem_limit": "system.filesystem.limit",
"system_network_errors": "system.network.errors",
"system_network_io": "system.network.io",
"system_network_connections": "system.network.connections",
"system_network_dropped": "system.network.dropped",
"system_network_packets": "system.network.packets",
"system_processes_count": "system.processes.count",
"system_processes_created": "system.processes.created",
"system_disk_pending_operations": "system.disk.pending_operations",
"system_disk_weighted_io_time": "system.disk.weighted_io_time",
"system_filesystem_inodes_usage": "system.filesystem.inodes.usage",
"system_network_conntrack_count": "system.network.conntrack.count",
"system_network_conntrack_max": "system.network.conntrack.max",
"system_cpu_load_average_1m": "system.cpu.load_average.1m",
"system_cpu_load_average_5m": "system.cpu.load_average.5m",
"host_name": "host.name",
"k8s_cluster_name": "k8s.cluster.name",
"k8s_node_name": "k8s.node.name",
"k8s_pod_memory_usage": "k8s.pod.memory.usage",
"k8s_pod_cpu_request_utilization": "k8s.pod.cpu_request_utilization",
"k8s_pod_memory_request_utilization": "k8s.pod.memory_request_utilization",
"k8s_pod_cpu_limit_utilization": "k8s.pod.cpu_limit_utilization",
"k8s_pod_memory_limit_utilization": "k8s.pod.memory_limit_utilization",
"k8s_container_restarts": "k8s.container.restarts",
"k8s_pod_phase": "k8s.pod.phase",
"k8s_node_allocatable_cpu": "k8s.node.allocatable_cpu",
"k8s_node_allocatable_memory": "k8s.node.allocatable_memory",
"k8s_node_memory_usage": "k8s.node.memory.usage",
"k8s_node_condition_ready": "k8s.node.condition_ready",
"k8s_daemonset_desired_scheduled_nodes": "k8s.daemonset.desired_scheduled_nodes",
"k8s_daemonset_current_scheduled_nodes": "k8s.daemonset.current_scheduled_nodes",
"k8s_deployment_desired": "k8s.deployment.desired",
"k8s_deployment_available": "k8s.deployment.available",
"k8s_job_desired_successful_pods": "k8s.job.desired_successful_pods",
"k8s_job_active_pods": "k8s.job.active_pods",
"k8s_job_failed_pods": "k8s.job.failed_pods",
"k8s_job_successful_pods": "k8s.job.successful_pods",
"k8s_statefulset_desired_pods": "k8s.statefulset.desired_pods",
"k8s_statefulset_current_pods": "k8s.statefulset.current_pods",
"k8s_namespace_name": "k8s.namespace.name",
"k8s_deployment_name": "k8s.deployment.name",
"k8s_cronjob_name": "k8s.cronjob.name",
"k8s_job_name": "k8s.job.name",
"k8s_daemonset_name": "k8s.daemonset.name",
"os_type": "os.type",
"process_cgroup": "process.cgroup",
"process_pid": "process.pid",
"process_parent_pid": "process.parent_pid",
"process_owner": "process.owner",
"process_executable_path": "process.executable.path",
"process_executable_name": "process.executable.name",
"process_command_line": "process.command_line",
"process_command": "process.command",
"process_memory_usage": "process.memory.usage",
"process_memory_virtual": "process.memory.virtual",
"process_cpu_time": "process.cpu.time",
"process_disk_io": "process.disk.io",
"nfs_client_net_count": "nfs.client.net.count",
"nfs_client_net_tcp_connection_accepted": "nfs.client.net.tcp.connection.accepted",
"nfs_client_operation_count": "nfs.client.operation.count",
"nfs_client_procedure_count": "nfs.client.procedure.count",
"nfs_client_rpc_authrefresh_count": "nfs.client.rpc.authrefresh.count",
"nfs_client_rpc_count": "nfs.client.rpc.count",
"nfs_client_rpc_retransmit_count": "nfs.client.rpc.retransmit.count",
"nfs_server_fh_stale_count": "nfs.server.fh.stale.count",
"nfs_server_io": "nfs.server.io",
"nfs_server_net_count": "nfs.server.net.count",
"nfs_server_net_tcp_connection_accepted": "nfs.server.net.tcp.connection.accepted",
"nfs_server_operation_count": "nfs.server.operation.count",
"nfs_server_procedure_count": "nfs.server.procedure.count",
"nfs_server_repcache_requests": "nfs.server.repcache.requests",
"nfs_server_rpc_count": "nfs.server.rpc.count",
"nfs_server_thread_count": "nfs.server.thread.count",
"k8s_persistentvolumeclaim_name": "k8s.persistentvolumeclaim.name",
"k8s_volume_available": "k8s.volume.available",
"k8s_volume_capacity": "k8s.volume.capacity",
"k8s_volume_inodes": "k8s.volume.inodes",
"k8s_volume_inodes_free": "k8s.volume.inodes.free",
"system_filesystem_usage": "system.filesystem.usage",
"system_cpu_time": "system.cpu.time",
"system_memory_usage": "system.memory.usage",
"system_cpu_load_average_15m": "system.cpu.load_average.15m",
"host_name": "host.name",
"k8s_cluster_name": "k8s.cluster.name",
"k8s_node_name": "k8s.node.name",
"k8s_pod_memory_usage": "k8s.pod.memory.usage",
"k8s_pod_cpu_request_utilization": "k8s.pod.cpu_request_utilization",
"k8s_pod_memory_request_utilization": "k8s.pod.memory_request_utilization",
"k8s_pod_cpu_limit_utilization": "k8s.pod.cpu_limit_utilization",
"k8s_pod_memory_limit_utilization": "k8s.pod.memory_limit_utilization",
"k8s_container_restarts": "k8s.container.restarts",
"k8s_pod_phase": "k8s.pod.phase",
"k8s_node_allocatable_cpu": "k8s.node.allocatable_cpu",
"k8s_node_allocatable_memory": "k8s.node.allocatable_memory",
"k8s_node_memory_usage": "k8s.node.memory.usage",
"k8s_node_condition_ready": "k8s.node.condition_ready",
"k8s_daemonset_desired_scheduled_nodes": "k8s.daemonset.desired_scheduled_nodes",
"k8s_daemonset_current_scheduled_nodes": "k8s.daemonset.current_scheduled_nodes",
"k8s_deployment_desired": "k8s.deployment.desired",
"k8s_deployment_available": "k8s.deployment.available",
"k8s_job_desired_successful_pods": "k8s.job.desired_successful_pods",
"k8s_job_active_pods": "k8s.job.active_pods",
"k8s_job_failed_pods": "k8s.job.failed_pods",
"k8s_job_successful_pods": "k8s.job.successful_pods",
"k8s_statefulset_desired_pods": "k8s.statefulset.desired_pods",
"k8s_statefulset_current_pods": "k8s.statefulset.current_pods",
"k8s_namespace_name": "k8s.namespace.name",
"k8s_deployment_name": "k8s.deployment.name",
"k8s_cronjob_name": "k8s.cronjob.name",
"k8s_job_name": "k8s.job.name",
"k8s_daemonset_name": "k8s.daemonset.name",
"os_type": "os.type",
"process_cgroup": "process.cgroup",
"process_pid": "process.pid",
"process_parent_pid": "process.parent_pid",
"process_owner": "process.owner",
"process_executable_path": "process.executable.path",
"process_executable_name": "process.executable.name",
"process_command_line": "process.command_line",
"process_command": "process.command",
"process_memory_usage": "process.memory.usage",
"k8s_persistentvolumeclaim_name": "k8s.persistentvolumeclaim.name",
"k8s_volume_available": "k8s.volume.available",
"k8s_volume_capacity": "k8s.volume.capacity",
"k8s_volume_inodes": "k8s.volume.inodes",
"k8s_volume_inodes_free": "k8s.volume.inodes.free",
// add additional mappings as needed
"k8s_pod_uid": "k8s.pod.uid",
"k8s_pod_name": "k8s.pod.name",

View File

@@ -73,53 +73,6 @@ var (
"load15": GetDotMetrics("system_cpu_load_average_15m"),
"wait": GetDotMetrics("system_cpu_time"),
}
uniqueMetricNamesForHosts = []string{
GetDotMetrics("system_uptime"),
GetDotMetrics("system_cpu_time"),
GetDotMetrics("system_cpu_load_average_1m"),
GetDotMetrics("system_cpu_load_average_5m"),
GetDotMetrics("system_cpu_load_average_15m"),
GetDotMetrics("system_memory_usage"),
GetDotMetrics("system_paging_usage"),
GetDotMetrics("system_paging_faults"),
GetDotMetrics("system_paging_operations"),
GetDotMetrics("system_disk_io"),
GetDotMetrics("system_disk_operations"),
GetDotMetrics("system_disk_io_time"),
GetDotMetrics("system_disk_operation_time"),
GetDotMetrics("system_disk_merged"),
GetDotMetrics("system_disk_pending_operations"),
GetDotMetrics("system_disk_weighted_io_time"),
GetDotMetrics("system_filesystem_usage"),
GetDotMetrics("system_filesystem_inodes_usage"),
GetDotMetrics("system_network_io"),
GetDotMetrics("system_network_errors"),
GetDotMetrics("system_network_connections"),
GetDotMetrics("system_network_dropped"),
GetDotMetrics("system_network_packets"),
GetDotMetrics("system_processes_count"),
GetDotMetrics("system_processes_created"),
GetDotMetrics("process_cpu_time"),
GetDotMetrics("process_disk_io"),
GetDotMetrics("process_memory_usage"),
GetDotMetrics("process_memory_virtual"),
GetDotMetrics("nfs_client_net_count"),
GetDotMetrics("nfs_client_net_tcp_connection_accepted"),
GetDotMetrics("nfs_client_operation_count"),
GetDotMetrics("nfs_client_procedure_count"),
GetDotMetrics("nfs_client_rpc_authrefresh_count"),
GetDotMetrics("nfs_client_rpc_count"),
GetDotMetrics("nfs_client_rpc_retransmit_count"),
GetDotMetrics("nfs_server_fh_stale_count"),
GetDotMetrics("nfs_server_io"),
GetDotMetrics("nfs_server_net_count"),
GetDotMetrics("nfs_server_net_tcp_connection_accepted"),
GetDotMetrics("nfs_server_operation_count"),
GetDotMetrics("nfs_server_procedure_count"),
GetDotMetrics("nfs_server_repcache_requests"),
GetDotMetrics("nfs_server_rpc_count"),
GetDotMetrics("nfs_server_thread_count"),
}
)
func NewHostsRepo(reader interfaces.Reader, querierV2 interfaces.Querier) *HostsRepo {
@@ -178,9 +131,62 @@ func (h *HostsRepo) GetHostAttributeValues(ctx context.Context, req v3.FilterAtt
return &v3.FilterAttributeValueResponse{StringAttributeValues: hostNames}, nil
}
func (h *HostsRepo) getActiveHosts(ctx context.Context) (map[string]bool, error) {
tenMinAgo := time.Now().Add(-10 * time.Minute).UTC().UnixMilli()
return h.reader.GetActiveHostsFromMetricMetadata(ctx, uniqueMetricNamesForHosts, hostNameAttrKey, tenMinAgo)
// getActiveHosts determines which hosts are "active" by running a builder
// metrics query over the last 10 minutes and marking every host name that
// appears in the result series. The caller's filters are applied; the
// host-name attribute is appended to GroupBy if not already present.
func (h *HostsRepo) getActiveHosts(ctx context.Context, orgID valuer.UUID, req model.HostListRequest) (map[string]bool, error) {
activeStatus := map[string]bool{}
// Step derived from the request's own time range — NOTE(review): the query
// below always spans the last 10 minutes, not req.Start..req.End; confirm
// the step should come from the request range rather than the query range.
step := common.MinAllowedStepInterval(req.Start, req.End)
// Ensure results are grouped by host name so we can read it off the series
// labels below.
hasHostName := false
for _, key := range req.GroupBy {
if key.Key == hostNameAttrKey {
hasHostName = true
}
}
if !hasHostName {
req.GroupBy = append(req.GroupBy, v3.AttributeKey{Key: hostNameAttrKey})
}
// Single builder query ("A") over the last 10 minutes, averaging the
// host-attribute metric, grouped by the (possibly augmented) GroupBy set.
params := v3.QueryRangeParamsV3{
Start: time.Now().Add(-time.Minute * 10).UTC().UnixMilli(),
End: time.Now().UTC().UnixMilli(),
Step: step,
CompositeQuery: &v3.CompositeQuery{
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
StepInterval: step,
DataSource: v3.DataSourceMetrics,
AggregateAttribute: v3.AttributeKey{
Key: metricToUseForHostAttributes,
DataType: v3.AttributeKeyDataTypeFloat64,
},
Temporality: v3.Unspecified,
Filters: req.Filters,
GroupBy: req.GroupBy,
Expression: "A",
TimeAggregation: v3.TimeAggregationAvg,
SpaceAggregation: v3.SpaceAggregationAvg,
Disabled: false,
},
},
QueryType: v3.QueryTypeBuilder,
PanelType: v3.PanelTypeGraph,
},
}
queryResponse, _, err := h.querierV2.QueryRange(ctx, orgID, &params)
if err != nil {
return nil, err
}
// Any host name that produced a series in the window counts as active.
// NOTE(review): a series missing the host-name label marks the empty-string
// key "" as active — confirm that is intended.
for _, result := range queryResponse {
for _, series := range result.Series {
name := series.Labels[hostNameAttrKey]
activeStatus[name] = true
}
}
return activeStatus, nil
}
func (h *HostsRepo) getMetadataAttributes(ctx context.Context, req model.HostListRequest) (map[string]map[string]string, error) {
@@ -444,7 +450,7 @@ func (h *HostsRepo) GetHostList(ctx context.Context, orgID valuer.UUID, req mode
return resp, err
}
activeHosts, err := h.getActiveHosts(ctx)
activeHosts, err := h.getActiveHosts(ctx, orgID, req)
if err != nil {
return resp, err
}

View File

@@ -99,7 +99,6 @@ type Reader interface {
SubscribeToQueryProgress(queryId string) (<-chan model.QueryProgress, func(), *model.ApiError)
GetCountOfThings(ctx context.Context, query string) (uint64, error)
GetActiveHostsFromMetricMetadata(ctx context.Context, metricNames []string, hostNameAttr string, sinceUnixMilli int64) (map[string]bool, error)
GetMetricsExistenceAndEarliestTime(ctx context.Context, metricNames []string) (uint64, uint64, error)

View File

@@ -170,8 +170,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateRulesV4ToV5Factory(sqlstore, telemetryStore),
sqlmigration.NewAddStatusUserFactory(sqlstore, sqlschema),
sqlmigration.NewDeprecateUserInviteFactory(sqlstore, sqlschema),
)
}

View File

@@ -1,89 +0,0 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
// addStatusUser is the SQL migration that adds a "status" column to the
// users table and drops the (email, org_id) unique index.
type addStatusUser struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
// NewAddStatusUserFactory returns the provider factory for the
// "add_status_user" migration, capturing the sqlstore and sqlschema
// dependencies in the provider closure.
func NewAddStatusUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("add_status_user"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &addStatusUser{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
},
)
}
// Register wires this migration's Up/Down functions into the migration set.
func (migration *addStatusUser) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
// Up adds a non-nullable "status" text column (default "active") to the
// users table and drops the unique index on (email, org_id). Schema changes
// run inside a transaction with foreign-key enforcement toggled off around
// the whole operation.
func (migration *addStatusUser) Up(ctx context.Context, db *bun.DB) error {
// Disable FK enforcement so the schema rewrite does not trip constraints.
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
// Rollback is a no-op after a successful commit.
defer func() {
_ = tx.Rollback()
}()
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
// New column: users.status TEXT NOT NULL, backfilled with "active".
column := &sqlschema.Column{
Name: sqlschema.ColumnName("status"),
DataType: sqlschema.DataTypeText,
Nullable: false,
}
sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, "active")
// we need to drop the unique index on (email, org_id)
indexSqls := migration.sqlschema.Operator().DropIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}})
sqls = append(sqls, indexSqls...)
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
// Re-enable FK enforcement after the schema change has been committed.
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
return err
}
return nil
}
// Down is intentionally a no-op: the status column and index change are not
// rolled back.
func (migration *addStatusUser) Down(ctx context.Context, db *bun.DB) error {
return nil
}

View File

@@ -1,141 +0,0 @@
package sqlmigration
import (
"context"
"database/sql"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
// deprecateUserInvite is the SQL migration that folds the user_invite table
// into the users table (as pending_invite users) and then drops user_invite.
type deprecateUserInvite struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
// NewDeprecateUserInviteFactory returns the provider factory for the
// "deprecate_user_invite" migration, capturing the sqlstore and sqlschema
// dependencies in the provider closure.
func NewDeprecateUserInviteFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(
factory.MustNewName("deprecate_user_invite"),
func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &deprecateUserInvite{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
},
)
}
// Register wires this migration's Up/Down functions into the migration set.
func (migration *deprecateUserInvite) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
// userInviteRow mirrors a row of the legacy user_invite table; used only to
// read existing invites during the migration.
type userInviteRow struct {
bun.BaseModel `bun:"table:user_invite"`
ID string `bun:"id"`
Name string `bun:"name"`
Email string `bun:"email"`
Role string `bun:"role"`
OrgID string `bun:"org_id"`
Token string `bun:"token"`
CreatedAt time.Time `bun:"created_at"`
UpdatedAt time.Time `bun:"updated_at"`
}
// pendingInviteUser is the users-table shape this migration inserts: one
// row per migrated invite, written with status "pending_invite".
type pendingInviteUser struct {
bun.BaseModel `bun:"table:users"`
ID string `bun:"id"`
DisplayName string `bun:"display_name"`
Email string `bun:"email"`
Role string `bun:"role"`
OrgID string `bun:"org_id"`
IsRoot bool `bun:"is_root"`
Status string `bun:"status"`
CreatedAt time.Time `bun:"created_at"`
UpdatedAt time.Time `bun:"updated_at"`
}
// Up copies every row of user_invite into users as a "pending_invite" user
// (keeping the invite's id, name, email, role and org), skips invites whose
// email+org already has a non-deleted user, and finally drops the
// user_invite table. Everything runs in one transaction.
func (migration *deprecateUserInvite) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
// Rollback is a no-op after a successful commit.
defer func() {
_ = tx.Rollback()
}()
// existing invites
var invites []*userInviteRow
err = tx.NewSelect().Model(&invites).Scan(ctx)
// An empty user_invite table (sql.ErrNoRows) is not an error here.
if err != nil && err != sql.ErrNoRows {
return err
}
// move all invitations to the users table as a pending_invite user
// skipping any invite whose email+org already has a user entry with non-deleted status
for _, invite := range invites {
existingCount, err := tx.NewSelect().
TableExpr("users").
Where("email = ?", invite.Email).
Where("org_id = ?", invite.OrgID).
Where("status != ?", "deleted").
Count(ctx)
if err != nil {
return err
}
if existingCount > 0 {
continue
}
// NOTE: the invite's token is intentionally not carried over; the new
// user row reuses the invite's id and created_at.
user := &pendingInviteUser{
ID: invite.ID,
DisplayName: invite.Name,
Email: invite.Email,
Role: invite.Role,
OrgID: invite.OrgID,
IsRoot: false,
Status: "pending_invite",
CreatedAt: invite.CreatedAt,
UpdatedAt: time.Now(),
}
if _, err = tx.NewInsert().Model(user).Exec(ctx); err != nil {
return err
}
}
// finally drop the user_invite table
table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("user_invite"))
if err != nil {
return err
}
dropTableSqls := migration.sqlschema.Operator().DropTable(table)
for _, sql := range dropTableSqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
return nil
}
// Down is intentionally a no-op: the dropped user_invite table and migrated
// rows are not restored.
func (migration *deprecateUserInvite) Down(ctx context.Context, db *bun.DB) error {
return nil
}

View File

@@ -125,7 +125,7 @@ func (typ *Identity) ToClaims() Claims {
type AuthNStore interface {
// Get user and factor password by email and orgID.
GetActiveUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error)
GetUserAndFactorPasswordByEmailAndOrgID(ctx context.Context, email string, orgID valuer.UUID) (*types.User, *types.FactorPassword, error)
// Get org domain from id.
GetAuthDomainFromID(ctx context.Context, domainID valuer.UUID) (*AuthDomain, error)

View File

@@ -14,17 +14,6 @@ var (
ErrInviteNotFound = errors.MustNewCode("invite_not_found")
)
type PostableInvite struct {
Name string `json:"name"`
Email valuer.Email `json:"email"`
Role Role `json:"role"`
FrontendBaseUrl string `json:"frontendBaseUrl"`
}
type PostableBulkInviteRequest struct {
Invites []PostableInvite `json:"invites"`
}
type GettableInvite = Invite
type Invite struct {
@@ -57,6 +46,17 @@ type PostableAcceptInvite struct {
SourceURL string `json:"sourceUrl"`
}
type PostableInvite struct {
Name string `json:"name"`
Email valuer.Email `json:"email"`
Role Role `json:"role"`
FrontendBaseUrl string `json:"frontendBaseUrl"`
}
type PostableBulkInviteRequest struct {
Invites []PostableInvite `json:"invites"`
}
type GettableCreateInviteResponse struct {
InviteToken string `json:"token"`
}

View File

@@ -23,25 +23,17 @@ var (
ErrCodeRootUserOperationUnsupported = errors.MustNewCode("root_user_operation_unsupported")
)
var (
UserStatusPendingInvite = valuer.NewString("pending_invite")
UserStatusActive = valuer.NewString("active")
UserStatusDeleted = valuer.NewString("deleted")
ValidUserStatus = []valuer.String{UserStatusPendingInvite, UserStatusActive, UserStatusDeleted}
)
type GettableUser = User
type User struct {
bun.BaseModel `bun:"table:users"`
Identifiable
DisplayName string `bun:"display_name" json:"displayName"`
Email valuer.Email `bun:"email" json:"email"`
Role Role `bun:"role" json:"role"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
IsRoot bool `bun:"is_root" json:"isRoot"`
Status valuer.String `bun:"status" json:"status"`
DisplayName string `bun:"display_name" json:"displayName"`
Email valuer.Email `bun:"email" json:"email"`
Role Role `bun:"role" json:"role"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
IsRoot bool `bun:"is_root" json:"isRoot"`
TimeAuditable
}
@@ -53,7 +45,7 @@ type PostableRegisterOrgAndAdmin struct {
OrgName string `json:"orgName"`
}
func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUID, status valuer.String) (*User, error) {
func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUID) (*User, error) {
if email.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
}
@@ -75,7 +67,6 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
Role: role,
OrgID: orgID,
IsRoot: false,
Status: status,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -101,7 +92,6 @@ func NewRootUser(displayName string, email valuer.Email, orgID valuer.UUID) (*Us
Role: RoleAdmin,
OrgID: orgID,
IsRoot: true,
Status: UserStatusActive,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -121,11 +111,6 @@ func (u *User) Update(displayName string, role Role) {
u.UpdatedAt = time.Now()
}
func (u *User) UpdateStatus(status valuer.String) {
u.Status = status
u.UpdatedAt = time.Now()
}
// PromoteToRoot promotes the user to a root user with admin role.
func (u *User) PromoteToRoot() {
u.IsRoot = true
@@ -154,7 +139,6 @@ func NewTraitsFromUser(user *User) map[string]any {
"role": user.Role,
"email": user.Email.String(),
"display_name": user.DisplayName,
"status": user.Status,
"created_at": user.CreatedAt,
}
}
@@ -176,6 +160,17 @@ func (request *PostableRegisterOrgAndAdmin) UnmarshalJSON(data []byte) error {
}
type UserStore interface {
// invite
CreateBulkInvite(ctx context.Context, invites []*Invite) error
ListInvite(ctx context.Context, orgID string) ([]*Invite, error)
DeleteInvite(ctx context.Context, orgID string, id valuer.UUID) error
// Get invite by token.
GetInviteByToken(ctx context.Context, token string) (*Invite, error)
// Get invite by email and org.
GetInviteByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*Invite, error)
// Creates a user.
CreateUser(ctx context.Context, user *User) error
@@ -186,7 +181,7 @@ type UserStore interface {
GetByOrgIDAndID(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*User, error)
// Get user by email and orgID.
GetUsersByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) ([]*User, error)
GetUserByEmailAndOrgID(ctx context.Context, email valuer.Email, orgID valuer.UUID) (*User, error)
// Get users by email.
GetUsersByEmail(ctx context.Context, email valuer.Email) ([]*User, error)
@@ -221,7 +216,7 @@ type UserStore interface {
GetAPIKey(ctx context.Context, orgID, id valuer.UUID) (*StorableAPIKeyUser, error)
CountAPIKeyByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
ActiveCountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
// Get root user by org.
GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*User, error)

View File

@@ -5,7 +5,7 @@
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.subject}}</title>
<title>You're Invited to Join SigNoz</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
@@ -41,13 +41,13 @@
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Click the button below to set your password and activate your account:
Accept the invitation to get started.
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Set Password
Accept Invitation
</a>
</td>
</tr>
@@ -60,18 +60,6 @@
{{.link}}
</a>
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:16px;background:#fff4e6;border-radius:6px;border-left:4px solid #ff9800">
<p style="margin:0;font-size:14px;color:#333;line-height:1.6">
<strong>&#9201; This link will expire in {{.Expiry}}.</strong>
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
If you didn't expect this invitation, please ignore this email. No account will be activated.
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.

View File

@@ -4,7 +4,6 @@ from typing import Any, Callable, Dict, List
import requests
from selenium import webdriver
from sqlalchemy import sql
from wiremock.resources.mappings import Mapping
from fixtures.auth import (
@@ -571,123 +570,3 @@ def test_saml_empty_name_fallback(
assert found_user is not None
assert found_user["role"] == "VIEWER"
def test_saml_sso_login_activates_pending_invite_user(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    An invited (pending_invite) user who logs in via SAML SSO must be
    auto-activated with the role from the invite, not the SSO/group role.

    Flow: admin invites the user as ADMIN; the IDP account belongs to the
    'signoz-viewers' group (which would normally map to VIEWER); after SSO
    login the user must be active with the ADMIN role from the invite.
    """
    email = "sso-pending-invite@saml.integration.test"
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Invite as ADMIN; the new user must start in pending_invite state.
    invite_resp = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": email, "role": "ADMIN", "name": "SAML SSO Pending User"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert invite_resp.status_code == HTTPStatus.CREATED
    assert invite_resp.json()["data"]["status"] == "pending_invite"

    # IDP account sits in the viewer group — a plain SSO provisioning path
    # would assign VIEWER.
    create_user_idp_with_groups(email, "password", True, ["signoz-viewers"])
    perform_saml_login(
        signoz, driver, get_session_context, idp_login, email, "password"
    )

    # The invite role must win over the SSO-derived role.
    activated = get_user_by_email(signoz, admin_token, email)
    assert activated is not None
    assert activated["status"] == "active"
    assert activated["role"] == "ADMIN"
def test_saml_sso_deleted_user_gets_new_user_on_login(
    signoz: SigNoz,
    idp: TestContainerIDP,  # pylint: disable=unused-argument
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    Verify the deleted-user SAML SSO lifecycle:

    1. Invite and activate a user (EDITOR)
    2. Soft delete the user (status set to 'deleted' directly in the DB)
    3. SSO login — the deleted user row must stay deleted (never reactivated)
    4. The same SSO login auto-provisions a brand-new active user (with a
       different ID and the default VIEWER role) under the same email
    """
    email = "sso-deleted-lifecycle@saml.integration.test"
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    # --- Step 1: Invite and activate via password reset ---
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": email, "role": "EDITOR", "name": "SAML SSO Lifecycle User"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.CREATED
    user_id = response.json()["data"]["id"]
    # Fetch a reset-password token as admin, then set the password to
    # complete the invite flow and activate the user.
    response = requests.get(
        signoz.self.host_configs["8080"].get(
            f"/api/v1/getResetPasswordToken/{user_id}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.OK
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
        json={"password": "password123Z$", "token": response.json()["data"]["token"]},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
    # --- Step 2: Soft delete via DB (feature flag may not be enabled) ---
    with signoz.sqlstore.conn.connect() as conn:
        conn.execute(
            sql.text("UPDATE users SET status = 'deleted' WHERE id = :user_id"),
            {"user_id": user_id},
        )
        conn.commit()
    # --- Step 3: SSO login should be blocked for the deleted user ---
    create_user_idp(email, "password", True, "SAML", "Lifecycle")
    perform_saml_login(signoz, driver, get_session_context, idp_login, email, "password")
    # Verify user is NOT reactivated — check via DB since the API may filter
    # deleted users out of list responses.
    with signoz.sqlstore.conn.connect() as conn:
        result = conn.execute(
            sql.text("SELECT status FROM users WHERE id = :user_id"),
            {"user_id": user_id},
        )
        row = result.fetchone()
        assert row is not None
        assert row[0] == "deleted"
    # --- Step 4: a NEW active user was auto-provisioned via SSO ---
    found_user = get_user_by_email(signoz, admin_token, email)
    assert found_user is not None
    assert found_user["status"] == "active"
    assert found_user["id"] != user_id  # new user, different ID
    assert found_user["role"] == "VIEWER"  # default role from SSO domain config

View File

@@ -4,7 +4,6 @@ from urllib.parse import urlparse
import requests
from selenium import webdriver
from sqlalchemy import sql
from wiremock.resources.mappings import Mapping
from fixtures.auth import (
@@ -533,142 +532,3 @@ def test_oidc_empty_name_uses_fallback(
assert found_user is not None
assert found_user["role"] == "VIEWER"
# Note: displayName may be empty - this is a known limitation
def test_oidc_sso_login_activates_pending_invite_user(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp_with_groups: Callable[[str, str, bool, List[str]], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    An invited (pending_invite) user who logs in via OIDC SSO must be
    auto-activated with the role from the invite, not the SSO/group role.

    Flow: admin invites the user as ADMIN; the IDP account belongs to the
    'signoz-viewers' group (which would normally map to VIEWER); after SSO
    login the user must be active with the ADMIN role from the invite.
    """
    email = "sso-pending-invite@oidc.integration.test"
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Invite as ADMIN; the new user must start in pending_invite state.
    invite_resp = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": email, "role": "ADMIN", "name": "OIDC SSO Pending User"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert invite_resp.status_code == HTTPStatus.CREATED
    assert invite_resp.json()["data"]["status"] == "pending_invite"

    # IDP account sits in the viewer group — a plain SSO provisioning path
    # would assign VIEWER.
    create_user_idp_with_groups(email, "password123", True, ["signoz-viewers"])
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, email, "password123"
    )

    # The invite role must win over the SSO-derived role.
    activated = get_user_by_email(signoz, admin_token, email)
    assert activated is not None
    assert activated["status"] == "active"
    assert activated["role"] == "ADMIN"
def test_oidc_sso_deleted_user_blocked_and_reinvite_activates(
    signoz: SigNoz,
    idp: TestContainerIDP,
    driver: webdriver.Chrome,
    create_user_idp: Callable[[str, str, bool, str, str], None],
    idp_login: Callable[[str, str], None],
    get_token: Callable[[str, str], str],
    get_session_context: Callable[[str], str],
) -> None:
    """
    Verify the full deleted-user OIDC SSO lifecycle:

    1. Invite + activate a user (EDITOR)
    2. Soft delete the user (status set to 'deleted' directly in the DB)
    3. SSO login attempt — the user must remain deleted (blocked)
    4. Re-invite the same email as VIEWER
    5. SSO login — the user must become active with the VIEWER role
    """
    email = "sso-deleted-lifecycle@oidc.integration.test"
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    # --- Step 1: Invite and activate via password reset ---
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": email, "role": "EDITOR", "name": "OIDC SSO Lifecycle User"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.CREATED
    user_id = response.json()["data"]["id"]
    # Fetch a reset-password token as admin, then set the password to
    # complete the invite flow and activate the user.
    response = requests.get(
        signoz.self.host_configs["8080"].get(
            f"/api/v1/getResetPasswordToken/{user_id}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.OK
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
        json={"password": "password123Z$", "token": response.json()["data"]["token"]},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
    # --- Step 2: Soft delete via DB (feature flag may not be enabled) ---
    with signoz.sqlstore.conn.connect() as conn:
        conn.execute(
            sql.text("UPDATE users SET status = 'deleted' WHERE id = :user_id"),
            {"user_id": user_id},
        )
        conn.commit()
    # --- Step 3: SSO login should be blocked for deleted user ---
    create_user_idp(email, "password123", True, "OIDC", "Lifecycle")
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, email, "password123"
    )
    # Verify user is NOT reactivated — check via DB since the API may filter
    # deleted users out of list responses.
    with signoz.sqlstore.conn.connect() as conn:
        result = conn.execute(
            sql.text("SELECT status FROM users WHERE id = :user_id"),
            {"user_id": user_id},
        )
        row = result.fetchone()
        assert row is not None
        assert row[0] == "deleted"
    # --- Step 4: Re-invite as VIEWER ---
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": email, "role": "VIEWER", "name": "OIDC SSO Lifecycle User v2"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert response.status_code == HTTPStatus.CREATED
    assert response.json()["data"]["status"] == "pending_invite"
    assert response.json()["data"]["role"] == "VIEWER"
    # --- Step 5: SSO login should activate with new role ---
    # Clear cookies so the second login starts a fresh browser session.
    driver.delete_all_cookies()
    perform_oidc_login(
        signoz, idp, driver, get_session_context, idp_login, email, "password123"
    )
    found_user = get_user_by_email(signoz, admin_token, email)
    assert found_user is not None
    assert found_user["status"] == "active"
    assert found_user["role"] == "VIEWER"

View File

@@ -104,76 +104,68 @@ def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
def test_invite_and_register(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
# Generate an invite token for the editor user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "editor@integration.test", "role": "EDITOR", "name": "editor"},
timeout=2,
headers={
"Authorization": f"Bearer {admin_token}"
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
assert invited_user["email"] == "editor@integration.test"
assert invited_user["status"] == "pending_invite"
assert invited_user["role"] == "EDITOR"
# Verify the user appears in the users list but with pending_invite status
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
headers={
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "editor@integration.test"),
invite_response = response.json()["data"]
found_invite = next(
(
invite
for invite in invite_response
if invite["email"] == "editor@integration.test"
),
None,
)
assert found_user is not None
assert found_user["status"] == "pending_invite"
assert invited_user["role"] == "EDITOR"
# Get the reset password token through admin token
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
# Reset the password to complete the invite flow (activates the user and also grants authz)
# Register the editor user using the invite token
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password123Z$",
"displayName": "editor",
"token": f"{found_invite['token']}",
},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert response.status_code == HTTPStatus.CREATED
# Verify the user can now log in
editor_token = get_token("editor@integration.test", "password123Z$")
assert editor_token is not None
# Verify that the invite token has been deleted
response = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v1/invite/{found_invite['token']}"),
timeout=2,
)
assert response.status_code in (HTTPStatus.NOT_FOUND, HTTPStatus.BAD_REQUEST)
# Verify that an admin endpoint cannot be called by the editor user
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {editor_token}"
"Authorization": f"Bearer {get_token("editor@integration.test", "password123Z$")}"
},
)
assert response.status_code == HTTPStatus.FORBIDDEN
# Verify that the editor user status has been updated to ACTIVE
# Verify that the editor has been created
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
@@ -194,50 +186,59 @@ def test_invite_and_register(
assert found_user["role"] == "EDITOR"
assert found_user["displayName"] == "editor"
assert found_user["email"] == "editor@integration.test"
assert found_user["status"] == "active"
def test_revoke_invite_and_register(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
# Invite the viewer user
# Generate an invite token for the viewer user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "viewer@integration.test", "role": "VIEWER"},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
# Get reset password token before revoking
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
headers={"Authorization": f"Bearer {admin_token}"},
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
)
invite_response = response.json()["data"]
found_invite = next(
(
invite
for invite in invite_response
if invite["email"] == "viewer@integration.test"
),
None,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
# Delete the pending invite user (revoke the invite)
response = requests.delete(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{invited_user['id']}"),
signoz.self.host_configs["8080"].get(f"/api/v1/invite/{found_invite['id']}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Try to use the reset token — should fail (user deleted)
# Try registering the viewer user with the invite token
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password123Z$",
"displayName": "viewer",
"token": f"{found_invite["token"]}",
},
timeout=2,
)
assert response.status_code in (HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)

View File

@@ -22,27 +22,50 @@ def test_change_password(
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
# Get reset password token
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
# Reset password to activate user
invite_response = response.json()["data"]
found_invite = next(
(
invite
for invite in invite_response
if invite["email"] == "admin+password@integration.test"
),
None,
)
# Accept the invite with a bad password which should fail
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password",
"displayName": "admin password",
"token": f"{found_invite['token']}",
},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert response.status_code == HTTPStatus.BAD_REQUEST
# Accept the invite with a good password
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password123Z$",
"displayName": "admin password",
"token": f"{found_invite['token']}",
},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
# Get the user id
response = requests.get(
@@ -278,25 +301,33 @@ def test_forgot_password_creates_reset_token(
)
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
# Activate user via reset password
# Get the invite token
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
signoz.self.host_configs["8080"].get("/api/v1/invite"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
invite_response = response.json()["data"]
found_invite = next(
(
invite
for invite in invite_response
if invite["email"] == "forgot@integration.test"
),
None,
)
# Accept the invite to create the user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "originalPassword123Z$", "token": reset_token},
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "originalPassword123Z$",
"displayName": "forgotpassword user",
"token": f"{found_invite['token']}",
},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert response.status_code == HTTPStatus.CREATED
# Get org ID
response = requests.get(

View File

@@ -23,25 +23,20 @@ def test_change_role(
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
# Activate user via reset password
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
invite_token = response.json()["data"]["token"]
# Accept the invite of the new user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={
"password": "password123Z$",
"displayName": "role change user",
"token": f"{invite_token}",
},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert response.status_code == HTTPStatus.CREATED
# Make some API calls as new user
new_user_token, new_user_refresh_token = get_tokens(

View File

@@ -20,48 +20,43 @@ def test_duplicate_user_invite_rejected(
"""
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Invite a new user
response = requests.post(
# Step 1: Invite a new user.
initial_invite_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": DUPLICATE_USER_EMAIL, "role": "EDITOR"},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
invited_user = response.json()["data"]
assert initial_invite_response.status_code == HTTPStatus.CREATED
initial_invite_token = initial_invite_response.json()["data"]["token"]
# Invite the same email again — should fail
response = requests.post(
# Step 2: Accept the invite to create the user.
initial_accept_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={"token": initial_invite_token, "password": "password123Z$"},
timeout=2,
)
assert initial_accept_response.status_code == HTTPStatus.CREATED
# Step 3: Invite the same email again.
duplicate_invite_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": DUPLICATE_USER_EMAIL, "role": "VIEWER"},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CONFLICT
# Activate the user via reset password
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
headers={"Authorization": f"Bearer {admin_token}"},
# The invite creation itself may be rejected if the app checks for existing users.
if duplicate_invite_response.status_code != HTTPStatus.CREATED:
assert duplicate_invite_response.status_code == HTTPStatus.CONFLICT
return
duplicate_invite_token = duplicate_invite_response.json()["data"]["token"]
# Step 4: Accept the duplicate invite — should fail due to unique constraint.
duplicate_accept_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json={"token": duplicate_invite_token, "password": "password123Z$"},
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Try to invite the same email again — should fail
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": DUPLICATE_USER_EMAIL, "role": "VIEWER"},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CONFLICT
assert duplicate_accept_response.status_code == HTTPStatus.CONFLICT

View File

@@ -1,116 +0,0 @@
from http import HTTPStatus
from typing import Callable
import requests
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.types import SigNoz
from sqlalchemy import sql
def test_reinvite_deleted_user(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """
    A soft-deleted user can be re-invited under the same email:

    1. Invite and activate a user (EDITOR)
    2. Soft delete the user directly in the DB
    3. Re-invite the same email — succeeds as a NEW pending_invite user
    4. Reset the password — the new user becomes active and can log in
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    def activate(user_id: str, password: str) -> None:
        # Complete the invite flow: fetch a reset token as admin, then set
        # the password, which activates the user.
        token_resp = requests.get(
            signoz.self.host_configs["8080"].get(
                f"/api/v1/getResetPasswordToken/{user_id}"
            ),
            headers={"Authorization": f"Bearer {admin_token}"},
            timeout=2,
        )
        assert token_resp.status_code == HTTPStatus.OK
        reset_resp = requests.post(
            signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
            json={"password": password, "token": token_resp.json()["data"]["token"]},
            timeout=2,
        )
        assert reset_resp.status_code == HTTPStatus.NO_CONTENT

    # Step 1: invite and activate the original user.
    first_invite = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": "reinvite@integration.test", "role": "EDITOR", "name": "reinvite user"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert first_invite.status_code == HTTPStatus.CREATED
    original_user = first_invite.json()["data"]
    activate(original_user["id"], "password123Z$")

    # Step 2: soft delete via the DB (the delete feature flag may be off).
    with signoz.sqlstore.conn.connect() as conn:
        conn.execute(
            sql.text("UPDATE users SET status = 'deleted' WHERE id = :user_id"),
            {"user_id": original_user["id"]},
        )
        conn.commit()

    # Step 3: re-inviting the same email must succeed with a fresh user row.
    second_invite = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": "reinvite@integration.test", "role": "VIEWER", "name": "reinvite user v2"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert second_invite.status_code == HTTPStatus.CREATED
    new_user = second_invite.json()["data"]
    assert new_user["status"] == "pending_invite"
    assert new_user["role"] == "VIEWER"
    assert new_user["id"] != original_user["id"]  # confirms a new user was created

    # Step 4: activate the new user and confirm login with the new password.
    activate(new_user["id"], "newPassword123Z$")
    assert get_token("reinvite@integration.test", "newPassword123Z$") is not None
def test_bulk_invite(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """The bulk invite endpoint creates multiple pending_invite users in one call."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    bulk_payload = {
        "invites": [
            {"email": "bulk1@integration.test", "role": "EDITOR", "name": "bulk user 1"},
            {"email": "bulk2@integration.test", "role": "VIEWER", "name": "bulk user 2"},
        ]
    }
    bulk_resp = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite/bulk"),
        json=bulk_payload,
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert bulk_resp.status_code == HTTPStatus.CREATED

View File

@@ -34,25 +34,19 @@ def test_user_invite_accept_role_grant(
timeout=2,
)
assert invite_response.status_code == HTTPStatus.CREATED
invited_user = invite_response.json()["data"]
invite_token = invite_response.json()["data"]["token"]
# Activate user via reset password
response = requests.get(
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{invited_user['id']}"
),
headers={"Authorization": f"Bearer {admin_token}"},
# accept the invite for editor
accept_payload = {
"token": invite_token,
"password": "password123Z$",
}
accept_response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json=accept_payload,
timeout=2,
)
assert response.status_code == HTTPStatus.OK
reset_token = response.json()["data"]["token"]
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": USER_EDITOR_PASSWORD, "token": reset_token},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert accept_response.status_code == HTTPStatus.CREATED
# Login with editor email and password
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)