mirror of
https://github.com/SigNoz/signoz.git
synced 2026-03-02 20:12:08 +00:00
Compare commits
9 Commits
refactor/c
...
testing-fe
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
eea594af3b | ||
|
|
dbcd1a598e | ||
|
|
00ce5a91ea | ||
|
|
4b84b715b4 | ||
|
|
b3c08ec417 | ||
|
|
036f362fa6 | ||
|
|
6a9a910eb6 | ||
|
|
4cd7489280 | ||
|
|
ff0736532d |
@@ -6167,6 +6167,10 @@ paths:
|
|||||||
name: searchText
|
name: searchText
|
||||||
schema:
|
schema:
|
||||||
type: string
|
type: string
|
||||||
|
- in: query
|
||||||
|
name: source
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
content:
|
content:
|
||||||
|
|||||||
@@ -80,32 +80,9 @@ Do not define an interface before you have at least two concrete implementations
|
|||||||
|
|
||||||
The exception is interfaces required for testing (e.g., for mocking an external dependency). In that case, define the interface in the **consuming** package, not the providing package, following the Go convention of [accepting interfaces and returning structs](https://go.dev/wiki/CodeReviewComments#interfaces).
|
The exception is interfaces required for testing (e.g., for mocking an external dependency). In that case, define the interface in the **consuming** package, not the providing package, following the Go convention of [accepting interfaces and returning structs](https://go.dev/wiki/CodeReviewComments#interfaces).
|
||||||
|
|
||||||
### 6. Wrappers must add semantics, not just rename
|
|
||||||
|
|
||||||
A wrapper type is justified when it adds meaning, validation, or invariants that the underlying type does not carry. It is not justified when it merely renames fields or reorganizes the same data into a different shape.
|
|
||||||
|
|
||||||
```go
|
|
||||||
// Justified: adds validation that the underlying string does not carry.
|
|
||||||
type OrgID struct{ value string }
|
|
||||||
func NewOrgID(s string) (OrgID, error) { /* validates format */ }
|
|
||||||
|
|
||||||
// Not justified: renames fields with no new invariant or behavior.
|
|
||||||
type UserInfo struct {
|
|
||||||
Name string // same as source.Name
|
|
||||||
Email string // same as source.Email
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Ask: what does the wrapper guarantee that the underlying type does not? If the answer is nothing, use the underlying type directly.
|
|
||||||
|
|
||||||
## When a new type IS warranted
|
## When a new type IS warranted
|
||||||
|
|
||||||
A new type earns its place when it meets **at least one** of these criteria:
|
See [Types](types.md#when-a-new-type-is-warranted) for the criteria that justify introducing a new type.
|
||||||
|
|
||||||
- **Serialization boundary**: It must be persisted, sent over the wire, or written to config. The source type is unsuitable (unexported fields, function pointers, cycles).
|
|
||||||
- **Invariant enforcement**: The constructor or methods enforce constraints that raw data does not carry (e.g., non-empty, validated format, bounded range).
|
|
||||||
- **Multiple distinct consumers**: Three or more call sites use the type in meaningfully different ways. The type is the shared vocabulary between them.
|
|
||||||
- **Dependency firewall**: The type lives in a lightweight package so that consumers avoid importing a heavy dependency.
|
|
||||||
|
|
||||||
## What should I remember?
|
## What should I remember?
|
||||||
|
|
||||||
|
|||||||
@@ -49,6 +49,43 @@ Follow these rules:
|
|||||||
|
|
||||||
5. **Test files stay alongside source**: Unit tests go in `_test.go` files next to the code they test, in the same package.
|
5. **Test files stay alongside source**: Unit tests go in `_test.go` files next to the code they test, in the same package.
|
||||||
|
|
||||||
|
## How should I order code within a file?
|
||||||
|
|
||||||
|
Within a single `.go` file, declarations should follow this order:
|
||||||
|
|
||||||
|
1. Constants
|
||||||
|
2. Variables
|
||||||
|
3. Types (structs, interfaces)
|
||||||
|
4. Constructor functions (`New...`)
|
||||||
|
5. Exported methods and functions
|
||||||
|
6. Unexported methods and functions
|
||||||
|
|
||||||
|
```go
|
||||||
|
// 1. Constants
|
||||||
|
const defaultTimeout = 30 * time.Second
|
||||||
|
|
||||||
|
// 2. Variables
|
||||||
|
var ErrNotFound = errors.New(errors.TypeNotFound, errors.CodeNotFound, "resource not found")
|
||||||
|
|
||||||
|
// 3. Types
|
||||||
|
type Store struct {
|
||||||
|
db *sql.DB
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Constructors
|
||||||
|
func NewStore(db *sql.DB) *Store {
|
||||||
|
return &Store{db: db}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 5. Exported methods
|
||||||
|
func (s *Store) Get(ctx context.Context, id string) (*Resource, error) { ... }
|
||||||
|
|
||||||
|
// 6. Unexported methods
|
||||||
|
func (s *Store) buildQuery(id string) string { ... }
|
||||||
|
```
|
||||||
|
|
||||||
|
This ordering makes files predictable. A reader scanning from top to bottom sees the contract (constants, types, constructors) before the implementation (methods), and exported behavior before internal helpers.
|
||||||
|
|
||||||
## How should I name symbols?
|
## How should I name symbols?
|
||||||
|
|
||||||
### Exported symbols
|
### Exported symbols
|
||||||
@@ -90,9 +127,7 @@ Never introduce circular imports. If package A needs package B and B needs A, ex
|
|||||||
|
|
||||||
## Where do shared types go?
|
## Where do shared types go?
|
||||||
|
|
||||||
Most types belong in `pkg/types/` under a domain-specific sub-package (e.g., `pkg/types/ruletypes`, `pkg/types/authtypes`).
|
See [Types](types.md) for full conventions on type placement, naming variants, composition, and constructors.
|
||||||
|
|
||||||
Do not put domain logic in `pkg/types/`. Only data structures, constants, and simple methods.
|
|
||||||
|
|
||||||
## How do I merge or move packages?
|
## How do I merge or move packages?
|
||||||
|
|
||||||
@@ -105,6 +140,10 @@ When two packages are tightly coupled (one imports the other's constants, they c
|
|||||||
5. Delete the old packages. Do not leave behind re-export shims.
|
5. Delete the old packages. Do not leave behind re-export shims.
|
||||||
6. Verify with `go build ./...`, `go test ./<new-pkg>/...`, and `go vet ./...`.
|
6. Verify with `go build ./...`, `go test ./<new-pkg>/...`, and `go vet ./...`.
|
||||||
|
|
||||||
|
## When should I use valuer types?
|
||||||
|
|
||||||
|
See [Types](types.md#typed-domain-values-pkgvaluer) for valuer types, when to use them, and the enum pattern using `valuer.String`.
|
||||||
|
|
||||||
## When should I add documentation?
|
## When should I add documentation?
|
||||||
|
|
||||||
Add a `doc.go` with a package-level comment for any package that is non-trivial or has multiple consumers. Keep it to 1–3 sentences:
|
Add a `doc.go` with a package-level comment for any package that is non-trivial or has multiple consumers. Keep it to 1–3 sentences:
|
||||||
@@ -119,6 +158,10 @@ package cache
|
|||||||
|
|
||||||
- Package names are domain-specific and lowercase. Never generic names like `util` or `common`.
|
- Package names are domain-specific and lowercase. Never generic names like `util` or `common`.
|
||||||
- The file matching the package name (e.g., `cache.go`) defines the public interface. Implementation details go elsewhere.
|
- The file matching the package name (e.g., `cache.go`) defines the public interface. Implementation details go elsewhere.
|
||||||
|
- Within a file, order declarations: constants, variables, types, constructors, exported functions, unexported functions.
|
||||||
|
- Segregate types across files by responsibility. A file with 5 unrelated types is harder to navigate than 5 files with one type each.
|
||||||
|
- Use valuer types (`valuer.String`, `valuer.Email`, `valuer.UUID`, `valuer.TextDuration`) for domain values that need validation, normalization, or cross-boundary serialization. See [Types](types.md#typed-domain-values-pkgvaluer) for details.
|
||||||
|
- Avoid `init()` functions. If you need to initialize a variable, use a package-level `var` with a function call or a `sync.Once`. `init()` hides execution order, makes testing harder, and has caused subtle bugs in large codebases.
|
||||||
- Never introduce circular imports. Extract shared types into `pkg/types/` when needed.
|
- Never introduce circular imports. Extract shared types into `pkg/types/` when needed.
|
||||||
- Watch for symbol name collisions when merging packages, prefix to disambiguate.
|
- Watch for symbol name collisions when merging packages, prefix to disambiguate.
|
||||||
- Put test helpers in a `{pkg}test/` sub-package, not in the main package.
|
- Put test helpers in a `{pkg}test/` sub-package, not in the main package.
|
||||||
|
|||||||
@@ -8,7 +8,31 @@ We adhere to three primary style guides as our foundation:
|
|||||||
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
|
- [Code Review Comments](https://go.dev/wiki/CodeReviewComments) - For understanding common comments in code reviews
|
||||||
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
|
- [Google Style Guide](https://google.github.io/styleguide/go/) - Additional practices from Google
|
||||||
|
|
||||||
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development. In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package:
|
We **recommend** (almost enforce) reviewing these guides before contributing to the codebase. They provide valuable insights into writing idiomatic Go code and will help you understand our approach to backend development.
|
||||||
|
|
||||||
|
**Discover before inventing.** Before writing new code, search the codebase for existing solutions. SigNoz has established patterns for common problems: `pkg/valuer` for typed domain values, `pkg/errors` for structured errors, `pkg/factory` for provider wiring, `{pkg}test/` sub-packages for test helpers, and shared fixtures for integration tests. Duplicating what already exists creates drift and maintenance burden. When you find an existing pattern, use it. When you don't find one, check with the maintainers before building your own.
|
||||||
|
|
||||||
|
## How to approach a feature
|
||||||
|
|
||||||
|
Building a feature is not one task, it is a sequence of concerns that build on each other. Work through them in this order:
|
||||||
|
|
||||||
|
1. **Domain design (types).** Define the types that represent your domain. What are the entities, what are their relationships, what are the constraints? This is where you decide your data model. Get this right first because everything else depends on it. See [Packages](packages.md) and [Abstractions](abstractions.md).
|
||||||
|
|
||||||
|
2. **Structure (services / modules / handlers).** Place your code in the right layer given the current infrastructure. If the current structure does not work for your feature, that is the time to open a discussion and write a technical document, not to silently reshape things in the same PR. See [Handler](handler.md) and [Provider](provider.md).
|
||||||
|
|
||||||
|
3. **HTTP endpoints (paths, status codes, errors).** Pay close attention to detail here. Paths, methods, request/response shapes, status codes, error codes. These are the contract with consumers and are expensive to change after release. See [Endpoint](endpoint.md) and [Handler](handler.md).
|
||||||
|
|
||||||
|
4. **Database constraints (org_id, foreign keys, migrations).** Ensure org scoping, schema consistency, and migration correctness. See [SQL](sql.md).
|
||||||
|
|
||||||
|
5. **Business logic (module layer).** With the types, structure, endpoints, and storage in place, the focus narrows to the actual logic. This is where review should concentrate on correctness, edge cases, and error handling.
|
||||||
|
|
||||||
|
This ordering also gives you a natural way to split PRs. Each layer affects a different area and requires a different lens for review. A PR that mixes refactoring with new feature logic is hard to review and risky to ship. Separate them.
|
||||||
|
|
||||||
|
For large refactors or features that touch multiple subsystems, write a short technical document outlining the design and get relevant stakeholders aligned before starting implementation. This saves significant back-and-forth during review.
|
||||||
|
|
||||||
|
## Area-specific guides
|
||||||
|
|
||||||
|
In addition, we have a few additional rules that make certain areas stricter than the above which can be found in area-specific files in this package:
|
||||||
|
|
||||||
- [Abstractions](abstractions.md) - When to introduce new types and intermediate representations
|
- [Abstractions](abstractions.md) - When to introduce new types and intermediate representations
|
||||||
- [Errors](errors.md) - Structured error handling
|
- [Errors](errors.md) - Structured error handling
|
||||||
@@ -20,3 +44,5 @@ We **recommend** (almost enforce) reviewing these guides before contributing to
|
|||||||
- [Packages](packages.md) - Naming, layout, and conventions for `pkg/` packages
|
- [Packages](packages.md) - Naming, layout, and conventions for `pkg/` packages
|
||||||
- [Service](service.md) - Managed service lifecycle with `factory.Service`
|
- [Service](service.md) - Managed service lifecycle with `factory.Service`
|
||||||
- [SQL](sql.md) - Database and SQL patterns
|
- [SQL](sql.md) - Database and SQL patterns
|
||||||
|
- [Testing](testing.md) - Writing tests that catch bugs without becoming a maintenance burden
|
||||||
|
- [Types](types.md) - Type placement, naming variants, composition, and constructors
|
||||||
|
|||||||
260
docs/contributing/go/testing.md
Normal file
260
docs/contributing/go/testing.md
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
# Testing
|
||||||
|
|
||||||
|
This document provides rules for writing tests that catch real bugs and do not become a maintenance burden. It covers both how to write good tests and how to recognize bad ones.
|
||||||
|
|
||||||
|
## Why we write tests
|
||||||
|
|
||||||
|
Tests exist to give confidence that the system behaves correctly. A good test suite lets you change code and know immediately (or in a reasonable time) whether you broke something. A bad test suite lets you change code (and then spend hours figuring out whether the failures are real) and still lets the bugs slip in.
|
||||||
|
|
||||||
|
Every test should be written to answer one question: **if this test fails, does that mean a user-visible behavior is broken?** If the answer is no, reconsider whether the test should exist.
|
||||||
|
|
||||||
|
Not all tests are equal. Different scopes serve different purposes, and the balance matters.
|
||||||
|
|
||||||
|
- **Unit tests**: Fast, focused, test a single function or type in isolation. These form the foundation. They should run in milliseconds, have no I/O, and be fully deterministic.
|
||||||
|
- **Integration tests**: Verify that components work together against real dependencies (ClickHouse, PostgreSQL, etc.). Slower, but catch problems that unit tests cannot: real query behavior, configuration issues, serialization mismatches.
|
||||||
|
- **End-to-end tests**: Validate full system behavior from the outside. Expensive to write and maintain, but necessary for critical user flows.
|
||||||
|
|
||||||
|
When a test can be written at a smaller scope, prefer the smaller scope. But do not force a unit test where an integration test is the natural fit.
|
||||||
|
|
||||||
|
## What to test
|
||||||
|
|
||||||
|
### Test behaviors, not implementations
|
||||||
|
|
||||||
|
A test should verify what the code does, not how it does it (unless the goal of the test is specifically to verify how something happens). If you can refactor the internals of a function — e.g., change a query, rename a variable, restructure the logic — and no user-visible behavior changes, no test should break.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Good: tests the behavior "given this input, expect this output."
|
||||||
|
func TestDiscountApplied(t *testing.T) {
|
||||||
|
order := NewOrder(item("widget", 100))
|
||||||
|
order.ApplyDiscount(10)
|
||||||
|
assert.Equal(t, 90, order.Total())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bad: tests the implementation "did it call the right internal method?"
|
||||||
|
func TestDiscountApplied(t *testing.T) {
|
||||||
|
mockPricer := new(MockPricer)
|
||||||
|
mockPricer.On("CalculateDiscount", 100, 10).Return(90)
|
||||||
|
order := NewOrder(item("widget", 100), WithPricer(mockPricer))
|
||||||
|
order.ApplyDiscount(10)
|
||||||
|
mockPricer.AssertCalled(t, "CalculateDiscount", 100, 10)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The first test survives a refactoring of how discounts are calculated. The second test breaks the moment you rename the method, change its signature, or inline the logic.
|
||||||
|
|
||||||
|
**The refactoring test**: before committing a test, ask yourself: if someone refactors the internals tomorrow without changing any behavior, will this test break? If yes, consider rewriting the test.
|
||||||
|
|
||||||
|
### Output format as behavior
|
||||||
|
|
||||||
|
Some functions exist specifically to produce a formatted output: a query builder generates SQL, a serializer generates JSON, a code generator produces source code. In these cases, the output string *is* the behavior and asserting on it is valid and necessary. The function's contract is the exact output it produces.
|
||||||
|
|
||||||
|
This is different from testing a function that *uses* a query internally. If a function's job is to fetch data from a database, the query it sends is an implementation detail and the returned data is the behavior. If its job is to *build* a query for someone else to execute, the query string is the behavior.
|
||||||
|
|
||||||
|
The distinction: **is the formatted output the function's product, or the function's mechanism?** Test the product, not the mechanism.
|
||||||
|
|
||||||
|
### Test at the public API boundary
|
||||||
|
|
||||||
|
Write tests against the exported functions and methods that consumers actually call. Do not test unexported helpers directly. If an unexported function has complex logic worth testing, that is a signal it should be extracted into its own package with its own public API.
|
||||||
|
|
||||||
|
### Test edge cases and error paths
|
||||||
|
|
||||||
|
The most valuable tests cover the cases that are easy to get wrong:
|
||||||
|
|
||||||
|
- Empty inputs, nil inputs, zero values.
|
||||||
|
- Boundary conditions (off-by-one, first element, last element).
|
||||||
|
- Error conditions (what happens when the dependency fails?).
|
||||||
|
- Concurrent access, if the code is designed for it.
|
||||||
|
|
||||||
|
A test for the happy path of a trivial function adds little value. A test for the error path of a complex function prevents real bugs.
|
||||||
|
|
||||||
|
### The Beyonce Rule
|
||||||
|
|
||||||
|
"If you liked it, then you should have put a test on it." Any behavior you want to preserve — such as correctness, performance characteristics, security constraints, or error handling — should be covered by a test. If it breaks and there is no test, that is not a regression; it is an untested assumption.
|
||||||
|
|
||||||
|
## How to write a test
|
||||||
|
|
||||||
|
### Structure: arrange, act, assert
|
||||||
|
|
||||||
|
Every test should have three clearly separated sections:
|
||||||
|
|
||||||
|
```go
|
||||||
|
func TestTransferInsufficientFunds(t *testing.T) {
|
||||||
|
// Arrange: set up the preconditions.
|
||||||
|
from := NewAccount(50)
|
||||||
|
to := NewAccount(0)
|
||||||
|
|
||||||
|
// Act: perform the operation being tested.
|
||||||
|
err := Transfer(from, to, 100)
|
||||||
|
|
||||||
|
// Assert: verify the outcome.
|
||||||
|
require.Error(t, err)
|
||||||
|
assert.Equal(t, 50, from.Balance())
|
||||||
|
assert.Equal(t, 0, to.Balance())
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Do not interleave setup and assertions. Do not put assertions in helper functions that also perform setup. Keep the three sections visually distinct.
|
||||||
|
|
||||||
|
### One behavior per test
|
||||||
|
|
||||||
|
Each test function should verify one behavior. If a test name needs "and" in it, split it into two tests.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Good: one behavior per test.
|
||||||
|
func TestParseValidInput(t *testing.T) { ... }
|
||||||
|
func TestParseEmptyInput(t *testing.T) { ... }
|
||||||
|
func TestParseMalformedInput(t *testing.T) { ... }
|
||||||
|
|
||||||
|
// Bad: multiple behaviors in one test.
|
||||||
|
func TestParse(t *testing.T) {
|
||||||
|
// test valid input
|
||||||
|
// test empty input
|
||||||
|
// test malformed input
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Table-driven tests are fine when the behavior is the same and only the inputs/outputs vary.
|
||||||
|
|
||||||
|
### Name tests after behaviors
|
||||||
|
|
||||||
|
Test names should describe the scenario and the expected outcome, not the function being tested.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Good: describes the behavior.
|
||||||
|
func TestWithdrawal_InsufficientFunds_ReturnsError(t *testing.T)
|
||||||
|
func TestWithdrawal_ZeroBalance_ReturnsError(t *testing.T)
|
||||||
|
|
||||||
|
// Bad: describes the function.
|
||||||
|
func TestWithdraw(t *testing.T)
|
||||||
|
func TestWithdrawError(t *testing.T)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Eliminate logic in tests
|
||||||
|
|
||||||
|
Tests should be straight-line code. No `if`, no `for`, no `switch`. If you feel the need to add control flow to a test, either split it into multiple tests or restructure the test data.
|
||||||
|
|
||||||
|
A test with logic in it needs its own tests. That is a sign something has gone wrong.
|
||||||
|
|
||||||
|
### Write clear failure messages
|
||||||
|
|
||||||
|
When a test fails, the failure message should tell you what went wrong without reading the test source.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Good: failure message explains the context.
|
||||||
|
assert.Equal(t, expected, actual, "discount should be applied to order total")
|
||||||
|
|
||||||
|
// Bad: failure message is just the default.
|
||||||
|
assert.Equal(t, expected, actual)
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `require` for preconditions that must hold for the rest of the test to make sense. Use `assert` for the actual verifications. This avoids cascading failures from a single root cause.
|
||||||
|
|
||||||
|
## How to recognize a bad test
|
||||||
|
|
||||||
|
A bad test costs more to maintain than the bugs it prevents. Learning to identify bad tests is as important as learning to write good ones. Always evaluate a test critically before committing it.
|
||||||
|
|
||||||
|
### Tests that duplicate the implementation
|
||||||
|
|
||||||
|
If a test contains the same logic as the code it tests, it verifies nothing. It will pass when the code is wrong in the same way the test is wrong, and it will break whenever the code changes even if the change is correct.
|
||||||
|
|
||||||
|
A common form: mocking a database, setting up canned rows, calling a function that queries and scans those rows, then asserting that the function returned exactly those rows. The test encodes the query, the row structure, and the scan logic — the same things the production code does. If the function has no branching logic beyond "query and scan," this test is a mirror of the implementation, not a check on it. An integration test against a real database verifies the actual behavior; the mock-based test verifies only that the code matches the test author's expectations of the code.
|
||||||
|
|
||||||
|
### Tests for functions with no interesting logic
|
||||||
|
|
||||||
|
Not every function needs a test. A function that prepares a query, sends it, and scans the result has no branching, no edge cases, and no logic that could be wrong independently of the query being correct. Unit-testing it means mocking the database, which means the test does not verify the query works. It only verifies the function calls the mock in the expected way.
|
||||||
|
|
||||||
|
Ask: **what bug would this test catch that would not be caught by the integration test or by the tests of the calling code?** If the answer is nothing, skip the unit test. A missing test is better than a test that provides false confidence.
|
||||||
|
|
||||||
|
### Tests that rebuild the dependency boundary
|
||||||
|
|
||||||
|
When a test creates an in-package mock of an external interface (database driver, HTTP client, file system) and that mock contains non-trivial logic (reflection-based scanning, response simulation, state machines), the test is now testing its own mock as much as the production code. Bugs in the mock produce false passes or false failures, and the mock must be maintained alongside the real dependency.
|
||||||
|
|
||||||
|
If the mock is complex enough to have its own bugs, you have rebuilt the dependency boundary rather than testing against it. Use the real dependency (via integration test) or use a well-maintained fake provided by the dependency's authors.
|
||||||
|
|
||||||
|
### Tests that exist for coverage
|
||||||
|
|
||||||
|
A test that exercises a function without meaningfully verifying its output adds coverage without adding confidence. Calling a type-conversion function with every numeric type and asserting it does not panic covers lines but does not catch regressions. The function would need to be rewritten to fail, and any such rewrite would be caught by the callers' tests.
|
||||||
|
|
||||||
|
Before writing a test, identify the specific failure mode it guards against. If you cannot name one, the test is not worth writing.
|
||||||
|
|
||||||
|
### Tests that test the language
|
||||||
|
|
||||||
|
Do not test that the language's type system, standard library, or well-known third-party libraries work correctly. Testing that `reflect.Kind` returns the right value for each type, that pointer dereferencing works, or that a type switch dispatches correctly adds maintenance burden without catching any plausible bug in your code.
|
||||||
|
|
||||||
|
## Brittle tests
|
||||||
|
|
||||||
|
A brittle test is one that fails when production code changes without an actual bug being introduced. Brittle tests are expensive: they slow down development, train people to ignore failures, and provide no real safety net. Common sources of brittleness:
|
||||||
|
|
||||||
|
- **Asserting on implementation details**: Verifying which internal methods were called, in what order, or with what intermediate values. If the method is renamed or the order changes but the output is the same, the test breaks for no reason.
|
||||||
|
- **Asserting on serialized representations when the format is not the contract**: Matching exact SQL strings, JSON output, or log messages produced by a function whose job is not to produce that format.
|
||||||
|
- **Over-constrained mocks**: Setting up a mock that expects specific arguments in a specific sequence. Any refactoring of the call pattern breaks the mock setup even if behavior is preserved.
|
||||||
|
- **Shared mutable state**: Tests that depend on data left behind by other tests. A change in execution order or a new test case causes unrelated failures.
|
||||||
|
- **Time-dependence**: Tests that use `time.Now()`, `time.Sleep()`, or real timers. These produce flaky results and break under load.
|
||||||
|
|
||||||
|
When you encounter a brittle test, fix or delete it. Do not work around it.
|
||||||
|
|
||||||
|
## DAMP
|
||||||
|
|
||||||
|
Test code should prioritize clarity (DAMP: Descriptive And Meaningful Phrases).
|
||||||
|
|
||||||
|
```go
|
||||||
|
// DAMP: each test is self-contained and readable.
|
||||||
|
func TestCreateUser(t *testing.T) {
|
||||||
|
user := User{Name: "Alice", Email: "alice@example.com"}
|
||||||
|
err := store.Create(ctx, user)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestCreateDuplicateUser(t *testing.T) {
|
||||||
|
user := User{Name: "Alice", Email: "alice@example.com"}
|
||||||
|
_ = store.Create(ctx, user)
|
||||||
|
err := store.Create(ctx, user)
|
||||||
|
assert.ErrorIs(t, err, ErrAlreadyExists)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Shared setup helpers are fine for constructing objects with sensible defaults. But each test should explicitly set the values it depends on rather than relying on hidden defaults in a shared fixture.
|
||||||
|
|
||||||
|
## Flaky tests
|
||||||
|
|
||||||
|
A flaky test is one that sometimes passes and sometimes fails without any code change. Flaky tests erode trust in the entire suite. Once people learn to re-run and ignore failures, real bugs slip through.
|
||||||
|
|
||||||
|
Common causes and fixes:
|
||||||
|
|
||||||
|
- **Timing and sleeps**: Replace `time.Sleep` with channels, condition variables, or polling with a timeout.
|
||||||
|
- **Uncontrolled concurrency**: Use deterministic synchronization rather than relying on goroutine scheduling.
|
||||||
|
- **Shared state between tests**: Each test should set up and tear down its own state.
|
||||||
|
|
||||||
|
If a test is flaky and you cannot fix the root cause quickly, skip or delete it. A skipped test with an explanation is better than a flaky test that trains everyone to ignore red builds.
|
||||||
|
|
||||||
|
## Code coverage
|
||||||
|
|
||||||
|
Code coverage measures which lines were executed, not whether the code is correct. A function that is called but whose output is never checked has 100% coverage and 0% verification.
|
||||||
|
|
||||||
|
Do not use coverage as a target to hit. Use it as a tool to find gaps such as untested error paths, unreachable branches, dead code. A codebase with 60% meaningful coverage is better than one with 95% coverage achieved by testing trivial getters.
|
||||||
|
|
||||||
|
## Tests are code
|
||||||
|
|
||||||
|
Tests must be maintained; they are not second-class citizens. Apply the same standards for readability, naming, and structure that you apply to production code. We do not tolerate complexity in tests just because they are tests.
|
||||||
|
|
||||||
|
However, tests should be simpler than production code. If a test requires its own helper library, complex setup, or nested control flow, step back and ask whether you are testing the right thing at the right level. This is not a blanket rule but a prompt to pause, assess the situation, and check whether the complexity is justified.
|
||||||
|
|
||||||
|
## What should I remember?
|
||||||
|
|
||||||
|
- If refactoring internals breaks your test but no behavior changed, the test is likely bad. Delete it or consider updating it.
|
||||||
|
- Test what the code does, not how it does it. Verify outputs and state, not method calls.
|
||||||
|
- Output format is behavior when the function's job is to produce that format. It is not behavior when the function uses it internally.
|
||||||
|
- Ask what specific bug this test catches. If you cannot name one, do not write it.
|
||||||
|
- Always evaluate whether the test adds confidence, not just lines.
|
||||||
|
- One behavior per test. Name it after the scenario, not the function.
|
||||||
|
- No logic in tests. Straight-line code only.
|
||||||
|
- Flaky tests are not acceptable. Fix the root cause or nuke the test code.
|
||||||
|
- Coverage measures execution, not correctness.
|
||||||
|
|
||||||
|
## Mandatory reading
|
||||||
|
|
||||||
|
- What to look for in a code review: Tests - https://google.github.io/eng-practices/review/reviewer/looking-for.html#tests
|
||||||
|
- Testing Overview - https://abseil.io/resources/swe-book/html/ch11.html
|
||||||
|
- Unit Testing - https://abseil.io/resources/swe-book/html/ch12.html
|
||||||
|
- Test Doubles - https://abseil.io/resources/swe-book/html/ch13.html
|
||||||
|
- Larger Testing - https://abseil.io/resources/swe-book/html/ch14.html
|
||||||
272
docs/contributing/go/types.md
Normal file
272
docs/contributing/go/types.md
Normal file
@@ -0,0 +1,272 @@
|
|||||||
|
# Types
|
||||||
|
|
||||||
|
This guide covers how types are organised, named, constructed, and composed so you can add new ones consistently.
|
||||||
|
|
||||||
|
## Where do types live?
|
||||||
|
|
||||||
|
Types live in `pkg/types/` and its sub-packages:
|
||||||
|
|
||||||
|
```
|
||||||
|
pkg/types/
|
||||||
|
├── auditable.go # TimeAuditable, UserAuditable
|
||||||
|
├── identity.go # Identifiable (UUID primary key)
|
||||||
|
├── user.go # User, PostableRegisterOrgAndAdmin, UserStore
|
||||||
|
├── alertmanagertypes/ # Alert manager domain types
|
||||||
|
│ ├── channel.go
|
||||||
|
│ ├── receiver.go
|
||||||
|
│ └── config.go
|
||||||
|
├── authtypes/ # Auth domain types
|
||||||
|
└── ruletypes/ # Rule domain types
|
||||||
|
└── maintenance.go
|
||||||
|
```
|
||||||
|
|
||||||
|
Follow these rules:
|
||||||
|
|
||||||
|
1. **Embeddable building blocks** go in `pkg/types/` directly `Identifiable`, `TimeAuditable`, `UserAuditable`.
|
||||||
|
2. **Domain-specific types** go in a sub-package named `pkg/types/<domain>types/` (e.g., `alertmanagertypes`, `ruletypes`, `authtypes`).
|
||||||
|
3. **No domain logic** in type packages. Only data structures, constants, and simple methods. Domain services import from type packages, not the other way around.
|
||||||
|
4. **Domain services import types, not vice versa.** If a type needs a service, the design is likely wrong and you should restructure so the service operates on the type.
|
||||||
|
|
||||||
|
## Type variants
|
||||||
|
|
||||||
|
A domain entity often has multiple representations depending on where it appears in the system. We use naming prefixes to distinguish them:
|
||||||
|
|
||||||
|
| Prefix | Purpose | Example |
|
||||||
|
|---|---|---|
|
||||||
|
| `Postable<Type>` | API request input | `PostableRegisterOrgAndAdmin` |
|
||||||
|
| `Gettable<Type>` | API response output | `GettablePlannedMaintenance` |
|
||||||
|
| `Storable<Type>` | Database model (embeds `bun.BaseModel`) | `StorablePlannedMaintenance` |
|
||||||
|
| Plain `<Type>` | Domain logic type | `User` |
|
||||||
|
|
||||||
|
Not every entity needs all four variants. Start with the plain type and add variants only when the API or database representation genuinely differs.
|
||||||
|
|
||||||
|
Here is a concrete example from `pkg/types/ruletypes/maintenance.go`:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Database model embeds bun.BaseModel and composition types
|
||||||
|
type StorablePlannedMaintenance struct {
|
||||||
|
bun.BaseModel `bun:"table:planned_maintenance"`
|
||||||
|
types.Identifiable
|
||||||
|
types.TimeAuditable
|
||||||
|
types.UserAuditable
|
||||||
|
Name string `bun:"name,type:text,notnull"`
|
||||||
|
Description string `bun:"description,type:text"`
|
||||||
|
Schedule *Schedule `bun:"schedule,type:text,notnull"`
|
||||||
|
OrgID string `bun:"org_id,type:text"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// API response: flat struct with JSON tags, computed fields like Status
|
||||||
|
type GettablePlannedMaintenance struct {
|
||||||
|
Id string `json:"id"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Description string `json:"description"`
|
||||||
|
Schedule *Schedule `json:"schedule"`
|
||||||
|
RuleIDs []string `json:"alertIds"`
|
||||||
|
CreatedAt time.Time `json:"createdAt"`
|
||||||
|
CreatedBy string `json:"createdBy"`
|
||||||
|
UpdatedAt time.Time `json:"updatedAt"`
|
||||||
|
UpdatedBy string `json:"updatedBy"`
|
||||||
|
Status string `json:"status"`
|
||||||
|
Kind string `json:"kind"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
When the API shape exactly matches the domain type, use a type alias instead of duplicating fields:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// From pkg/types/user.go
|
||||||
|
type GettableUser = User
|
||||||
|
```
|
||||||
|
|
||||||
|
## Composition via embedding
|
||||||
|
|
||||||
|
`pkg/types/` provides small, reusable structs that you embed into your domain types:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// pkg/types/identity.go
|
||||||
|
type Identifiable struct {
|
||||||
|
ID valuer.UUID `json:"id" bun:"id,pk,type:text"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// pkg/types/auditable.go
|
||||||
|
type TimeAuditable struct {
|
||||||
|
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
|
||||||
|
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UserAuditable struct {
|
||||||
|
CreatedBy string `bun:"created_by,type:text" json:"createdBy"`
|
||||||
|
UpdatedBy string `bun:"updated_by,type:text" json:"updatedBy"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Compose them in a database model:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type StorablePlannedMaintenance struct {
|
||||||
|
bun.BaseModel `bun:"table:planned_maintenance"`
|
||||||
|
types.Identifiable // adds ID (UUID primary key)
|
||||||
|
types.TimeAuditable // adds CreatedAt, UpdatedAt
|
||||||
|
types.UserAuditable // adds CreatedBy, UpdatedBy
|
||||||
|
Name string `bun:"name,type:text,notnull"`
|
||||||
|
Description string `bun:"description,type:text"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
See [SQL](sql.md) for full database patterns including migrations and queries.
|
||||||
|
|
||||||
|
## Constructors
|
||||||
|
|
||||||
|
Constructors validate inputs and return a ready-to-use value:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// New<Type> validates and returns a pointer + error
|
||||||
|
func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUID) (*User, error) {
|
||||||
|
if email.IsZero() {
|
||||||
|
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
|
||||||
|
}
|
||||||
|
if role == "" {
|
||||||
|
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "role is required")
|
||||||
|
}
|
||||||
|
if orgID.IsZero() {
|
||||||
|
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
return &User{
|
||||||
|
Identifiable: Identifiable{ID: valuer.GenerateUUID()},
|
||||||
|
DisplayName: displayName,
|
||||||
|
Email: email,
|
||||||
|
Role: role,
|
||||||
|
OrgID: orgID,
|
||||||
|
TimeAuditable: TimeAuditable{CreatedAt: time.Now(), UpdatedAt: time.Now()},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Follow these conventions:
|
||||||
|
|
||||||
|
- **`New<Type>(args) (*Type, error)`**: validates inputs, returns an error on failure. Use this in production code.
|
||||||
|
- **Validation at construction**: check required fields, format constraints, and invariants in the constructor. Callers should not need to validate after construction.
|
||||||
|
- **Generate IDs internally**: constructors call `valuer.GenerateUUID()` callers do not pass IDs in.
|
||||||
|
- **Set timestamps internally**: constructors set `CreatedAt` and `UpdatedAt` to `time.Now()`.
|
||||||
|
|
||||||
|
## Typed domain values (`pkg/valuer/`)
|
||||||
|
|
||||||
|
The `pkg/valuer` package provides typed wrappers for common domain values. These types carry validation, normalization, and consistent serialization (JSON, SQL, text) that raw Go primitives do not.
|
||||||
|
|
||||||
|
| Type | Wraps | Invariant |
|
||||||
|
|---|---|---|
|
||||||
|
| `valuer.UUID` | `google/uuid.UUID` | Valid UUIDv7, generated via `GenerateUUID()` |
|
||||||
|
| `valuer.Email` | `string` | Valid email format, lowercased and trimmed |
|
||||||
|
| `valuer.String` | `string` | Lowercased and trimmed |
|
||||||
|
| `valuer.TextDuration` | `time.Duration` | Valid duration, text-serializable |
|
||||||
|
|
||||||
|
### When to use a valuer type
|
||||||
|
|
||||||
|
Use a valuer type instead of a raw primitive when the value represents a domain concept with any of:
|
||||||
|
|
||||||
|
- **Enums**: All enums in the codebase must be backed by `valuer.String`. Do not use raw `string` constants or `iota`-based `int` enums. A struct embedding `valuer.String` with predefined variables gives you normalization, serialization, and an `Enum()` method for OpenAPI schema generation in one place.
|
||||||
|
- **Validation**: emails must match a format, UUIDs must be parseable, durations must be valid.
|
||||||
|
- **Normalization**: `valuer.String` lowercases and trims input, so comparisons are consistent throughout the system.
|
||||||
|
- **Serialization boundary**: the value is stored in a database, sent over the wire, or bound from an HTTP parameter. Valuer types implement `Scan`, `Value`, `MarshalJSON`, `UnmarshalJSON`, and `UnmarshalParam` consistently.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Wrong: raw string constant with no validation or normalization.
|
||||||
|
const SignalTraces = "traces"
|
||||||
|
|
||||||
|
// Right: valuer-backed type that normalizes and serializes consistently.
|
||||||
|
type Signal struct {
|
||||||
|
valuer.String
|
||||||
|
}
|
||||||
|
|
||||||
|
var SignalTraces = Signal{valuer.NewString("traces")}
|
||||||
|
```
|
||||||
|
|
||||||
|
Only primitive domain types that serve as shared infrastructure belong in `pkg/valuer`. If you need a new base type (like `Email` or `TextDuration`) that multiple packages will embed for validation and serialization, add it there. Domain-specific types that build on top of a valuer (like `Signal` embedding `valuer.String`) belong in their own domain package, not in `pkg/valuer`.
|
||||||
|
|
||||||
|
### The `Valuer` interface
|
||||||
|
|
||||||
|
Every valuer type implements the `Valuer` interface, which gives you serialization for free:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type Valuer interface {
|
||||||
|
IsZero() bool // check for zero value
|
||||||
|
StringValue() string // raw string representation
|
||||||
|
fmt.Stringer // String() for printing
|
||||||
|
json.Marshaler / json.Unmarshaler // JSON
|
||||||
|
sql.Scanner / driver.Valuer // database
|
||||||
|
encoding.TextMarshaler / TextUnmarshaler // text
|
||||||
|
ginbinding.BindUnmarshaler // HTTP query/path params
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Use them in struct fields:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type User struct {
|
||||||
|
Identifiable
|
||||||
|
Email valuer.Email `bun:"email" json:"email"`
|
||||||
|
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Wrappers must add semantics, not just rename
|
||||||
|
|
||||||
|
A wrapper type is justified when it adds meaning, validation, or invariants that the underlying type does not carry. It is not justified when it merely renames fields or reorganizes the same data into a different shape.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Justified: adds validation that the underlying string does not carry.
|
||||||
|
type OrgID struct{ value string }
|
||||||
|
func NewOrgID(s string) (OrgID, error) { /* validates format */ }
|
||||||
|
|
||||||
|
// Not justified: renames fields with no new invariant or behavior.
|
||||||
|
type UserInfo struct {
|
||||||
|
Name string // same as source.Name
|
||||||
|
Email string // same as source.Email
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Ask: what does the wrapper guarantee that the underlying type does not? If the answer is nothing, use the underlying type directly.
|
||||||
|
|
||||||
|
## When a new type IS warranted
|
||||||
|
|
||||||
|
A new type earns its place when it meets **at least one** of these criteria:
|
||||||
|
|
||||||
|
- **Serialization boundary**: It must be persisted, sent over the wire, or written to config. The source type is unsuitable (unexported fields, function pointers, cycles).
|
||||||
|
- **Invariant enforcement**: The constructor or methods enforce constraints that raw data does not carry (e.g., non-empty, validated format, bounded range).
|
||||||
|
- **Multiple distinct consumers**: Three or more call sites use the type in meaningfully different ways. The type is the shared vocabulary between them.
|
||||||
|
- **Dependency firewall**: The type lives in a lightweight package so that consumers avoid importing a heavy dependency.
|
||||||
|
|
||||||
|
See [Abstractions](abstractions.md) for the full set of rules on when abstractions are and aren't justified.
|
||||||
|
|
||||||
|
## Store interfaces
|
||||||
|
|
||||||
|
Each domain type package defines a store interface for persistence. The store interface lives alongside the types it operates on:
|
||||||
|
|
||||||
|
```go
|
||||||
|
// From pkg/types/ruletypes/maintenance.go
|
||||||
|
type MaintenanceStore interface {
|
||||||
|
CreatePlannedMaintenance(context.Context, GettablePlannedMaintenance) (valuer.UUID, error)
|
||||||
|
DeletePlannedMaintenance(context.Context, valuer.UUID) error
|
||||||
|
GetPlannedMaintenanceByID(context.Context, valuer.UUID) (*GettablePlannedMaintenance, error)
|
||||||
|
EditPlannedMaintenance(context.Context, GettablePlannedMaintenance, valuer.UUID) error
|
||||||
|
GetAllPlannedMaintenance(context.Context, string) ([]*GettablePlannedMaintenance, error)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Conventions:
|
||||||
|
|
||||||
|
- Name the interface `<Domain>Store` (e.g., `UserStore`, `MaintenanceStore`).
|
||||||
|
- Accept `context.Context` as the first parameter.
|
||||||
|
- Use typed values (`valuer.UUID`, `valuer.Email`) instead of raw strings for identifiers.
|
||||||
|
- Implementations go in separate packages (e.g., `sqlstore/`), see [SQL](sql.md) for details.
|
||||||
|
|
||||||
|
## What should I remember?
|
||||||
|
|
||||||
|
- Shared types live in `pkg/types/`, domain types in `pkg/types/<domain>types/`.
|
||||||
|
- No domain logic in type packages only data structures, constants, and simple methods.
|
||||||
|
- Use `Storable`, `Gettable`, `Postable` prefixes when API or database representation differs from the domain type.
|
||||||
|
- Embed `Identifiable`, `TimeAuditable`, and `UserAuditable` for standard fields instead of repeating them.
|
||||||
|
- Constructors (`New<Type>`) validate, generate IDs, and set timestamps.
|
||||||
|
- Use `pkg/valuer/` types instead of raw strings for domain identifiers like UUIDs and emails.
|
||||||
|
- Store interfaces live alongside the types they persist and use `context.Context` as the first parameter.
|
||||||
@@ -3451,6 +3451,11 @@ export type ListMetricsParams = {
|
|||||||
* @description undefined
|
* @description undefined
|
||||||
*/
|
*/
|
||||||
searchText?: string;
|
searchText?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @description undefined
|
||||||
|
*/
|
||||||
|
source?: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type ListMetrics200 = {
|
export type ListMetrics200 = {
|
||||||
|
|||||||
@@ -60,11 +60,30 @@
|
|||||||
gap: 8px;
|
gap: 8px;
|
||||||
|
|
||||||
margin-left: 108px;
|
margin-left: 108px;
|
||||||
|
position: relative;
|
||||||
|
|
||||||
|
/* Vertical dashed line connecting query elements */
|
||||||
|
&::after {
|
||||||
|
content: '';
|
||||||
|
position: absolute;
|
||||||
|
left: -28px;
|
||||||
|
top: 0;
|
||||||
|
bottom: 0;
|
||||||
|
width: 1px;
|
||||||
|
background: repeating-linear-gradient(
|
||||||
|
to bottom,
|
||||||
|
#1d212d,
|
||||||
|
#1d212d 4px,
|
||||||
|
transparent 4px,
|
||||||
|
transparent 8px
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
.code-mirror-where-clause,
|
.code-mirror-where-clause,
|
||||||
.query-aggregation-container,
|
.query-aggregation-container,
|
||||||
.query-add-ons,
|
.query-add-ons,
|
||||||
.metrics-aggregation-section-content {
|
.metrics-aggregation-section-content,
|
||||||
|
.metrics-container {
|
||||||
position: relative;
|
position: relative;
|
||||||
|
|
||||||
&::before {
|
&::before {
|
||||||
@@ -102,6 +121,10 @@
|
|||||||
.qb-elements-container {
|
.qb-elements-container {
|
||||||
margin-left: 0px;
|
margin-left: 0px;
|
||||||
|
|
||||||
|
&::after {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
.code-mirror-where-clause,
|
.code-mirror-where-clause,
|
||||||
.query-aggregation-container,
|
.query-aggregation-container,
|
||||||
.query-add-ons,
|
.query-add-ons,
|
||||||
@@ -333,28 +356,7 @@
|
|||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
|
|
||||||
&::before {
|
&::before {
|
||||||
content: '';
|
display: none;
|
||||||
height: 120px;
|
|
||||||
content: '';
|
|
||||||
position: absolute;
|
|
||||||
left: 0;
|
|
||||||
top: 31px;
|
|
||||||
bottom: 0;
|
|
||||||
width: 1px;
|
|
||||||
background: repeating-linear-gradient(
|
|
||||||
to bottom,
|
|
||||||
#1d212d,
|
|
||||||
#1d212d 4px,
|
|
||||||
transparent 4px,
|
|
||||||
transparent 8px
|
|
||||||
);
|
|
||||||
left: 15px;
|
|
||||||
}
|
|
||||||
|
|
||||||
&.has-trace-operator {
|
|
||||||
&::before {
|
|
||||||
height: 0px;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -462,10 +464,21 @@
|
|||||||
|
|
||||||
.qb-content-section {
|
.qb-content-section {
|
||||||
.qb-elements-container {
|
.qb-elements-container {
|
||||||
|
&::after {
|
||||||
|
background: repeating-linear-gradient(
|
||||||
|
to bottom,
|
||||||
|
var(--bg-vanilla-300),
|
||||||
|
var(--bg-vanilla-300) 4px,
|
||||||
|
transparent 4px,
|
||||||
|
transparent 8px
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
.code-mirror-where-clause,
|
.code-mirror-where-clause,
|
||||||
.query-aggregation-container,
|
.query-aggregation-container,
|
||||||
.query-add-ons,
|
.query-add-ons,
|
||||||
.metrics-aggregation-section-content {
|
.metrics-aggregation-section-content,
|
||||||
|
.metrics-container {
|
||||||
&::before {
|
&::before {
|
||||||
border-left: 6px dotted var(--bg-vanilla-300);
|
border-left: 6px dotted var(--bg-vanilla-300);
|
||||||
}
|
}
|
||||||
@@ -529,18 +542,6 @@
|
|||||||
|
|
||||||
.qb-entity-options {
|
.qb-entity-options {
|
||||||
.options {
|
.options {
|
||||||
.query-name {
|
|
||||||
&::before {
|
|
||||||
background: repeating-linear-gradient(
|
|
||||||
to bottom,
|
|
||||||
var(--bg-vanilla-300),
|
|
||||||
var(--bg-vanilla-300) 4px,
|
|
||||||
transparent 4px,
|
|
||||||
transparent 8px
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.formula-name {
|
.formula-name {
|
||||||
&::before {
|
&::before {
|
||||||
background: repeating-linear-gradient(
|
background: repeating-linear-gradient(
|
||||||
|
|||||||
@@ -207,6 +207,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
|
|||||||
queryVariant={config?.queryVariant || 'dropdown'}
|
queryVariant={config?.queryVariant || 'dropdown'}
|
||||||
showOnlyWhereClause={showOnlyWhereClause}
|
showOnlyWhereClause={showOnlyWhereClause}
|
||||||
isListViewPanel={isListViewPanel}
|
isListViewPanel={isListViewPanel}
|
||||||
|
signalSource={currentQuery.builder.queryData[0].source as 'meter' | ''}
|
||||||
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
|
onSignalSourceChange={onSignalSourceChange || ((): void => {})}
|
||||||
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
signalSourceChangeEnabled={signalSourceChangeEnabled}
|
||||||
queriesCount={1}
|
queriesCount={1}
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
import { memo, useCallback, useMemo, useState } from 'react';
|
import { memo, useCallback, useMemo } from 'react';
|
||||||
import { Select } from 'antd';
|
import { Select } from 'antd';
|
||||||
import {
|
import {
|
||||||
initialQueriesMap,
|
initialQueriesMap,
|
||||||
initialQueryMeterWithType,
|
initialQueryMeterWithType,
|
||||||
PANEL_TYPES,
|
PANEL_TYPES,
|
||||||
} from 'constants/queryBuilder';
|
} from 'constants/queryBuilder';
|
||||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
import { MetricNameSelector } from 'container/QueryBuilder/filters';
|
||||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
|
||||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
import { DataSource } from 'types/common/queryBuilder';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
import { SelectOption } from 'types/common/select';
|
import { SelectOption } from 'types/common/select';
|
||||||
@@ -44,21 +43,12 @@ export const MetricsSelect = memo(function MetricsSelect({
|
|||||||
signalSourceChangeEnabled: boolean;
|
signalSourceChangeEnabled: boolean;
|
||||||
savePreviousQuery: boolean;
|
savePreviousQuery: boolean;
|
||||||
}): JSX.Element {
|
}): JSX.Element {
|
||||||
const [attributeKeys, setAttributeKeys] = useState<BaseAutocompleteData[]>([]);
|
|
||||||
|
|
||||||
const { handleChangeAggregatorAttribute } = useQueryOperations({
|
const { handleChangeAggregatorAttribute } = useQueryOperations({
|
||||||
index,
|
index,
|
||||||
query,
|
query,
|
||||||
entityVersion: version,
|
entityVersion: version,
|
||||||
});
|
});
|
||||||
|
|
||||||
const handleAggregatorAttributeChange = useCallback(
|
|
||||||
(value: BaseAutocompleteData, isEditMode?: boolean) => {
|
|
||||||
handleChangeAggregatorAttribute(value, isEditMode, attributeKeys || []);
|
|
||||||
},
|
|
||||||
[handleChangeAggregatorAttribute, attributeKeys],
|
|
||||||
);
|
|
||||||
|
|
||||||
const {
|
const {
|
||||||
updateAllQueriesOperators,
|
updateAllQueriesOperators,
|
||||||
handleSetQueryData,
|
handleSetQueryData,
|
||||||
@@ -164,12 +154,10 @@ export const MetricsSelect = memo(function MetricsSelect({
|
|||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
<AggregatorFilter
|
<MetricNameSelector
|
||||||
onChange={handleAggregatorAttributeChange}
|
onChange={handleChangeAggregatorAttribute}
|
||||||
query={query}
|
query={query}
|
||||||
index={index}
|
|
||||||
signalSource={signalSource || ''}
|
signalSource={signalSource || ''}
|
||||||
setAttributeKeys={setAttributeKeys}
|
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -202,8 +202,8 @@ function QueryAddOns({
|
|||||||
} else {
|
} else {
|
||||||
filteredAddOns = Object.values(ADD_ONS);
|
filteredAddOns = Object.values(ADD_ONS);
|
||||||
|
|
||||||
// Filter out group_by for metrics data source
|
|
||||||
if (query.dataSource === DataSource.METRICS) {
|
if (query.dataSource === DataSource.METRICS) {
|
||||||
|
// Filter out group_by for metrics data source (handled in MetricsAggregateSection)
|
||||||
filteredAddOns = filteredAddOns.filter(
|
filteredAddOns = filteredAddOns.filter(
|
||||||
(addOn) => addOn.key !== ADD_ONS_KEYS.GROUP_BY,
|
(addOn) => addOn.key !== ADD_ONS_KEYS.GROUP_BY,
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -43,6 +43,7 @@ jest.mock(
|
|||||||
);
|
);
|
||||||
jest.mock('container/QueryBuilder/filters', () => ({
|
jest.mock('container/QueryBuilder/filters', () => ({
|
||||||
AggregatorFilter: (): JSX.Element => <div />,
|
AggregatorFilter: (): JSX.Element => <div />,
|
||||||
|
MetricNameSelector: (): JSX.Element => <div />,
|
||||||
}));
|
}));
|
||||||
// Mock hooks
|
// Mock hooks
|
||||||
jest.mock('hooks/queryBuilder/useQueryBuilder');
|
jest.mock('hooks/queryBuilder/useQueryBuilder');
|
||||||
|
|||||||
@@ -29,7 +29,6 @@ export enum LOCALSTORAGE {
|
|||||||
DONT_SHOW_SLOW_API_WARNING = 'DONT_SHOW_SLOW_API_WARNING',
|
DONT_SHOW_SLOW_API_WARNING = 'DONT_SHOW_SLOW_API_WARNING',
|
||||||
METRICS_LIST_OPTIONS = 'METRICS_LIST_OPTIONS',
|
METRICS_LIST_OPTIONS = 'METRICS_LIST_OPTIONS',
|
||||||
SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
|
SHOW_EXCEPTIONS_QUICK_FILTERS = 'SHOW_EXCEPTIONS_QUICK_FILTERS',
|
||||||
BANNER_DISMISSED = 'BANNER_DISMISSED',
|
|
||||||
QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
|
QUICK_FILTERS_SETTINGS_ANNOUNCEMENT = 'QUICK_FILTERS_SETTINGS_ANNOUNCEMENT',
|
||||||
FUNNEL_STEPS = 'FUNNEL_STEPS',
|
FUNNEL_STEPS = 'FUNNEL_STEPS',
|
||||||
SPAN_DETAILS_PINNED_ATTRIBUTES = 'SPAN_DETAILS_PINNED_ATTRIBUTES',
|
SPAN_DETAILS_PINNED_ATTRIBUTES = 'SPAN_DETAILS_PINNED_ATTRIBUTES',
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
// ** Helpers
|
// ** Helpers
|
||||||
|
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
|
import { createIdFromObjectFields } from 'lib/createIdFromObjectFields';
|
||||||
import { createNewBuilderItemName } from 'lib/newQueryBuilder/createNewBuilderItemName';
|
import { createNewBuilderItemName } from 'lib/newQueryBuilder/createNewBuilderItemName';
|
||||||
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
|
import { IAttributeValuesResponse } from 'types/api/queryBuilder/getAttributesValues';
|
||||||
@@ -177,7 +178,7 @@ export const initialQueryBuilderFormValues: IBuilderQuery = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: MetricAggregateOperator.COUNT,
|
timeAggregation: MetricAggregateOperator.AVG,
|
||||||
spaceAggregation: MetricAggregateOperator.SUM,
|
spaceAggregation: MetricAggregateOperator.SUM,
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
@@ -225,7 +226,7 @@ export const initialQueryBuilderFormMeterValues: IBuilderQuery = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: MeterAggregateOperator.COUNT,
|
timeAggregation: MeterAggregateOperator.AVG,
|
||||||
spaceAggregation: MeterAggregateOperator.SUM,
|
spaceAggregation: MeterAggregateOperator.SUM,
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
@@ -371,6 +372,31 @@ export enum ATTRIBUTE_TYPES {
|
|||||||
EXPONENTIAL_HISTOGRAM = 'ExponentialHistogram',
|
EXPONENTIAL_HISTOGRAM = 'ExponentialHistogram',
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const METRIC_TYPE_TO_ATTRIBUTE_TYPE: Record<
|
||||||
|
MetrictypesTypeDTO,
|
||||||
|
ATTRIBUTE_TYPES
|
||||||
|
> = {
|
||||||
|
[MetrictypesTypeDTO.sum]: ATTRIBUTE_TYPES.SUM,
|
||||||
|
[MetrictypesTypeDTO.gauge]: ATTRIBUTE_TYPES.GAUGE,
|
||||||
|
[MetrictypesTypeDTO.histogram]: ATTRIBUTE_TYPES.HISTOGRAM,
|
||||||
|
[MetrictypesTypeDTO.summary]: ATTRIBUTE_TYPES.GAUGE,
|
||||||
|
[MetrictypesTypeDTO.exponentialhistogram]:
|
||||||
|
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM,
|
||||||
|
};
|
||||||
|
|
||||||
|
export function toAttributeType(
|
||||||
|
metricType: MetrictypesTypeDTO | undefined,
|
||||||
|
isMonotonic?: boolean,
|
||||||
|
): ATTRIBUTE_TYPES | '' {
|
||||||
|
if (!metricType) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
if (metricType === MetrictypesTypeDTO.sum && isMonotonic === false) {
|
||||||
|
return ATTRIBUTE_TYPES.GAUGE;
|
||||||
|
}
|
||||||
|
return METRIC_TYPE_TO_ATTRIBUTE_TYPE[metricType] || '';
|
||||||
|
}
|
||||||
|
|
||||||
export type IQueryBuilderState = 'search';
|
export type IQueryBuilderState = 'search';
|
||||||
|
|
||||||
export const QUERY_BUILDER_SEARCH_VALUES = {
|
export const QUERY_BUILDER_SEARCH_VALUES = {
|
||||||
|
|||||||
@@ -441,7 +441,7 @@ describe('Footer utils', () => {
|
|||||||
reduceTo: undefined,
|
reduceTo: undefined,
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
temporality: undefined,
|
temporality: undefined,
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
disabled: false,
|
disabled: false,
|
||||||
|
|||||||
@@ -25,51 +25,6 @@
|
|||||||
background: var(--bg-slate-500);
|
background: var(--bg-slate-500);
|
||||||
}
|
}
|
||||||
|
|
||||||
.home-container-banner {
|
|
||||||
position: relative;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: row;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
padding: 8px 12px;
|
|
||||||
width: 100%;
|
|
||||||
background-color: var(--bg-robin-500);
|
|
||||||
|
|
||||||
.home-container-banner-close {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
cursor: pointer;
|
|
||||||
color: var(--bg-vanilla-100);
|
|
||||||
|
|
||||||
position: absolute;
|
|
||||||
right: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.home-container-banner-content {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
gap: 4px;
|
|
||||||
color: var(--bg-vanilla-100);
|
|
||||||
font-family: Inter;
|
|
||||||
font-size: 12px;
|
|
||||||
font-style: normal;
|
|
||||||
font-weight: 400;
|
|
||||||
line-height: 20px;
|
|
||||||
|
|
||||||
.home-container-banner-link {
|
|
||||||
color: var(--bg-vanilla-100);
|
|
||||||
font-size: 12px;
|
|
||||||
font-style: normal;
|
|
||||||
font-weight: 400;
|
|
||||||
line-height: 20px;
|
|
||||||
letter-spacing: -0.07px;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.home-header-left {
|
.home-header-left {
|
||||||
display: flex;
|
display: flex;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||||
import { useMutation, useQuery } from 'react-query';
|
import { useMutation, useQuery } from 'react-query';
|
||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
|
import { Compass, Dot, House, Plus, Wrench } from '@signozhq/icons';
|
||||||
import { Button, Popover } from 'antd';
|
import { Button, Popover } from 'antd';
|
||||||
import logEvent from 'api/common/logEvent';
|
import logEvent from 'api/common/logEvent';
|
||||||
import listUserPreferences from 'api/v1/user/preferences/list';
|
import listUserPreferences from 'api/v1/user/preferences/list';
|
||||||
import updateUserPreferenceAPI from 'api/v1/user/preferences/name/update';
|
import updateUserPreferenceAPI from 'api/v1/user/preferences/name/update';
|
||||||
import Header from 'components/Header/Header';
|
import Header from 'components/Header/Header';
|
||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
|
||||||
import { ORG_PREFERENCES } from 'constants/orgPreferences';
|
import { ORG_PREFERENCES } from 'constants/orgPreferences';
|
||||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||||
@@ -15,10 +15,8 @@ import ROUTES from 'constants/routes';
|
|||||||
import { getMetricsListQuery } from 'container/MetricsExplorer/Summary/utils';
|
import { getMetricsListQuery } from 'container/MetricsExplorer/Summary/utils';
|
||||||
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
|
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
|
||||||
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
|
||||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
|
||||||
import history from 'lib/history';
|
import history from 'lib/history';
|
||||||
import cloneDeep from 'lodash-es/cloneDeep';
|
import cloneDeep from 'lodash-es/cloneDeep';
|
||||||
import { CompassIcon, DotIcon, HomeIcon, Plus, Wrench, X } from 'lucide-react';
|
|
||||||
import { AnimatePresence } from 'motion/react';
|
import { AnimatePresence } from 'motion/react';
|
||||||
import * as motion from 'motion/react-client';
|
import * as motion from 'motion/react-client';
|
||||||
import Card from 'periscope/components/Card/Card';
|
import Card from 'periscope/components/Card/Card';
|
||||||
@@ -51,8 +49,6 @@ export default function Home(): JSX.Element {
|
|||||||
const [updatingUserPreferences, setUpdatingUserPreferences] = useState(false);
|
const [updatingUserPreferences, setUpdatingUserPreferences] = useState(false);
|
||||||
const [loadingUserPreferences, setLoadingUserPreferences] = useState(true);
|
const [loadingUserPreferences, setLoadingUserPreferences] = useState(true);
|
||||||
|
|
||||||
const { isCommunityUser, isCommunityEnterpriseUser } = useGetTenantLicense();
|
|
||||||
|
|
||||||
const [checklistItems, setChecklistItems] = useState<ChecklistItem[]>(
|
const [checklistItems, setChecklistItems] = useState<ChecklistItem[]>(
|
||||||
defaultChecklistItemsState,
|
defaultChecklistItemsState,
|
||||||
);
|
);
|
||||||
@@ -61,13 +57,6 @@ export default function Home(): JSX.Element {
|
|||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
const [isBannerDismissed, setIsBannerDismissed] = useState(false);
|
|
||||||
|
|
||||||
useEffect(() => {
|
|
||||||
const bannerDismissed = localStorage.getItem(LOCALSTORAGE.BANNER_DISMISSED);
|
|
||||||
setIsBannerDismissed(bannerDismissed === 'true');
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const now = new Date();
|
const now = new Date();
|
||||||
const startTime = new Date(now.getTime() - homeInterval);
|
const startTime = new Date(now.getTime() - homeInterval);
|
||||||
@@ -298,44 +287,13 @@ export default function Home(): JSX.Element {
|
|||||||
logEvent('Homepage: Visited', {});
|
logEvent('Homepage: Visited', {});
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const hideBanner = (): void => {
|
|
||||||
localStorage.setItem(LOCALSTORAGE.BANNER_DISMISSED, 'true');
|
|
||||||
setIsBannerDismissed(true);
|
|
||||||
};
|
|
||||||
|
|
||||||
const showBanner = useMemo(
|
|
||||||
() => !isBannerDismissed && (isCommunityUser || isCommunityEnterpriseUser),
|
|
||||||
[isBannerDismissed, isCommunityUser, isCommunityEnterpriseUser],
|
|
||||||
);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="home-container">
|
<div className="home-container">
|
||||||
<div className="sticky-header">
|
<div className="sticky-header">
|
||||||
{showBanner && (
|
|
||||||
<div className="home-container-banner">
|
|
||||||
<div className="home-container-banner-content">
|
|
||||||
Big News: SigNoz Community Edition now available with SSO (Google OAuth)
|
|
||||||
and API keys -
|
|
||||||
<a
|
|
||||||
href="https://signoz.io/blog/open-source-signoz-now-available-with-sso-and-api-keys/"
|
|
||||||
target="_blank"
|
|
||||||
rel="noreferrer"
|
|
||||||
className="home-container-banner-link"
|
|
||||||
>
|
|
||||||
<i>read more</i>
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div className="home-container-banner-close">
|
|
||||||
<X size={16} onClick={hideBanner} />
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
|
|
||||||
<Header
|
<Header
|
||||||
leftComponent={
|
leftComponent={
|
||||||
<div className="home-header-left">
|
<div className="home-header-left">
|
||||||
<HomeIcon size={14} /> Home
|
<House size={14} /> Home
|
||||||
</div>
|
</div>
|
||||||
}
|
}
|
||||||
rightComponent={
|
rightComponent={
|
||||||
@@ -400,7 +358,7 @@ export default function Home(): JSX.Element {
|
|||||||
<div className="active-ingestion-card-content-container">
|
<div className="active-ingestion-card-content-container">
|
||||||
<div className="active-ingestion-card-content">
|
<div className="active-ingestion-card-content">
|
||||||
<div className="active-ingestion-card-content-icon">
|
<div className="active-ingestion-card-content-icon">
|
||||||
<DotIcon size={16} color={Color.BG_FOREST_500} />
|
<Dot size={16} color={Color.BG_FOREST_500} />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="active-ingestion-card-content-description">
|
<div className="active-ingestion-card-content-description">
|
||||||
@@ -427,7 +385,7 @@ export default function Home(): JSX.Element {
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<CompassIcon size={12} />
|
<Compass size={12} />
|
||||||
Explore Logs
|
Explore Logs
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -441,7 +399,7 @@ export default function Home(): JSX.Element {
|
|||||||
<div className="active-ingestion-card-content-container">
|
<div className="active-ingestion-card-content-container">
|
||||||
<div className="active-ingestion-card-content">
|
<div className="active-ingestion-card-content">
|
||||||
<div className="active-ingestion-card-content-icon">
|
<div className="active-ingestion-card-content-icon">
|
||||||
<DotIcon size={16} color={Color.BG_FOREST_500} />
|
<Dot size={16} color={Color.BG_FOREST_500} />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="active-ingestion-card-content-description">
|
<div className="active-ingestion-card-content-description">
|
||||||
@@ -468,7 +426,7 @@ export default function Home(): JSX.Element {
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<CompassIcon size={12} />
|
<Compass size={12} />
|
||||||
Explore Traces
|
Explore Traces
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -482,7 +440,7 @@ export default function Home(): JSX.Element {
|
|||||||
<div className="active-ingestion-card-content-container">
|
<div className="active-ingestion-card-content-container">
|
||||||
<div className="active-ingestion-card-content">
|
<div className="active-ingestion-card-content">
|
||||||
<div className="active-ingestion-card-content-icon">
|
<div className="active-ingestion-card-content-icon">
|
||||||
<DotIcon size={16} color={Color.BG_FOREST_500} />
|
<Dot size={16} color={Color.BG_FOREST_500} />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="active-ingestion-card-content-description">
|
<div className="active-ingestion-card-content-description">
|
||||||
@@ -509,7 +467,7 @@ export default function Home(): JSX.Element {
|
|||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<CompassIcon size={12} />
|
<Compass size={12} />
|
||||||
Explore Metrics
|
Explore Metrics
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -6,6 +6,7 @@ import { isAxiosError } from 'axios';
|
|||||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||||
import { initialQueryMeterWithType, PANEL_TYPES } from 'constants/queryBuilder';
|
import { initialQueryMeterWithType, PANEL_TYPES } from 'constants/queryBuilder';
|
||||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||||
|
import EmptyMetricsSearch from 'container/MetricsExplorer/Explorer/EmptyMetricsSearch';
|
||||||
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter';
|
import { BuilderUnitsFilter } from 'container/QueryBuilder/filters/BuilderUnitsFilter';
|
||||||
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
import TimeSeriesView from 'container/TimeSeriesView/TimeSeriesView';
|
||||||
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
|
import { convertDataValueToMs } from 'container/TimeSeriesView/utils';
|
||||||
@@ -115,27 +116,34 @@ function TimeSeries(): JSX.Element {
|
|||||||
setYAxisUnit(value);
|
setYAxisUnit(value);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const hasMetricSelected = useMemo(
|
||||||
|
() => currentQuery.builder.queryData.some((q) => q.aggregateAttribute?.key),
|
||||||
|
[currentQuery],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="meter-time-series-container">
|
<div className="meter-time-series-container">
|
||||||
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
|
<BuilderUnitsFilter onChange={onUnitChangeHandler} yAxisUnit={yAxisUnit} />
|
||||||
<div className="time-series-container">
|
<div className="time-series-container">
|
||||||
{responseData.map((datapoint, index) => (
|
{!hasMetricSelected && <EmptyMetricsSearch />}
|
||||||
<div
|
{hasMetricSelected &&
|
||||||
className="time-series-view-panel"
|
responseData.map((datapoint, index) => (
|
||||||
// eslint-disable-next-line react/no-array-index-key
|
<div
|
||||||
key={index}
|
className="time-series-view-panel"
|
||||||
>
|
// eslint-disable-next-line react/no-array-index-key
|
||||||
<TimeSeriesView
|
key={index}
|
||||||
isFilterApplied={false}
|
>
|
||||||
isError={queries[index].isError}
|
<TimeSeriesView
|
||||||
isLoading={queries[index].isLoading}
|
isFilterApplied={false}
|
||||||
data={datapoint}
|
isError={queries[index].isError}
|
||||||
dataSource={DataSource.METRICS}
|
isLoading={queries[index].isLoading}
|
||||||
yAxisUnit={yAxisUnit}
|
data={datapoint}
|
||||||
panelType={PANEL_TYPES.BAR}
|
dataSource={DataSource.METRICS}
|
||||||
/>
|
yAxisUnit={yAxisUnit}
|
||||||
</div>
|
panelType={PANEL_TYPES.BAR}
|
||||||
))}
|
/>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,13 +1,21 @@
|
|||||||
import { Typography } from 'antd';
|
import { Typography } from 'antd';
|
||||||
import { Empty } from 'antd/lib';
|
import { Empty } from 'antd/lib';
|
||||||
|
|
||||||
export default function EmptyMetricsSearch(): JSX.Element {
|
interface EmptyMetricsSearchProps {
|
||||||
|
hasQueryResult?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function EmptyMetricsSearch({
|
||||||
|
hasQueryResult,
|
||||||
|
}: EmptyMetricsSearchProps): JSX.Element {
|
||||||
return (
|
return (
|
||||||
<div className="empty-metrics-search">
|
<div className="empty-metrics-search">
|
||||||
<Empty
|
<Empty
|
||||||
description={
|
description={
|
||||||
<Typography.Title level={5}>
|
<Typography.Title level={5}>
|
||||||
Please build and run a valid query to see the result
|
{hasQueryResult
|
||||||
|
? 'No data'
|
||||||
|
: 'Select a metric and run a query to see the results'}
|
||||||
</Typography.Title>
|
</Typography.Title>
|
||||||
}
|
}
|
||||||
/>
|
/>
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ function Explorer(): JSX.Element {
|
|||||||
!isMetricUnitsLoading &&
|
!isMetricUnitsLoading &&
|
||||||
!isMetricUnitsError &&
|
!isMetricUnitsError &&
|
||||||
units.length > 0 &&
|
units.length > 0 &&
|
||||||
units.every((unit) => unit && unit === units[0]),
|
units.every((unit) => unit === units[0]),
|
||||||
[units, isMetricUnitsLoading, isMetricUnitsError],
|
[units, isMetricUnitsLoading, isMetricUnitsError],
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
|||||||
import { DataSource } from 'types/common/queryBuilder';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||||
|
|
||||||
|
import EmptyMetricsSearch from './EmptyMetricsSearch';
|
||||||
import { TimeSeriesProps } from './types';
|
import { TimeSeriesProps } from './types';
|
||||||
import {
|
import {
|
||||||
buildUpdateMetricYAxisUnitPayload,
|
buildUpdateMetricYAxisUnitPayload,
|
||||||
@@ -209,7 +210,7 @@ function TimeSeries({
|
|||||||
{showSaveUnitButton && (
|
{showSaveUnitButton && (
|
||||||
<div className="save-unit-container">
|
<div className="save-unit-container">
|
||||||
<Typography.Text>
|
<Typography.Text>
|
||||||
Save the selected unit for this metric?
|
Set the selected unit as the metric unit?
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
<Button
|
<Button
|
||||||
type="primary"
|
type="primary"
|
||||||
@@ -229,64 +230,71 @@ function TimeSeries({
|
|||||||
'time-series-container': changeLayoutForOneChartPerQuery,
|
'time-series-container': changeLayoutForOneChartPerQuery,
|
||||||
})}
|
})}
|
||||||
>
|
>
|
||||||
{responseData.map((datapoint, index) => {
|
{metricNames.length === 0 && <EmptyMetricsSearch />}
|
||||||
const isQueryDataItem = index < metricNames.length;
|
{metricNames.length > 0 &&
|
||||||
const metricName = isQueryDataItem ? metricNames[index] : undefined;
|
responseData.map((datapoint, index) => {
|
||||||
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
|
const isQueryDataItem = index < metricNames.length;
|
||||||
|
const metricName = isQueryDataItem ? metricNames[index] : undefined;
|
||||||
|
const metricUnit = isQueryDataItem ? metricUnits[index] : undefined;
|
||||||
|
|
||||||
// Show the no unit warning if -
|
// Show the no unit warning if -
|
||||||
// 1. The metric query is not loading
|
// 1. The metric query is not loading
|
||||||
// 2. The metric units are not loading
|
// 2. The metric units are not loading
|
||||||
// 3. There are more than one metric
|
// 3. There are more than one metric
|
||||||
// 4. The current metric unit is empty
|
// 4. The current metric unit is empty
|
||||||
// 5. Is a queryData item
|
// 5. Is a queryData item
|
||||||
const isMetricUnitEmpty =
|
const isMetricUnitEmpty =
|
||||||
isQueryDataItem &&
|
isQueryDataItem &&
|
||||||
!queries[index].isLoading &&
|
!queries[index].isLoading &&
|
||||||
!isMetricUnitsLoading &&
|
!isMetricUnitsLoading &&
|
||||||
metricUnits.length > 1 &&
|
metricUnits.length > 1 &&
|
||||||
!metricUnit &&
|
!metricUnit &&
|
||||||
metricName;
|
metricName;
|
||||||
|
|
||||||
const currentYAxisUnit = yAxisUnit || metricUnit;
|
const currentYAxisUnit = yAxisUnit || metricUnit;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className="time-series-view"
|
className="time-series-view"
|
||||||
// eslint-disable-next-line react/no-array-index-key
|
// eslint-disable-next-line react/no-array-index-key
|
||||||
key={index}
|
key={index}
|
||||||
>
|
>
|
||||||
{isMetricUnitEmpty && metricName && (
|
{isMetricUnitEmpty && metricName && (
|
||||||
<Tooltip
|
<Tooltip
|
||||||
className="no-unit-warning"
|
className="no-unit-warning"
|
||||||
title={
|
title={
|
||||||
<Typography.Text>
|
<Typography.Text>
|
||||||
This metric does not have a unit. Please set one for it in the{' '}
|
No unit is set for this metric. You can assign one from the{' '}
|
||||||
<Typography.Link
|
<Typography.Link
|
||||||
onClick={(): void => handleOpenMetricDetails(metricName)}
|
onClick={(): void => handleOpenMetricDetails(metricName)}
|
||||||
>
|
>
|
||||||
metric details
|
metric details
|
||||||
</Typography.Link>{' '}
|
</Typography.Link>{' '}
|
||||||
page.
|
page.
|
||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
}
|
}
|
||||||
>
|
>
|
||||||
<AlertTriangle size={16} color={Color.BG_AMBER_400} />
|
<AlertTriangle
|
||||||
</Tooltip>
|
size={16}
|
||||||
)}
|
color={Color.BG_AMBER_400}
|
||||||
<TimeSeriesView
|
role="img"
|
||||||
isFilterApplied={false}
|
aria-label="no unit warning"
|
||||||
isError={queries[index].isError}
|
/>
|
||||||
isLoading={queries[index].isLoading || isMetricUnitsLoading}
|
</Tooltip>
|
||||||
data={datapoint}
|
)}
|
||||||
yAxisUnit={currentYAxisUnit}
|
<TimeSeriesView
|
||||||
dataSource={DataSource.METRICS}
|
isFilterApplied={false}
|
||||||
error={queries[index].error as APIError}
|
isError={queries[index].isError}
|
||||||
setWarning={setWarning}
|
isLoading={queries[index].isLoading || isMetricUnitsLoading}
|
||||||
/>
|
data={datapoint}
|
||||||
</div>
|
yAxisUnit={currentYAxisUnit}
|
||||||
);
|
dataSource={DataSource.METRICS}
|
||||||
})}
|
error={queries[index].error as APIError}
|
||||||
|
setWarning={setWarning}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
</div>
|
</div>
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -0,0 +1,19 @@
|
|||||||
|
import { render, screen } from '@testing-library/react';
|
||||||
|
|
||||||
|
import EmptyMetricsSearch from '../EmptyMetricsSearch';
|
||||||
|
|
||||||
|
describe('EmptyMetricsSearch', () => {
|
||||||
|
it('shows select metric message when no query has been run', () => {
|
||||||
|
render(<EmptyMetricsSearch />);
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.getByText('Select a metric and run a query to see the results'),
|
||||||
|
).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows no data message when a query returned empty results', () => {
|
||||||
|
render(<EmptyMetricsSearch hasQueryResult />);
|
||||||
|
|
||||||
|
expect(screen.getByText('No data')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -8,7 +8,7 @@ import {
|
|||||||
MetrictypesTemporalityDTO,
|
MetrictypesTemporalityDTO,
|
||||||
MetrictypesTypeDTO,
|
MetrictypesTypeDTO,
|
||||||
} from 'api/generated/services/sigNoz.schemas';
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';
|
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||||
import * as useOptionsMenuHooks from 'container/OptionsMenu';
|
import * as useOptionsMenuHooks from 'container/OptionsMenu';
|
||||||
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
|
import * as useUpdateDashboardHooks from 'hooks/dashboard/useUpdateDashboard';
|
||||||
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
|
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
|
||||||
@@ -157,26 +157,6 @@ describe('Explorer', () => {
|
|||||||
jest.clearAllMocks();
|
jest.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should render Explorer query builder with metrics datasource selected', () => {
|
|
||||||
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
|
|
||||||
...mockUseQueryBuilderData,
|
|
||||||
stagedQuery: initialQueriesMap[DataSource.TRACES],
|
|
||||||
} as any);
|
|
||||||
|
|
||||||
(useSearchParams as jest.Mock).mockReturnValue([
|
|
||||||
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
|
|
||||||
mockSetSearchParams,
|
|
||||||
]);
|
|
||||||
|
|
||||||
renderExplorer();
|
|
||||||
|
|
||||||
expect(mockUpdateAllQueriesOperators).toHaveBeenCalledWith(
|
|
||||||
initialQueriesMap[DataSource.METRICS],
|
|
||||||
PANEL_TYPES.TIME_SERIES,
|
|
||||||
DataSource.METRICS,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should enable one chart per query toggle when oneChartPerQuery=true in URL', () => {
|
it('should enable one chart per query toggle when oneChartPerQuery=true in URL', () => {
|
||||||
(useSearchParams as jest.Mock).mockReturnValue([
|
(useSearchParams as jest.Mock).mockReturnValue([
|
||||||
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
|
new URLSearchParams({ isOneChartPerQueryEnabled: 'true' }),
|
||||||
@@ -241,20 +221,46 @@ describe('Explorer', () => {
|
|||||||
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should hide y axis unit selector for multiple metrics with different units', () => {
|
it('one chart per query toggle should be forced on and disabled when multiple metrics have different units', () => {
|
||||||
|
const mockQueryData = {
|
||||||
|
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||||
|
aggregateAttribute: {
|
||||||
|
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
|
||||||
|
.aggregateAttribute as BaseAutocompleteData),
|
||||||
|
key: 'metric1',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const mockStagedQueryWithMultipleQueries = {
|
||||||
|
...initialQueriesMap[DataSource.METRICS],
|
||||||
|
builder: {
|
||||||
|
...initialQueriesMap[DataSource.METRICS].builder,
|
||||||
|
queryData: [mockQueryData, mockQueryData],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
|
||||||
|
...mockUseQueryBuilderData,
|
||||||
|
stagedQuery: mockStagedQueryWithMultipleQueries,
|
||||||
|
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
|
||||||
|
|
||||||
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||||
isLoading: false,
|
isLoading: false,
|
||||||
isError: false,
|
isError: false,
|
||||||
metrics: [MOCK_METRIC_METADATA, MOCK_METRIC_METADATA],
|
metrics: [
|
||||||
|
{ ...MOCK_METRIC_METADATA, unit: 'seconds' },
|
||||||
|
{ ...MOCK_METRIC_METADATA, unit: 'bytes' },
|
||||||
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
|
(useSearchParams as jest.Mock).mockReturnValue([
|
||||||
|
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
|
||||||
|
mockSetSearchParams,
|
||||||
|
]);
|
||||||
|
|
||||||
renderExplorer();
|
renderExplorer();
|
||||||
|
|
||||||
const yAxisUnitSelector = screen.queryByTestId(Y_AXIS_UNIT_SELECTOR_TEST_ID);
|
|
||||||
expect(yAxisUnitSelector).not.toBeInTheDocument();
|
|
||||||
|
|
||||||
// One chart per query toggle should be disabled
|
|
||||||
const oneChartPerQueryToggle = screen.getByRole('switch');
|
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||||
|
expect(oneChartPerQueryToggle).toBeChecked();
|
||||||
expect(oneChartPerQueryToggle).toBeDisabled();
|
expect(oneChartPerQueryToggle).toBeDisabled();
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -327,4 +333,53 @@ describe('Explorer', () => {
|
|||||||
const oneChartPerQueryToggle = screen.getByRole('switch');
|
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||||
expect(oneChartPerQueryToggle).toBeEnabled();
|
expect(oneChartPerQueryToggle).toBeEnabled();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('one chart per query toggle should be enabled when multiple metrics have no unit', () => {
|
||||||
|
const metricWithNoUnit = {
|
||||||
|
type: MetrictypesTypeDTO.sum,
|
||||||
|
description: 'metric without unit',
|
||||||
|
unit: '',
|
||||||
|
temporality: MetrictypesTemporalityDTO.cumulative,
|
||||||
|
isMonotonic: true,
|
||||||
|
};
|
||||||
|
const mockQueryData = {
|
||||||
|
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||||
|
aggregateAttribute: {
|
||||||
|
...(initialQueriesMap[DataSource.METRICS].builder.queryData[0]
|
||||||
|
.aggregateAttribute as BaseAutocompleteData),
|
||||||
|
key: 'metric1',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const mockStagedQueryWithMultipleQueries = {
|
||||||
|
...initialQueriesMap[DataSource.METRICS],
|
||||||
|
builder: {
|
||||||
|
...initialQueriesMap[DataSource.METRICS].builder,
|
||||||
|
queryData: [mockQueryData, mockQueryData],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue(({
|
||||||
|
...mockUseQueryBuilderData,
|
||||||
|
stagedQuery: mockStagedQueryWithMultipleQueries,
|
||||||
|
} as Partial<QueryBuilderContextType>) as QueryBuilderContextType);
|
||||||
|
|
||||||
|
jest.spyOn(useGetMetricsHooks, 'useGetMetrics').mockReturnValue({
|
||||||
|
isLoading: false,
|
||||||
|
isError: false,
|
||||||
|
metrics: [metricWithNoUnit, metricWithNoUnit],
|
||||||
|
});
|
||||||
|
|
||||||
|
(useSearchParams as jest.Mock).mockReturnValue([
|
||||||
|
new URLSearchParams({ isOneChartPerQueryEnabled: 'false' }),
|
||||||
|
mockSetSearchParams,
|
||||||
|
]);
|
||||||
|
|
||||||
|
renderExplorer();
|
||||||
|
|
||||||
|
const oneChartPerQueryToggle = screen.getByRole('switch');
|
||||||
|
// Toggle should be enabled (not forced/disabled) since both metrics
|
||||||
|
// have the same unit (no unit) and should be viewable on the same graph
|
||||||
|
expect(oneChartPerQueryToggle).toBeEnabled();
|
||||||
|
expect(oneChartPerQueryToggle).not.toBeChecked();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
|
import { render, screen } from '@testing-library/react';
|
||||||
import userEvent from '@testing-library/user-event';
|
import userEvent from '@testing-library/user-event';
|
||||||
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
||||||
|
|
||||||
@@ -56,7 +56,7 @@ const mockSetYAxisUnit = jest.fn();
|
|||||||
|
|
||||||
function renderTimeSeries(
|
function renderTimeSeries(
|
||||||
overrides: Partial<TimeSeriesProps> = {},
|
overrides: Partial<TimeSeriesProps> = {},
|
||||||
): RenderResult {
|
): ReturnType<typeof render> {
|
||||||
return render(
|
return render(
|
||||||
<TimeSeries
|
<TimeSeries
|
||||||
showOneChartPerQuery={false}
|
showOneChartPerQuery={false}
|
||||||
@@ -84,45 +84,57 @@ describe('TimeSeries', () => {
|
|||||||
} as Partial<UseUpdateMetricMetadataReturnType>) as UseUpdateMetricMetadataReturnType);
|
} as Partial<UseUpdateMetricMetadataReturnType>) as UseUpdateMetricMetadataReturnType);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows select metric message when no metric is selected', () => {
|
||||||
|
renderTimeSeries({ metricNames: [] });
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.getByText('Select a metric and run a query to see the results'),
|
||||||
|
).toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('TimeSeriesView')).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('renders chart view when a metric is selected', () => {
|
||||||
|
renderTimeSeries({
|
||||||
|
metricNames: ['metric1'],
|
||||||
|
metricUnits: ['count'],
|
||||||
|
metrics: [MOCK_METRIC_METADATA],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(screen.getByText('TimeSeriesView')).toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
screen.queryByText('Select a metric and run a query to see the results'),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
|
it('should render a warning icon when a metric has no unit among multiple metrics', () => {
|
||||||
const user = userEvent.setup();
|
renderTimeSeries({
|
||||||
const { container } = renderTimeSeries({
|
|
||||||
metricUnits: ['', 'count'],
|
metricUnits: ['', 'count'],
|
||||||
metricNames: ['metric1', 'metric2'],
|
metricNames: ['metric1', 'metric2'],
|
||||||
metrics: [undefined, undefined],
|
metrics: [undefined, undefined],
|
||||||
});
|
});
|
||||||
|
|
||||||
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
|
expect(
|
||||||
user.hover(alertIcon);
|
screen.getByRole('img', { name: 'no unit warning' }),
|
||||||
waitFor(() =>
|
).toBeInTheDocument();
|
||||||
expect(
|
|
||||||
screen.findByText('This metric does not have a unit'),
|
|
||||||
).toBeInTheDocument(),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('clicking on warning icon tooltip should open metric details modal', async () => {
|
it('warning tooltip shows metric details link', async () => {
|
||||||
const user = userEvent.setup();
|
const user = userEvent.setup();
|
||||||
const { container } = renderTimeSeries({
|
renderTimeSeries({
|
||||||
metricUnits: ['', 'count'],
|
metricUnits: ['', 'count'],
|
||||||
metricNames: ['metric1', 'metric2'],
|
metricNames: ['metric1', 'metric2'],
|
||||||
metrics: [MOCK_METRIC_METADATA, MOCK_METRIC_METADATA],
|
metrics: [MOCK_METRIC_METADATA, MOCK_METRIC_METADATA],
|
||||||
yAxisUnit: 'seconds',
|
yAxisUnit: 'seconds',
|
||||||
});
|
});
|
||||||
|
|
||||||
const alertIcon = container.querySelector('.no-unit-warning') as HTMLElement;
|
const alertIcon = screen.getByRole('img', { name: 'no unit warning' });
|
||||||
user.hover(alertIcon);
|
await user.hover(alertIcon);
|
||||||
|
|
||||||
const metricDetailsLink = await screen.findByText('metric details');
|
expect(await screen.findByText('metric details')).toBeInTheDocument();
|
||||||
user.click(metricDetailsLink);
|
|
||||||
|
|
||||||
waitFor(() =>
|
|
||||||
expect(mockSetIsMetricDetailsOpen).toHaveBeenCalledWith('metric1'),
|
|
||||||
);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows Save unit button when metric had no unit but one is selected', async () => {
|
it('shows save unit prompt with enabled button when metric has no unit and a unit is selected', async () => {
|
||||||
const { findByText, getByRole } = renderTimeSeries({
|
renderTimeSeries({
|
||||||
metricUnits: [undefined],
|
metricUnits: [undefined],
|
||||||
metricNames: ['metric1'],
|
metricNames: ['metric1'],
|
||||||
metrics: [MOCK_METRIC_METADATA],
|
metrics: [MOCK_METRIC_METADATA],
|
||||||
@@ -131,38 +143,10 @@ describe('TimeSeries', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
await findByText('Save the selected unit for this metric?'),
|
await screen.findByText('Set the selected unit as the metric unit?'),
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
|
|
||||||
const yesButton = getByRole('button', { name: 'Yes' });
|
const yesButton = screen.getByRole('button', { name: 'Yes' });
|
||||||
expect(yesButton).toBeInTheDocument();
|
|
||||||
expect(yesButton).toBeEnabled();
|
expect(yesButton).toBeEnabled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('clicking on save unit button shoould upated metric metadata', async () => {
|
|
||||||
const user = userEvent.setup();
|
|
||||||
const { getByRole } = renderTimeSeries({
|
|
||||||
metricUnits: [''],
|
|
||||||
metricNames: ['metric1'],
|
|
||||||
metrics: [MOCK_METRIC_METADATA],
|
|
||||||
yAxisUnit: 'seconds',
|
|
||||||
showYAxisUnitSelector: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
const yesButton = getByRole('button', { name: /Yes/i });
|
|
||||||
await user.click(yesButton);
|
|
||||||
|
|
||||||
expect(mockUpdateMetricMetadata).toHaveBeenCalledWith(
|
|
||||||
{
|
|
||||||
pathParams: {
|
|
||||||
metricName: 'metric1',
|
|
||||||
},
|
|
||||||
data: expect.objectContaining({ unit: 'seconds' }),
|
|
||||||
},
|
|
||||||
expect.objectContaining({
|
|
||||||
onSuccess: expect.any(Function),
|
|
||||||
onError: expect.any(Function),
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -139,4 +139,14 @@ describe('getMetricUnits', () => {
|
|||||||
expect(result).toHaveLength(1);
|
expect(result).toHaveLength(1);
|
||||||
expect(result[0]).toBe('s');
|
expect(result[0]).toBe('s');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should return undefined for metrics with no unit', () => {
|
||||||
|
const result = getMetricUnits([
|
||||||
|
{ ...MOCK_METRIC_METADATA, unit: '' },
|
||||||
|
{ ...MOCK_METRIC_METADATA, unit: '' },
|
||||||
|
]);
|
||||||
|
expect(result).toHaveLength(2);
|
||||||
|
expect(result[0]).toBeUndefined();
|
||||||
|
expect(result[1]).toBeUndefined();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { useState } from 'react';
|
import { useState } from 'react';
|
||||||
import { Typography } from 'antd';
|
import { Typography } from 'antd';
|
||||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
import { MetricNameSelector } from 'container/QueryBuilder/filters';
|
||||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||||
import { DataSource } from 'types/common/queryBuilder';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
@@ -27,7 +27,7 @@ function MetricNameSearch({
|
|||||||
className="inspect-metrics-input-group metric-name-search"
|
className="inspect-metrics-input-group metric-name-search"
|
||||||
>
|
>
|
||||||
<Typography.Text>From</Typography.Text>
|
<Typography.Text>From</Typography.Text>
|
||||||
<AggregatorFilter
|
<MetricNameSelector
|
||||||
defaultValue={searchText ?? ''}
|
defaultValue={searchText ?? ''}
|
||||||
query={initialQueriesMap[DataSource.METRICS].builder.queryData[0]}
|
query={initialQueriesMap[DataSource.METRICS].builder.queryData[0]}
|
||||||
onSelect={handleSetMetricName}
|
onSelect={handleSetMetricName}
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||||
// eslint-disable-next-line no-restricted-imports
|
// eslint-disable-next-line no-restricted-imports
|
||||||
import { Provider } from 'react-redux';
|
import { Provider } from 'react-redux';
|
||||||
import { render, screen } from '@testing-library/react';
|
import { fireEvent, render, screen, within } from '@testing-library/react';
|
||||||
import userEvent from '@testing-library/user-event';
|
import userEvent from '@testing-library/user-event';
|
||||||
|
import * as metricsService from 'api/generated/services/metrics';
|
||||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||||
import * as appContextHooks from 'providers/App/App';
|
import * as appContextHooks from 'providers/App/App';
|
||||||
import store from 'store';
|
import store from 'store';
|
||||||
@@ -23,27 +24,31 @@ jest.mock('react-router-dom', () => ({
|
|||||||
}),
|
}),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
jest.mock('container/QueryBuilder/filters', () => ({
|
jest.mock('api/generated/services/metrics', () => ({
|
||||||
AggregatorFilter: ({ onSelect, onChange, defaultValue }: any): JSX.Element => (
|
useListMetrics: jest.fn().mockReturnValue({
|
||||||
<div data-testid="mock-aggregator-filter">
|
isFetching: false,
|
||||||
<input
|
isError: false,
|
||||||
data-testid="metric-name-input"
|
data: { data: { metrics: [] } },
|
||||||
defaultValue={defaultValue}
|
}),
|
||||||
onChange={(e: React.ChangeEvent<HTMLInputElement>): void =>
|
useUpdateMetricMetadata: jest.fn().mockReturnValue({
|
||||||
onChange({ key: e.target.value })
|
mutate: jest.fn(),
|
||||||
}
|
isLoading: false,
|
||||||
/>
|
}),
|
||||||
<button
|
|
||||||
type="button"
|
|
||||||
data-testid="select-metric-button"
|
|
||||||
onClick={(): void => onSelect({ key: 'test_metric_2' })}
|
|
||||||
>
|
|
||||||
Select Metric
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
),
|
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
jest.mock('hooks/useDebounce', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: <T,>(value: T): T => value,
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock(
|
||||||
|
'container/QueryBuilder/filters/QueryBuilderSearch/OptionRenderer',
|
||||||
|
() => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: ({ value }: { value: string }): JSX.Element => <span>{value}</span>,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||||
user: {
|
user: {
|
||||||
role: 'admin',
|
role: 'admin',
|
||||||
@@ -123,6 +128,24 @@ describe('QueryBuilder', () => {
|
|||||||
|
|
||||||
it('should call setCurrentMetricName when metric name is selected', async () => {
|
it('should call setCurrentMetricName when metric name is selected', async () => {
|
||||||
const user = userEvent.setup();
|
const user = userEvent.setup();
|
||||||
|
(metricsService.useListMetrics as jest.Mock).mockReturnValue({
|
||||||
|
isFetching: false,
|
||||||
|
isError: false,
|
||||||
|
data: {
|
||||||
|
data: {
|
||||||
|
metrics: [
|
||||||
|
{
|
||||||
|
metricName: 'test_metric_2',
|
||||||
|
type: 'Sum',
|
||||||
|
isMonotonic: true,
|
||||||
|
description: '',
|
||||||
|
temporality: 'cumulative',
|
||||||
|
unit: '',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
render(
|
render(
|
||||||
<QueryClientProvider client={queryClient}>
|
<QueryClientProvider client={queryClient}>
|
||||||
@@ -137,8 +160,12 @@ describe('QueryBuilder', () => {
|
|||||||
|
|
||||||
expect(screen.getByText('From')).toBeInTheDocument();
|
expect(screen.getByText('From')).toBeInTheDocument();
|
||||||
|
|
||||||
const selectButton = screen.getByTestId('select-metric-button');
|
const input = within(metricNameSearch).getByRole('combobox');
|
||||||
await user.click(selectButton);
|
fireEvent.change(input, { target: { value: 'test_metric_2' } });
|
||||||
|
|
||||||
|
const options = document.querySelectorAll('.ant-select-item');
|
||||||
|
expect(options.length).toBeGreaterThan(0);
|
||||||
|
await user.click(options[0] as HTMLElement);
|
||||||
|
|
||||||
expect(mockSetCurrentMetricName).toHaveBeenCalledWith('test_metric_2');
|
expect(mockSetCurrentMetricName).toHaveBeenCalledWith('test_metric_2');
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { useCallback, useMemo, useState } from 'react';
|
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||||
import { useCopyToClipboard } from 'react-use';
|
import { useCopyToClipboard } from 'react-use';
|
||||||
import {
|
import {
|
||||||
Button,
|
Button,
|
||||||
@@ -6,7 +6,7 @@ import {
|
|||||||
Input,
|
Input,
|
||||||
Menu,
|
Menu,
|
||||||
Popover,
|
Popover,
|
||||||
Skeleton,
|
Tooltip,
|
||||||
Typography,
|
Typography,
|
||||||
} from 'antd';
|
} from 'antd';
|
||||||
import { ColumnsType } from 'antd/es/table';
|
import { ColumnsType } from 'antd/es/table';
|
||||||
@@ -14,148 +14,50 @@ import logEvent from 'api/common/logEvent';
|
|||||||
import { useGetMetricAttributes } from 'api/generated/services/metrics';
|
import { useGetMetricAttributes } from 'api/generated/services/metrics';
|
||||||
import { ResizeTable } from 'components/ResizeTable';
|
import { ResizeTable } from 'components/ResizeTable';
|
||||||
import { DataType } from 'container/LogDetailedView/TableView';
|
import { DataType } from 'container/LogDetailedView/TableView';
|
||||||
import { useNotifications } from 'hooks/useNotifications';
|
import { Check, Copy, Info, Search, SquareArrowOutUpRight } from 'lucide-react';
|
||||||
import { Compass, Copy, Search } from 'lucide-react';
|
|
||||||
|
|
||||||
import { PANEL_TYPES } from '../../../constants/queryBuilder';
|
import { PANEL_TYPES } from '../../../constants/queryBuilder';
|
||||||
import ROUTES from '../../../constants/routes';
|
import ROUTES from '../../../constants/routes';
|
||||||
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
|
import { useHandleExplorerTabChange } from '../../../hooks/useHandleExplorerTabChange';
|
||||||
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
||||||
import MetricDetailsErrorState from './MetricDetailsErrorState';
|
|
||||||
import {
|
import {
|
||||||
AllAttributesEmptyTextProps,
|
AllAttributesEmptyText,
|
||||||
AllAttributesProps,
|
AllAttributesValue,
|
||||||
AllAttributesValueProps,
|
} from './AllAttributesValue';
|
||||||
} from './types';
|
import { AllAttributesProps } from './types';
|
||||||
import { getMetricDetailsQuery } from './utils';
|
import { getMetricDetailsQuery } from './utils';
|
||||||
|
|
||||||
const ALL_ATTRIBUTES_KEY = 'all-attributes';
|
const ALL_ATTRIBUTES_KEY = 'all-attributes';
|
||||||
|
const COPY_FEEDBACK_DURATION_MS = 1500;
|
||||||
function AllAttributesEmptyText({
|
|
||||||
isErrorAttributes,
|
|
||||||
refetchAttributes,
|
|
||||||
}: AllAttributesEmptyTextProps): JSX.Element {
|
|
||||||
if (isErrorAttributes) {
|
|
||||||
return (
|
|
||||||
<div className="all-attributes-error-state">
|
|
||||||
<MetricDetailsErrorState
|
|
||||||
refetch={refetchAttributes}
|
|
||||||
errorMessage="Something went wrong while fetching attributes"
|
|
||||||
/>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return <Typography.Text>No attributes found</Typography.Text>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function AllAttributesValue({
|
|
||||||
filterKey,
|
|
||||||
filterValue,
|
|
||||||
goToMetricsExploreWithAppliedAttribute,
|
|
||||||
}: AllAttributesValueProps): JSX.Element {
|
|
||||||
const [visibleIndex, setVisibleIndex] = useState(5);
|
|
||||||
const [attributePopoverKey, setAttributePopoverKey] = useState<string | null>(
|
|
||||||
null,
|
|
||||||
);
|
|
||||||
const [, copyToClipboard] = useCopyToClipboard();
|
|
||||||
const { notifications } = useNotifications();
|
|
||||||
|
|
||||||
const handleShowMore = (): void => {
|
|
||||||
setVisibleIndex(visibleIndex + 5);
|
|
||||||
};
|
|
||||||
|
|
||||||
const handleMenuItemClick = useCallback(
|
|
||||||
(key: string, attribute: string): void => {
|
|
||||||
switch (key) {
|
|
||||||
case 'open-in-explorer':
|
|
||||||
goToMetricsExploreWithAppliedAttribute(filterKey, attribute);
|
|
||||||
break;
|
|
||||||
case 'copy-attribute':
|
|
||||||
copyToClipboard(attribute);
|
|
||||||
notifications.success({
|
|
||||||
message: 'Attribute copied!',
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
setAttributePopoverKey(null);
|
|
||||||
},
|
|
||||||
[
|
|
||||||
goToMetricsExploreWithAppliedAttribute,
|
|
||||||
filterKey,
|
|
||||||
copyToClipboard,
|
|
||||||
notifications,
|
|
||||||
],
|
|
||||||
);
|
|
||||||
|
|
||||||
const attributePopoverContent = useCallback(
|
|
||||||
(attribute: string) => (
|
|
||||||
<Menu
|
|
||||||
items={[
|
|
||||||
{
|
|
||||||
icon: <Compass size={16} />,
|
|
||||||
label: 'Open in Explorer',
|
|
||||||
key: 'open-in-explorer',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
icon: <Copy size={16} />,
|
|
||||||
label: 'Copy Attribute',
|
|
||||||
key: 'copy-attribute',
|
|
||||||
},
|
|
||||||
]}
|
|
||||||
onClick={(info): void => {
|
|
||||||
handleMenuItemClick(info.key, attribute);
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
),
|
|
||||||
[handleMenuItemClick],
|
|
||||||
);
|
|
||||||
return (
|
|
||||||
<div className="all-attributes-value">
|
|
||||||
{filterValue.slice(0, visibleIndex).map((attribute) => (
|
|
||||||
<Popover
|
|
||||||
key={attribute}
|
|
||||||
content={attributePopoverContent(attribute)}
|
|
||||||
trigger="click"
|
|
||||||
open={attributePopoverKey === `${filterKey}-${attribute}`}
|
|
||||||
onOpenChange={(open): void => {
|
|
||||||
if (!open) {
|
|
||||||
setAttributePopoverKey(null);
|
|
||||||
} else {
|
|
||||||
setAttributePopoverKey(`${filterKey}-${attribute}`);
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<Button key={attribute} type="text">
|
|
||||||
<Typography.Text>{attribute}</Typography.Text>
|
|
||||||
</Button>
|
|
||||||
</Popover>
|
|
||||||
))}
|
|
||||||
{visibleIndex < filterValue.length && (
|
|
||||||
<Button type="text" onClick={handleShowMore}>
|
|
||||||
Show More
|
|
||||||
</Button>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function AllAttributes({
|
function AllAttributes({
|
||||||
metricName,
|
metricName,
|
||||||
metricType,
|
metricType,
|
||||||
|
isMonotonic,
|
||||||
|
minTime,
|
||||||
|
maxTime,
|
||||||
}: AllAttributesProps): JSX.Element {
|
}: AllAttributesProps): JSX.Element {
|
||||||
const [searchString, setSearchString] = useState('');
|
const [searchString, setSearchString] = useState('');
|
||||||
const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
|
const [activeKey, setActiveKey] = useState<string[]>([ALL_ATTRIBUTES_KEY]);
|
||||||
|
const [keyPopoverOpen, setKeyPopoverOpen] = useState<string | null>(null);
|
||||||
|
const [copiedKey, setCopiedKey] = useState<string | null>(null);
|
||||||
|
const [, copyToClipboard] = useCopyToClipboard();
|
||||||
|
const copyTimerRef = useRef<ReturnType<typeof setTimeout>>();
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: attributesData,
|
data: attributesData,
|
||||||
isLoading: isLoadingAttributes,
|
isLoading: isLoadingAttributes,
|
||||||
isError: isErrorAttributes,
|
isError: isErrorAttributes,
|
||||||
refetch: refetchAttributes,
|
refetch: refetchAttributes,
|
||||||
} = useGetMetricAttributes({
|
} = useGetMetricAttributes(
|
||||||
metricName,
|
{
|
||||||
});
|
metricName,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
start: minTime ? Math.floor(minTime / 1000000) : undefined,
|
||||||
|
end: maxTime ? Math.floor(maxTime / 1000000) : undefined,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
const attributes = useMemo(() => attributesData?.data.attributes ?? [], [
|
const attributes = useMemo(() => attributesData?.data.attributes ?? [], [
|
||||||
attributesData,
|
attributesData,
|
||||||
@@ -164,12 +66,15 @@ function AllAttributes({
|
|||||||
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
||||||
|
|
||||||
const goToMetricsExplorerwithAppliedSpaceAggregation = useCallback(
|
const goToMetricsExplorerwithAppliedSpaceAggregation = useCallback(
|
||||||
(groupBy: string) => {
|
(groupBy: string, valueCount?: number) => {
|
||||||
|
const limit = valueCount && valueCount > 250 ? 100 : undefined;
|
||||||
const compositeQuery = getMetricDetailsQuery(
|
const compositeQuery = getMetricDetailsQuery(
|
||||||
metricName,
|
metricName,
|
||||||
metricType,
|
metricType,
|
||||||
undefined,
|
undefined,
|
||||||
groupBy,
|
groupBy,
|
||||||
|
limit,
|
||||||
|
isMonotonic,
|
||||||
);
|
);
|
||||||
handleExplorerTabChange(
|
handleExplorerTabChange(
|
||||||
PANEL_TYPES.TIME_SERIES,
|
PANEL_TYPES.TIME_SERIES,
|
||||||
@@ -179,6 +84,7 @@ function AllAttributes({
|
|||||||
id: metricName,
|
id: metricName,
|
||||||
},
|
},
|
||||||
ROUTES.METRICS_EXPLORER_EXPLORER,
|
ROUTES.METRICS_EXPLORER_EXPLORER,
|
||||||
|
true,
|
||||||
);
|
);
|
||||||
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
||||||
[MetricsExplorerEventKeys.MetricName]: metricName,
|
[MetricsExplorerEventKeys.MetricName]: metricName,
|
||||||
@@ -187,15 +93,19 @@ function AllAttributes({
|
|||||||
[MetricsExplorerEventKeys.AttributeKey]: groupBy,
|
[MetricsExplorerEventKeys.AttributeKey]: groupBy,
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
[metricName, metricType, handleExplorerTabChange],
|
[metricName, metricType, isMonotonic, handleExplorerTabChange],
|
||||||
);
|
);
|
||||||
|
|
||||||
const goToMetricsExploreWithAppliedAttribute = useCallback(
|
const goToMetricsExploreWithAppliedAttribute = useCallback(
|
||||||
(key: string, value: string) => {
|
(key: string, value: string) => {
|
||||||
const compositeQuery = getMetricDetailsQuery(metricName, metricType, {
|
const compositeQuery = getMetricDetailsQuery(
|
||||||
key,
|
metricName,
|
||||||
value,
|
metricType,
|
||||||
});
|
{ key, value },
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
isMonotonic,
|
||||||
|
);
|
||||||
handleExplorerTabChange(
|
handleExplorerTabChange(
|
||||||
PANEL_TYPES.TIME_SERIES,
|
PANEL_TYPES.TIME_SERIES,
|
||||||
{
|
{
|
||||||
@@ -204,6 +114,7 @@ function AllAttributes({
|
|||||||
id: metricName,
|
id: metricName,
|
||||||
},
|
},
|
||||||
ROUTES.METRICS_EXPLORER_EXPLORER,
|
ROUTES.METRICS_EXPLORER_EXPLORER,
|
||||||
|
true,
|
||||||
);
|
);
|
||||||
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
||||||
[MetricsExplorerEventKeys.MetricName]: metricName,
|
[MetricsExplorerEventKeys.MetricName]: metricName,
|
||||||
@@ -213,7 +124,29 @@ function AllAttributes({
|
|||||||
[MetricsExplorerEventKeys.AttributeValue]: value,
|
[MetricsExplorerEventKeys.AttributeValue]: value,
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
[metricName, metricType, handleExplorerTabChange],
|
[metricName, metricType, isMonotonic, handleExplorerTabChange],
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleKeyMenuItemClick = useCallback(
|
||||||
|
(menuKey: string, attributeKey: string, valueCount?: number): void => {
|
||||||
|
switch (menuKey) {
|
||||||
|
case 'open-in-explorer':
|
||||||
|
goToMetricsExplorerwithAppliedSpaceAggregation(attributeKey, valueCount);
|
||||||
|
break;
|
||||||
|
case 'copy-key':
|
||||||
|
copyToClipboard(attributeKey);
|
||||||
|
setCopiedKey(attributeKey);
|
||||||
|
clearTimeout(copyTimerRef.current);
|
||||||
|
copyTimerRef.current = setTimeout(() => {
|
||||||
|
setCopiedKey(null);
|
||||||
|
}, COPY_FEEDBACK_DURATION_MS);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
setKeyPopoverOpen(null);
|
||||||
|
},
|
||||||
|
[goToMetricsExplorerwithAppliedSpaceAggregation, copyToClipboard],
|
||||||
);
|
);
|
||||||
|
|
||||||
const filteredAttributes = useMemo(
|
const filteredAttributes = useMemo(
|
||||||
@@ -254,21 +187,57 @@ function AllAttributes({
|
|||||||
width: 50,
|
width: 50,
|
||||||
align: 'left',
|
align: 'left',
|
||||||
className: 'metric-metadata-key',
|
className: 'metric-metadata-key',
|
||||||
render: (field: { label: string; contribution: number }): JSX.Element => (
|
render: (field: { label: string; contribution: number }): JSX.Element => {
|
||||||
<div className="all-attributes-key">
|
const isCopied = copiedKey === field.label;
|
||||||
<Button
|
return (
|
||||||
type="text"
|
<div className="all-attributes-key">
|
||||||
onClick={(): void =>
|
<Popover
|
||||||
goToMetricsExplorerwithAppliedSpaceAggregation(field.label)
|
content={
|
||||||
}
|
<Menu
|
||||||
>
|
items={[
|
||||||
<Typography.Text>{field.label}</Typography.Text>
|
{
|
||||||
</Button>
|
icon: <SquareArrowOutUpRight size={14} />,
|
||||||
<Typography.Text className="all-attributes-contribution">
|
label: 'Open in Metric Explorer',
|
||||||
{field.contribution}
|
key: 'open-in-explorer',
|
||||||
</Typography.Text>
|
},
|
||||||
</div>
|
{
|
||||||
),
|
icon: <Copy size={14} />,
|
||||||
|
label: 'Copy Key',
|
||||||
|
key: 'copy-key',
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
onClick={(info): void => {
|
||||||
|
handleKeyMenuItemClick(info.key, field.label, field.contribution);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
}
|
||||||
|
trigger="click"
|
||||||
|
placement="right"
|
||||||
|
overlayClassName="metric-details-popover attribute-key-popover-overlay"
|
||||||
|
open={keyPopoverOpen === field.label}
|
||||||
|
onOpenChange={(open): void => {
|
||||||
|
if (!open) {
|
||||||
|
setKeyPopoverOpen(null);
|
||||||
|
} else {
|
||||||
|
setKeyPopoverOpen(field.label);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<Button type="text">
|
||||||
|
<Typography.Text>{field.label}</Typography.Text>
|
||||||
|
</Button>
|
||||||
|
</Popover>
|
||||||
|
{isCopied && (
|
||||||
|
<span className="copy-feedback">
|
||||||
|
<Check size={12} />
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
<Typography.Text className="all-attributes-contribution">
|
||||||
|
{field.contribution}
|
||||||
|
</Typography.Text>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: 'Value',
|
title: 'Value',
|
||||||
@@ -291,7 +260,9 @@ function AllAttributes({
|
|||||||
],
|
],
|
||||||
[
|
[
|
||||||
goToMetricsExploreWithAppliedAttribute,
|
goToMetricsExploreWithAppliedAttribute,
|
||||||
goToMetricsExplorerwithAppliedSpaceAggregation,
|
handleKeyMenuItemClick,
|
||||||
|
keyPopoverOpen,
|
||||||
|
copiedKey,
|
||||||
],
|
],
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -300,7 +271,12 @@ function AllAttributes({
|
|||||||
{
|
{
|
||||||
label: (
|
label: (
|
||||||
<div className="metrics-accordion-header">
|
<div className="metrics-accordion-header">
|
||||||
<Typography.Text>All Attributes</Typography.Text>
|
<div className="all-attributes-header-title">
|
||||||
|
<Typography.Text>All Attributes</Typography.Text>
|
||||||
|
<Tooltip title="Showing attributes for the selected time range">
|
||||||
|
<Info size={14} />
|
||||||
|
</Tooltip>
|
||||||
|
</div>
|
||||||
<Input
|
<Input
|
||||||
className="all-attributes-search-input"
|
className="all-attributes-search-input"
|
||||||
placeholder="Search"
|
placeholder="Search"
|
||||||
@@ -329,7 +305,9 @@ function AllAttributes({
|
|||||||
className="metrics-accordion-content all-attributes-content"
|
className="metrics-accordion-content all-attributes-content"
|
||||||
scroll={{ y: 600 }}
|
scroll={{ y: 600 }}
|
||||||
locale={{
|
locale={{
|
||||||
emptyText: (
|
emptyText: isLoadingAttributes ? (
|
||||||
|
' '
|
||||||
|
) : (
|
||||||
<AllAttributesEmptyText
|
<AllAttributesEmptyText
|
||||||
isErrorAttributes={isErrorAttributes}
|
isErrorAttributes={isErrorAttributes}
|
||||||
refetchAttributes={refetchAttributes}
|
refetchAttributes={refetchAttributes}
|
||||||
@@ -350,14 +328,6 @@ function AllAttributes({
|
|||||||
],
|
],
|
||||||
);
|
);
|
||||||
|
|
||||||
if (isLoadingAttributes) {
|
|
||||||
return (
|
|
||||||
<div className="all-attributes-skeleton-container">
|
|
||||||
<Skeleton active paragraph={{ rows: 8 }} />
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Collapse
|
<Collapse
|
||||||
bordered
|
bordered
|
||||||
|
|||||||
@@ -0,0 +1,212 @@
|
|||||||
|
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||||
|
import { useCopyToClipboard } from 'react-use';
|
||||||
|
import { Button, Input, Menu, Popover, Tooltip, Typography } from 'antd';
|
||||||
|
import { Check, Copy, Search, SquareArrowOutUpRight } from 'lucide-react';
|
||||||
|
|
||||||
|
import MetricDetailsErrorState from './MetricDetailsErrorState';
|
||||||
|
import { AllAttributesEmptyTextProps, AllAttributesValueProps } from './types';
|
||||||
|
|
||||||
|
const INITIAL_VISIBLE_COUNT = 5;
|
||||||
|
const COPY_FEEDBACK_DURATION_MS = 1500;
|
||||||
|
|
||||||
|
export function AllAttributesEmptyText({
|
||||||
|
isErrorAttributes,
|
||||||
|
refetchAttributes,
|
||||||
|
}: AllAttributesEmptyTextProps): JSX.Element {
|
||||||
|
if (isErrorAttributes) {
|
||||||
|
return (
|
||||||
|
<div className="all-attributes-error-state">
|
||||||
|
<MetricDetailsErrorState
|
||||||
|
refetch={refetchAttributes}
|
||||||
|
errorMessage="Something went wrong while fetching attributes"
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return <Typography.Text>No attributes found</Typography.Text>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function AllAttributesValue({
|
||||||
|
filterKey,
|
||||||
|
filterValue,
|
||||||
|
goToMetricsExploreWithAppliedAttribute,
|
||||||
|
}: AllAttributesValueProps): JSX.Element {
|
||||||
|
const [attributePopoverKey, setAttributePopoverKey] = useState<string | null>(
|
||||||
|
null,
|
||||||
|
);
|
||||||
|
const [allValuesOpen, setAllValuesOpen] = useState(false);
|
||||||
|
const [allValuesSearch, setAllValuesSearch] = useState('');
|
||||||
|
const [copiedValue, setCopiedValue] = useState<string | null>(null);
|
||||||
|
const [, copyToClipboard] = useCopyToClipboard();
|
||||||
|
const copyTimerRef = useRef<ReturnType<typeof setTimeout>>();
|
||||||
|
|
||||||
|
const handleCopyWithFeedback = useCallback(
|
||||||
|
(value: string): void => {
|
||||||
|
copyToClipboard(value);
|
||||||
|
setCopiedValue(value);
|
||||||
|
clearTimeout(copyTimerRef.current);
|
||||||
|
copyTimerRef.current = setTimeout(() => {
|
||||||
|
setCopiedValue(null);
|
||||||
|
}, COPY_FEEDBACK_DURATION_MS);
|
||||||
|
},
|
||||||
|
[copyToClipboard],
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleMenuItemClick = useCallback(
|
||||||
|
(key: string, attribute: string): void => {
|
||||||
|
switch (key) {
|
||||||
|
case 'open-in-explorer':
|
||||||
|
goToMetricsExploreWithAppliedAttribute(filterKey, attribute);
|
||||||
|
break;
|
||||||
|
case 'copy-value':
|
||||||
|
handleCopyWithFeedback(attribute);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
setAttributePopoverKey(null);
|
||||||
|
},
|
||||||
|
[goToMetricsExploreWithAppliedAttribute, filterKey, handleCopyWithFeedback],
|
||||||
|
);
|
||||||
|
|
||||||
|
const attributePopoverContent = useCallback(
|
||||||
|
(attribute: string) => (
|
||||||
|
<Menu
|
||||||
|
items={[
|
||||||
|
{
|
||||||
|
icon: <SquareArrowOutUpRight size={14} />,
|
||||||
|
label: 'Open in Metric Explorer',
|
||||||
|
key: 'open-in-explorer',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
icon: <Copy size={14} />,
|
||||||
|
label: 'Copy Value',
|
||||||
|
key: 'copy-value',
|
||||||
|
},
|
||||||
|
]}
|
||||||
|
onClick={(info): void => {
|
||||||
|
handleMenuItemClick(info.key, attribute);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
),
|
||||||
|
[handleMenuItemClick],
|
||||||
|
);
|
||||||
|
|
||||||
|
const filteredAllValues = useMemo(
|
||||||
|
() =>
|
||||||
|
allValuesSearch
|
||||||
|
? filterValue.filter((v) =>
|
||||||
|
v.toLowerCase().includes(allValuesSearch.toLowerCase()),
|
||||||
|
)
|
||||||
|
: filterValue,
|
||||||
|
[filterValue, allValuesSearch],
|
||||||
|
);
|
||||||
|
|
||||||
|
const allValuesPopoverContent = (
|
||||||
|
<div className="all-values-popover">
|
||||||
|
<Input
|
||||||
|
placeholder="Search values"
|
||||||
|
size="small"
|
||||||
|
prefix={<Search size={12} />}
|
||||||
|
value={allValuesSearch}
|
||||||
|
onChange={(e): void => setAllValuesSearch(e.target.value)}
|
||||||
|
allowClear
|
||||||
|
/>
|
||||||
|
<div className="all-values-list">
|
||||||
|
{allValuesOpen &&
|
||||||
|
filteredAllValues.map((attribute) => {
|
||||||
|
const isCopied = copiedValue === attribute;
|
||||||
|
return (
|
||||||
|
<div key={attribute} className="all-values-item">
|
||||||
|
<Typography.Text ellipsis className="all-values-item-text">
|
||||||
|
{attribute}
|
||||||
|
</Typography.Text>
|
||||||
|
<div className="all-values-item-actions">
|
||||||
|
<Tooltip title={isCopied ? 'Copied!' : 'Copy value'}>
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
className={isCopied ? 'copy-success' : ''}
|
||||||
|
icon={isCopied ? <Check size={12} /> : <Copy size={12} />}
|
||||||
|
onClick={(): void => {
|
||||||
|
handleCopyWithFeedback(attribute);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</Tooltip>
|
||||||
|
<Tooltip title="Open in Metric Explorer">
|
||||||
|
<Button
|
||||||
|
type="text"
|
||||||
|
size="small"
|
||||||
|
icon={<SquareArrowOutUpRight size={12} />}
|
||||||
|
onClick={(): void => {
|
||||||
|
goToMetricsExploreWithAppliedAttribute(filterKey, attribute);
|
||||||
|
setAllValuesOpen(false);
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</Tooltip>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
{allValuesOpen && filteredAllValues.length === 0 && (
|
||||||
|
<Typography.Text type="secondary" className="all-values-empty">
|
||||||
|
No values found
|
||||||
|
</Typography.Text>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="all-attributes-value">
|
||||||
|
{filterValue.slice(0, INITIAL_VISIBLE_COUNT).map((attribute) => {
|
||||||
|
const isCopied = copiedValue === attribute;
|
||||||
|
return (
|
||||||
|
<div key={attribute} className="all-attributes-value-item">
|
||||||
|
<Popover
|
||||||
|
content={attributePopoverContent(attribute)}
|
||||||
|
trigger="click"
|
||||||
|
overlayClassName="metric-details-popover attribute-value-popover-overlay"
|
||||||
|
open={attributePopoverKey === `${filterKey}-${attribute}`}
|
||||||
|
onOpenChange={(open): void => {
|
||||||
|
if (!open) {
|
||||||
|
setAttributePopoverKey(null);
|
||||||
|
} else {
|
||||||
|
setAttributePopoverKey(`${filterKey}-${attribute}`);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
<Button type="text">
|
||||||
|
<Typography.Text>{attribute}</Typography.Text>
|
||||||
|
</Button>
|
||||||
|
</Popover>
|
||||||
|
{isCopied && (
|
||||||
|
<span className="copy-feedback">
|
||||||
|
<Check size={12} />
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
{filterValue.length > INITIAL_VISIBLE_COUNT && (
|
||||||
|
<Popover
|
||||||
|
content={allValuesPopoverContent}
|
||||||
|
trigger="click"
|
||||||
|
open={allValuesOpen}
|
||||||
|
onOpenChange={(open): void => {
|
||||||
|
setAllValuesOpen(open);
|
||||||
|
if (!open) {
|
||||||
|
setAllValuesSearch('');
|
||||||
|
setCopiedValue(null);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
overlayClassName="metric-details-popover all-values-popover-overlay"
|
||||||
|
>
|
||||||
|
<Button type="text" className="all-values-button">
|
||||||
|
All values ({filterValue.length})
|
||||||
|
</Button>
|
||||||
|
</Popover>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -9,9 +9,7 @@ import {
|
|||||||
} from 'api/generated/services/metrics';
|
} from 'api/generated/services/metrics';
|
||||||
import { QueryParams } from 'constants/query';
|
import { QueryParams } from 'constants/query';
|
||||||
import ROUTES from 'constants/routes';
|
import ROUTES from 'constants/routes';
|
||||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
|
||||||
import useUrlQuery from 'hooks/useUrlQuery';
|
import useUrlQuery from 'hooks/useUrlQuery';
|
||||||
import history from 'lib/history';
|
|
||||||
import { Bell, Grid } from 'lucide-react';
|
import { Bell, Grid } from 'lucide-react';
|
||||||
import { pluralize } from 'utils/pluralize';
|
import { pluralize } from 'utils/pluralize';
|
||||||
|
|
||||||
@@ -20,7 +18,6 @@ import { DashboardsAndAlertsPopoverProps } from './types';
|
|||||||
function DashboardsAndAlertsPopover({
|
function DashboardsAndAlertsPopover({
|
||||||
metricName,
|
metricName,
|
||||||
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
|
}: DashboardsAndAlertsPopoverProps): JSX.Element | null {
|
||||||
const { safeNavigate } = useSafeNavigate();
|
|
||||||
const params = useUrlQuery();
|
const params = useUrlQuery();
|
||||||
|
|
||||||
const {
|
const {
|
||||||
@@ -75,7 +72,7 @@ function DashboardsAndAlertsPopover({
|
|||||||
key={alert.alertId}
|
key={alert.alertId}
|
||||||
onClick={(): void => {
|
onClick={(): void => {
|
||||||
params.set(QueryParams.ruleId, alert.alertId);
|
params.set(QueryParams.ruleId, alert.alertId);
|
||||||
history.push(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`);
|
window.open(`${ROUTES.ALERT_OVERVIEW}?${params.toString()}`, '_blank');
|
||||||
}}
|
}}
|
||||||
className="dashboards-popover-content-item"
|
className="dashboards-popover-content-item"
|
||||||
>
|
>
|
||||||
@@ -95,10 +92,11 @@ function DashboardsAndAlertsPopover({
|
|||||||
<Typography.Link
|
<Typography.Link
|
||||||
key={dashboard.dashboardId}
|
key={dashboard.dashboardId}
|
||||||
onClick={(): void => {
|
onClick={(): void => {
|
||||||
safeNavigate(
|
window.open(
|
||||||
generatePath(ROUTES.DASHBOARD, {
|
generatePath(ROUTES.DASHBOARD, {
|
||||||
dashboardId: dashboard.dashboardId,
|
dashboardId: dashboard.dashboardId,
|
||||||
}),
|
}),
|
||||||
|
'_blank',
|
||||||
);
|
);
|
||||||
}}
|
}}
|
||||||
className="dashboards-popover-content-item"
|
className="dashboards-popover-content-item"
|
||||||
@@ -109,7 +107,7 @@ function DashboardsAndAlertsPopover({
|
|||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
}, [dashboards, safeNavigate]);
|
}, [dashboards]);
|
||||||
|
|
||||||
if (isLoadingAlerts || isLoadingDashboards) {
|
if (isLoadingAlerts || isLoadingDashboards) {
|
||||||
return (
|
return (
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import { Button, Skeleton, Tooltip, Typography } from 'antd';
|
import { Button, Spin, Tooltip, Typography } from 'antd';
|
||||||
import { useGetMetricHighlights } from 'api/generated/services/metrics';
|
import { useGetMetricHighlights } from 'api/generated/services/metrics';
|
||||||
import { InfoIcon } from 'lucide-react';
|
import { InfoIcon } from 'lucide-react';
|
||||||
|
|
||||||
@@ -39,17 +39,6 @@ function Highlights({ metricName }: HighlightsProps): JSX.Element {
|
|||||||
metricHighlights?.lastReceived,
|
metricHighlights?.lastReceived,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (isLoadingMetricHighlights) {
|
|
||||||
return (
|
|
||||||
<div
|
|
||||||
className="metric-details-content-grid"
|
|
||||||
data-testid="metric-highlights-loading-state"
|
|
||||||
>
|
|
||||||
<Skeleton title={false} paragraph={{ rows: 2 }} active />
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isErrorMetricHighlights) {
|
if (isErrorMetricHighlights) {
|
||||||
return (
|
return (
|
||||||
<div className="metric-details-content-grid">
|
<div className="metric-details-content-grid">
|
||||||
@@ -89,32 +78,41 @@ function Highlights({ metricName }: HighlightsProps): JSX.Element {
|
|||||||
</Typography.Text>
|
</Typography.Text>
|
||||||
</div>
|
</div>
|
||||||
<div className="values-row">
|
<div className="values-row">
|
||||||
<Typography.Text
|
{isLoadingMetricHighlights ? (
|
||||||
className="metric-details-grid-value"
|
<div className="metric-highlights-loading-inline">
|
||||||
data-testid="metric-highlights-data-points"
|
<Spin size="small" />
|
||||||
>
|
<Typography.Text type="secondary">Loading metric stats</Typography.Text>
|
||||||
<Tooltip title={metricHighlights?.dataPoints?.toLocaleString()}>
|
</div>
|
||||||
{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
|
) : (
|
||||||
</Tooltip>
|
<>
|
||||||
</Typography.Text>
|
<Typography.Text
|
||||||
<Typography.Text
|
className="metric-details-grid-value"
|
||||||
className="metric-details-grid-value"
|
data-testid="metric-highlights-data-points"
|
||||||
data-testid="metric-highlights-time-series-total"
|
>
|
||||||
>
|
<Tooltip title={metricHighlights?.dataPoints?.toLocaleString()}>
|
||||||
<Tooltip
|
{formatNumberIntoHumanReadableFormat(metricHighlights?.dataPoints ?? 0)}
|
||||||
title="Active time series are those that have received data points in the last 1
|
</Tooltip>
|
||||||
hour."
|
</Typography.Text>
|
||||||
placement="top"
|
<Typography.Text
|
||||||
>
|
className="metric-details-grid-value"
|
||||||
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
|
data-testid="metric-highlights-time-series-total"
|
||||||
</Tooltip>
|
>
|
||||||
</Typography.Text>
|
<Tooltip
|
||||||
<Typography.Text
|
title="Active time series are those that have received data points in the last 1
|
||||||
className="metric-details-grid-value"
|
hour."
|
||||||
data-testid="metric-highlights-last-received"
|
placement="top"
|
||||||
>
|
>
|
||||||
<Tooltip title={lastReceivedText}>{lastReceivedText}</Tooltip>
|
<span>{`${timeSeriesTotal} total ⎯ ${timeSeriesActive} active`}</span>
|
||||||
</Typography.Text>
|
</Tooltip>
|
||||||
|
</Typography.Text>
|
||||||
|
<Typography.Text
|
||||||
|
className="metric-details-grid-value"
|
||||||
|
data-testid="metric-highlights-last-received"
|
||||||
|
>
|
||||||
|
<Tooltip title={lastReceivedText}>{lastReceivedText}</Tooltip>
|
||||||
|
</Typography.Text>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||||
import { useQueryClient } from 'react-query';
|
import { useQueryClient } from 'react-query';
|
||||||
import { Button, Collapse, Input, Select, Skeleton, Typography } from 'antd';
|
import { Button, Collapse, Input, Select, Spin, Typography } from 'antd';
|
||||||
import { ColumnsType } from 'antd/es/table';
|
import { ColumnsType } from 'antd/es/table';
|
||||||
import logEvent from 'api/common/logEvent';
|
import logEvent from 'api/common/logEvent';
|
||||||
import {
|
import {
|
||||||
@@ -334,7 +334,7 @@ function Metadata({
|
|||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
setIsEditing(true);
|
setIsEditing(true);
|
||||||
}}
|
}}
|
||||||
disabled={isUpdatingMetricsMetadata}
|
disabled={isUpdatingMetricsMetadata || isLoadingMetricMetadata}
|
||||||
>
|
>
|
||||||
<Edit2 size={14} />
|
<Edit2 size={14} />
|
||||||
<Typography.Text>Edit</Typography.Text>
|
<Typography.Text>Edit</Typography.Text>
|
||||||
@@ -345,6 +345,7 @@ function Metadata({
|
|||||||
isEditing,
|
isEditing,
|
||||||
isErrorMetricMetadata,
|
isErrorMetricMetadata,
|
||||||
isUpdatingMetricsMetadata,
|
isUpdatingMetricsMetadata,
|
||||||
|
isLoadingMetricMetadata,
|
||||||
cancelEdit,
|
cancelEdit,
|
||||||
handleSave,
|
handleSave,
|
||||||
]);
|
]);
|
||||||
@@ -359,7 +360,11 @@ function Metadata({
|
|||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
key: 'metric-metadata',
|
key: 'metric-metadata',
|
||||||
children: isErrorMetricMetadata ? (
|
children: isLoadingMetricMetadata ? (
|
||||||
|
<div className="metrics-accordion-loading-state">
|
||||||
|
<Spin size="small" />
|
||||||
|
</div>
|
||||||
|
) : isErrorMetricMetadata ? (
|
||||||
<div className="metric-metadata-error-state">
|
<div className="metric-metadata-error-state">
|
||||||
<MetricDetailsErrorState
|
<MetricDetailsErrorState
|
||||||
refetch={refetchMetricMetadata}
|
refetch={refetchMetricMetadata}
|
||||||
@@ -381,20 +386,13 @@ function Metadata({
|
|||||||
[
|
[
|
||||||
actionButton,
|
actionButton,
|
||||||
columns,
|
columns,
|
||||||
|
isLoadingMetricMetadata,
|
||||||
isErrorMetricMetadata,
|
isErrorMetricMetadata,
|
||||||
refetchMetricMetadata,
|
refetchMetricMetadata,
|
||||||
tableData,
|
tableData,
|
||||||
],
|
],
|
||||||
);
|
);
|
||||||
|
|
||||||
if (isLoadingMetricMetadata) {
|
|
||||||
return (
|
|
||||||
<div className="metrics-metadata-skeleton-container">
|
|
||||||
<Skeleton active paragraph={{ rows: 8 }} />
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Collapse
|
<Collapse
|
||||||
bordered
|
bordered
|
||||||
|
|||||||
@@ -52,6 +52,13 @@
|
|||||||
align-items: center;
|
align-items: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.metric-highlights-loading-inline {
|
||||||
|
grid-column: 1 / -1;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
.metric-highlights-error-state {
|
.metric-highlights-error-state {
|
||||||
display: flex;
|
display: flex;
|
||||||
gap: 8px;
|
gap: 8px;
|
||||||
@@ -120,12 +127,11 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.metrics-metadata-skeleton-container {
|
.metrics-accordion-loading-state {
|
||||||
height: 330px;
|
display: flex;
|
||||||
}
|
justify-content: center;
|
||||||
|
align-items: center;
|
||||||
.all-attributes-skeleton-container {
|
padding: 24px;
|
||||||
height: 600px;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.metrics-accordion {
|
.metrics-accordion {
|
||||||
@@ -153,6 +159,18 @@
|
|||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
height: 36px;
|
height: 36px;
|
||||||
|
|
||||||
|
.all-attributes-header-title {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 6px;
|
||||||
|
|
||||||
|
.lucide-info {
|
||||||
|
cursor: pointer;
|
||||||
|
color: var(--bg-vanilla-400);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
.ant-typography {
|
.ant-typography {
|
||||||
font-family: 'Geist Mono';
|
font-family: 'Geist Mono';
|
||||||
color: var(--bg-robin-400);
|
color: var(--bg-robin-400);
|
||||||
@@ -186,6 +204,7 @@
|
|||||||
.all-attributes-key {
|
.all-attributes-key {
|
||||||
display: flex;
|
display: flex;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
|
align-items: center;
|
||||||
.ant-btn {
|
.ant-btn {
|
||||||
.ant-typography:first-child {
|
.ant-typography:first-child {
|
||||||
font-family: 'Geist Mono';
|
font-family: 'Geist Mono';
|
||||||
@@ -193,17 +212,15 @@
|
|||||||
background-color: transparent;
|
background-color: transparent;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
.copy-feedback {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
color: var(--bg-forest-500);
|
||||||
|
animation: fade-in-out 1.5s ease-in-out;
|
||||||
|
}
|
||||||
.all-attributes-contribution {
|
.all-attributes-contribution {
|
||||||
font-family: 'Geist Mono';
|
font-family: 'Geist Mono';
|
||||||
color: var(--bg-vanilla-400);
|
color: var(--bg-vanilla-400);
|
||||||
background-color: rgba(171, 189, 255, 0.1);
|
|
||||||
height: 24px;
|
|
||||||
min-width: 24px;
|
|
||||||
border-radius: 50%;
|
|
||||||
text-align: center;
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -228,6 +245,18 @@
|
|||||||
background: var(--bg-slate-300);
|
background: var(--bg-slate-300);
|
||||||
border-radius: 1px;
|
border-radius: 1px;
|
||||||
}
|
}
|
||||||
|
.all-attributes-value-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 4px;
|
||||||
|
|
||||||
|
.copy-feedback {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
color: var(--bg-forest-500);
|
||||||
|
animation: fade-in-out 1.5s ease-in-out;
|
||||||
|
}
|
||||||
|
}
|
||||||
.ant-btn {
|
.ant-btn {
|
||||||
text-align: left;
|
text-align: left;
|
||||||
width: fit-content;
|
width: fit-content;
|
||||||
@@ -259,10 +288,8 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.metric-metadata-key {
|
.metric-metadata-key {
|
||||||
cursor: pointer;
|
|
||||||
padding-left: 10px;
|
padding-left: 10px;
|
||||||
vertical-align: middle;
|
vertical-align: middle;
|
||||||
text-align: center;
|
|
||||||
.field-renderer-container {
|
.field-renderer-container {
|
||||||
.label {
|
.label {
|
||||||
color: var(--bg-vanilla-400);
|
color: var(--bg-vanilla-400);
|
||||||
@@ -383,9 +410,8 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.all-attributes-key {
|
.all-attributes-key {
|
||||||
.ant-typography:last-child {
|
.all-attributes-contribution {
|
||||||
color: var(--bg-vanilla-200);
|
color: var(--bg-slate-400);
|
||||||
background-color: var(--bg-robin-300);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -448,3 +474,146 @@
|
|||||||
height: 100%;
|
height: 100%;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.attribute-key-popover-overlay {
|
||||||
|
.ant-popover-inner {
|
||||||
|
padding: 0 !important;
|
||||||
|
border-radius: 4px;
|
||||||
|
border: 1px solid var(--bg-slate-400);
|
||||||
|
background: linear-gradient(
|
||||||
|
139deg,
|
||||||
|
rgba(18, 19, 23, 0.8) 0%,
|
||||||
|
rgba(18, 19, 23, 0.9) 98.68%
|
||||||
|
);
|
||||||
|
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
backdrop-filter: blur(20px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-menu {
|
||||||
|
font-size: 12px;
|
||||||
|
background: transparent;
|
||||||
|
|
||||||
|
.ant-menu-item {
|
||||||
|
height: 32px;
|
||||||
|
line-height: 32px;
|
||||||
|
padding: 0 12px;
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-popover-overlay {
|
||||||
|
.ant-popover-inner {
|
||||||
|
padding: 0 !important;
|
||||||
|
border-radius: 4px;
|
||||||
|
border: 1px solid var(--bg-slate-400);
|
||||||
|
background: linear-gradient(
|
||||||
|
139deg,
|
||||||
|
rgba(18, 19, 23, 0.8) 0%,
|
||||||
|
rgba(18, 19, 23, 0.9) 98.68%
|
||||||
|
);
|
||||||
|
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
backdrop-filter: blur(20px);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-popover {
|
||||||
|
width: 320px;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 8px;
|
||||||
|
padding: 12px;
|
||||||
|
|
||||||
|
.all-values-list {
|
||||||
|
max-height: 300px;
|
||||||
|
overflow-y: auto;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: 4px;
|
||||||
|
|
||||||
|
&::-webkit-scrollbar {
|
||||||
|
width: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::-webkit-scrollbar-track {
|
||||||
|
background: transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
&::-webkit-scrollbar-thumb {
|
||||||
|
background: var(--bg-slate-300);
|
||||||
|
border-radius: 1px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
padding: 4px 8px;
|
||||||
|
border-radius: 4px;
|
||||||
|
gap: 8px;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
background: rgba(255, 255, 255, 0.04);
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-item-text {
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
font-family: 'Geist Mono';
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-item-actions {
|
||||||
|
display: flex;
|
||||||
|
gap: 2px;
|
||||||
|
flex-shrink: 0;
|
||||||
|
|
||||||
|
.copy-success {
|
||||||
|
color: var(--bg-forest-500);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-empty {
|
||||||
|
padding: 8px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-button {
|
||||||
|
color: var(--bg-robin-400) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.lightMode {
|
||||||
|
.attribute-key-popover-overlay,
|
||||||
|
.all-values-popover-overlay {
|
||||||
|
.ant-popover-inner {
|
||||||
|
border: 1px solid var(--bg-vanilla-400);
|
||||||
|
background: var(--bg-vanilla-100) !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.all-values-popover {
|
||||||
|
.all-values-item {
|
||||||
|
&:hover {
|
||||||
|
background: rgba(0, 0, 0, 0.04);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@keyframes fade-in-out {
|
||||||
|
0% {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
15% {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
85% {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
100% {
|
||||||
|
opacity: 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,10 +1,14 @@
|
|||||||
import { useCallback, useEffect, useMemo } from 'react';
|
import { useCallback, useEffect, useMemo } from 'react';
|
||||||
|
// eslint-disable-next-line no-restricted-imports
|
||||||
|
import { useSelector } from 'react-redux';
|
||||||
import { Color } from '@signozhq/design-tokens';
|
import { Color } from '@signozhq/design-tokens';
|
||||||
import { Button, Divider, Drawer, Typography } from 'antd';
|
import { Button, Divider, Drawer, Typography } from 'antd';
|
||||||
import logEvent from 'api/common/logEvent';
|
import logEvent from 'api/common/logEvent';
|
||||||
import { useGetMetricMetadata } from 'api/generated/services/metrics';
|
import { useGetMetricMetadata } from 'api/generated/services/metrics';
|
||||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||||
import { Compass, Crosshair, X } from 'lucide-react';
|
import { Compass, Crosshair, X } from 'lucide-react';
|
||||||
|
import { AppState } from 'store/reducers';
|
||||||
|
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||||
|
|
||||||
import { PANEL_TYPES } from '../../../constants/queryBuilder';
|
import { PANEL_TYPES } from '../../../constants/queryBuilder';
|
||||||
import ROUTES from '../../../constants/routes';
|
import ROUTES from '../../../constants/routes';
|
||||||
@@ -29,6 +33,9 @@ function MetricDetails({
|
|||||||
}: MetricDetailsProps): JSX.Element {
|
}: MetricDetailsProps): JSX.Element {
|
||||||
const isDarkMode = useIsDarkMode();
|
const isDarkMode = useIsDarkMode();
|
||||||
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
const { handleExplorerTabChange } = useHandleExplorerTabChange();
|
||||||
|
const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
|
||||||
|
(state) => state.globalTime,
|
||||||
|
);
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: metricMetadataResponse,
|
data: metricMetadataResponse,
|
||||||
@@ -73,7 +80,14 @@ function MetricDetails({
|
|||||||
|
|
||||||
const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
|
const goToMetricsExplorerwithSelectedMetric = useCallback(() => {
|
||||||
if (metricName) {
|
if (metricName) {
|
||||||
const compositeQuery = getMetricDetailsQuery(metricName, metadata?.type);
|
const compositeQuery = getMetricDetailsQuery(
|
||||||
|
metricName,
|
||||||
|
metadata?.type,
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
metadata?.isMonotonic,
|
||||||
|
);
|
||||||
handleExplorerTabChange(
|
handleExplorerTabChange(
|
||||||
PANEL_TYPES.TIME_SERIES,
|
PANEL_TYPES.TIME_SERIES,
|
||||||
{
|
{
|
||||||
@@ -82,6 +96,7 @@ function MetricDetails({
|
|||||||
id: metricName,
|
id: metricName,
|
||||||
},
|
},
|
||||||
ROUTES.METRICS_EXPLORER_EXPLORER,
|
ROUTES.METRICS_EXPLORER_EXPLORER,
|
||||||
|
true,
|
||||||
);
|
);
|
||||||
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
logEvent(MetricsExplorerEvents.OpenInExplorerClicked, {
|
||||||
[MetricsExplorerEventKeys.MetricName]: metricName,
|
[MetricsExplorerEventKeys.MetricName]: metricName,
|
||||||
@@ -89,7 +104,12 @@ function MetricDetails({
|
|||||||
[MetricsExplorerEventKeys.Modal]: 'metric-details',
|
[MetricsExplorerEventKeys.Modal]: 'metric-details',
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}, [metricName, handleExplorerTabChange, metadata?.type]);
|
}, [
|
||||||
|
metricName,
|
||||||
|
handleExplorerTabChange,
|
||||||
|
metadata?.type,
|
||||||
|
metadata?.isMonotonic,
|
||||||
|
]);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
logEvent(MetricsExplorerEvents.ModalOpened, {
|
logEvent(MetricsExplorerEvents.ModalOpened, {
|
||||||
@@ -100,6 +120,21 @@ function MetricDetails({
|
|||||||
const isActionButtonDisabled =
|
const isActionButtonDisabled =
|
||||||
!metricName || isLoadingMetricMetadata || isErrorMetricMetadata;
|
!metricName || isLoadingMetricMetadata || isErrorMetricMetadata;
|
||||||
|
|
||||||
|
const handleDrawerClose = useCallback(
|
||||||
|
(e: React.MouseEvent | React.KeyboardEvent): void => {
|
||||||
|
if ('key' in e && e.key === 'Escape') {
|
||||||
|
const openPopover = document.querySelector(
|
||||||
|
'.metric-details-popover:not(.ant-popover-hidden)',
|
||||||
|
);
|
||||||
|
if (openPopover) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
onClose();
|
||||||
|
},
|
||||||
|
[onClose],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Drawer
|
<Drawer
|
||||||
width="60%"
|
width="60%"
|
||||||
@@ -137,7 +172,7 @@ function MetricDetails({
|
|||||||
</div>
|
</div>
|
||||||
}
|
}
|
||||||
placement="right"
|
placement="right"
|
||||||
onClose={onClose}
|
onClose={handleDrawerClose}
|
||||||
open={isOpen}
|
open={isOpen}
|
||||||
style={{
|
style={{
|
||||||
overscrollBehavior: 'contain',
|
overscrollBehavior: 'contain',
|
||||||
@@ -157,7 +192,13 @@ function MetricDetails({
|
|||||||
isLoadingMetricMetadata={isLoadingMetricMetadata}
|
isLoadingMetricMetadata={isLoadingMetricMetadata}
|
||||||
refetchMetricMetadata={refetchMetricMetadata}
|
refetchMetricMetadata={refetchMetricMetadata}
|
||||||
/>
|
/>
|
||||||
<AllAttributes metricName={metricName} metricType={metadata?.type} />
|
<AllAttributes
|
||||||
|
metricName={metricName}
|
||||||
|
metricType={metadata?.type}
|
||||||
|
isMonotonic={metadata?.isMonotonic}
|
||||||
|
minTime={minTime}
|
||||||
|
maxTime={maxTime}
|
||||||
|
/>
|
||||||
</div>
|
</div>
|
||||||
</Drawer>
|
</Drawer>
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
import * as reactUseHooks from 'react-use';
|
|
||||||
import { render, screen } from '@testing-library/react';
|
import { render, screen } from '@testing-library/react';
|
||||||
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
import * as metricsExplorerHooks from 'api/generated/services/metrics';
|
||||||
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
import * as useHandleExplorerTabChange from 'hooks/useHandleExplorerTabChange';
|
|
||||||
import { userEvent } from 'tests/test-utils';
|
import { userEvent } from 'tests/test-utils';
|
||||||
|
|
||||||
import ROUTES from '../../../../constants/routes';
|
import ROUTES from '../../../../constants/routes';
|
||||||
import AllAttributes, { AllAttributesValue } from '../AllAttributes';
|
import AllAttributes from '../AllAttributes';
|
||||||
|
import { AllAttributesValue } from '../AllAttributesValue';
|
||||||
import { getMockMetricAttributesData, MOCK_METRIC_NAME } from './testUtlls';
|
import { getMockMetricAttributesData, MOCK_METRIC_NAME } from './testUtlls';
|
||||||
|
|
||||||
jest.mock('react-router-dom', () => ({
|
jest.mock('react-router-dom', () => ({
|
||||||
@@ -15,17 +14,6 @@ jest.mock('react-router-dom', () => ({
|
|||||||
pathname: `${ROUTES.METRICS_EXPLORER}`,
|
pathname: `${ROUTES.METRICS_EXPLORER}`,
|
||||||
}),
|
}),
|
||||||
}));
|
}));
|
||||||
const mockHandleExplorerTabChange = jest.fn();
|
|
||||||
jest
|
|
||||||
.spyOn(useHandleExplorerTabChange, 'useHandleExplorerTabChange')
|
|
||||||
.mockReturnValue({
|
|
||||||
handleExplorerTabChange: mockHandleExplorerTabChange,
|
|
||||||
});
|
|
||||||
|
|
||||||
const mockUseCopyToClipboard = jest.fn();
|
|
||||||
jest
|
|
||||||
.spyOn(reactUseHooks, 'useCopyToClipboard')
|
|
||||||
.mockReturnValue([{ value: 'value1' }, mockUseCopyToClipboard] as any);
|
|
||||||
|
|
||||||
const useGetMetricAttributesMock = jest.spyOn(
|
const useGetMetricAttributesMock = jest.spyOn(
|
||||||
metricsExplorerHooks,
|
metricsExplorerHooks,
|
||||||
@@ -34,12 +22,13 @@ const useGetMetricAttributesMock = jest.spyOn(
|
|||||||
|
|
||||||
describe('AllAttributes', () => {
|
describe('AllAttributes', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
useGetMetricAttributesMock.mockReturnValue({
|
useGetMetricAttributesMock.mockReturnValue({
|
||||||
...getMockMetricAttributesData(),
|
...getMockMetricAttributesData(),
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders attributes section with title', () => {
|
it('renders attribute keys, values, and value counts from API data', () => {
|
||||||
render(
|
render(
|
||||||
<AllAttributes
|
<AllAttributes
|
||||||
metricName={MOCK_METRIC_NAME}
|
metricName={MOCK_METRIC_NAME}
|
||||||
@@ -47,39 +36,13 @@ describe('AllAttributes', () => {
|
|||||||
/>,
|
/>,
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(screen.getByText('All Attributes')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('renders all attribute keys and values', () => {
|
|
||||||
render(
|
|
||||||
<AllAttributes
|
|
||||||
metricName={MOCK_METRIC_NAME}
|
|
||||||
metricType={MetrictypesTypeDTO.gauge}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Check attribute keys are rendered
|
|
||||||
expect(screen.getByText('attribute1')).toBeInTheDocument();
|
expect(screen.getByText('attribute1')).toBeInTheDocument();
|
||||||
expect(screen.getByText('attribute2')).toBeInTheDocument();
|
expect(screen.getByText('attribute2')).toBeInTheDocument();
|
||||||
|
|
||||||
// Check attribute values are rendered
|
|
||||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
expect(screen.getByText('value1')).toBeInTheDocument();
|
||||||
expect(screen.getByText('value2')).toBeInTheDocument();
|
expect(screen.getByText('value2')).toBeInTheDocument();
|
||||||
expect(screen.getByText('value3')).toBeInTheDocument();
|
expect(screen.getByText('value3')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders value counts correctly', () => {
|
|
||||||
render(
|
|
||||||
<AllAttributes
|
|
||||||
metricName={MOCK_METRIC_NAME}
|
|
||||||
metricType={MetrictypesTypeDTO.gauge}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
|
|
||||||
expect(screen.getByText('2')).toBeInTheDocument(); // For attribute1
|
|
||||||
expect(screen.getByText('1')).toBeInTheDocument(); // For attribute2
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles empty attributes array', () => {
|
it('handles empty attributes array', () => {
|
||||||
useGetMetricAttributesMock.mockReturnValue({
|
useGetMetricAttributesMock.mockReturnValue({
|
||||||
...getMockMetricAttributesData({
|
...getMockMetricAttributesData({
|
||||||
@@ -100,7 +63,7 @@ describe('AllAttributes', () => {
|
|||||||
expect(screen.getByText('No attributes found')).toBeInTheDocument();
|
expect(screen.getByText('No attributes found')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('clicking on an attribute key opens the explorer with the attribute filter applied', async () => {
|
it('clicking on an attribute key shows popover with Open in Metric Explorer option', async () => {
|
||||||
render(
|
render(
|
||||||
<AllAttributes
|
<AllAttributes
|
||||||
metricName={MOCK_METRIC_NAME}
|
metricName={MOCK_METRIC_NAME}
|
||||||
@@ -108,7 +71,8 @@ describe('AllAttributes', () => {
|
|||||||
/>,
|
/>,
|
||||||
);
|
);
|
||||||
await userEvent.click(screen.getByText('attribute1'));
|
await userEvent.click(screen.getByText('attribute1'));
|
||||||
expect(mockHandleExplorerTabChange).toHaveBeenCalled();
|
expect(screen.getByText('Open in Metric Explorer')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Copy Key')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('filters attributes based on search input', async () => {
|
it('filters attributes based on search input', async () => {
|
||||||
@@ -123,26 +87,66 @@ describe('AllAttributes', () => {
|
|||||||
expect(screen.getByText('attribute1')).toBeInTheDocument();
|
expect(screen.getByText('attribute1')).toBeInTheDocument();
|
||||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
expect(screen.getByText('value1')).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows error state when attribute fetching fails', () => {
|
||||||
|
useGetMetricAttributesMock.mockReturnValue({
|
||||||
|
...getMockMetricAttributesData(
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
attributes: [],
|
||||||
|
totalKeys: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
isError: true,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
});
|
||||||
|
render(
|
||||||
|
<AllAttributes
|
||||||
|
metricName={MOCK_METRIC_NAME}
|
||||||
|
metricType={MetrictypesTypeDTO.gauge}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.getByText('Something went wrong while fetching attributes'),
|
||||||
|
).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not show misleading empty text while loading', () => {
|
||||||
|
useGetMetricAttributesMock.mockReturnValue({
|
||||||
|
...getMockMetricAttributesData(
|
||||||
|
{
|
||||||
|
data: {
|
||||||
|
attributes: [],
|
||||||
|
totalKeys: 0,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
isLoading: true,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
});
|
||||||
|
render(
|
||||||
|
<AllAttributes
|
||||||
|
metricName={MOCK_METRIC_NAME}
|
||||||
|
metricType={MetrictypesTypeDTO.gauge}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.queryByText('No attributes found')).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('AllAttributesValue', () => {
|
describe('AllAttributesValue', () => {
|
||||||
const mockGoToMetricsExploreWithAppliedAttribute = jest.fn();
|
const mockGoToMetricsExploreWithAppliedAttribute = jest.fn();
|
||||||
|
|
||||||
it('renders all attribute values', () => {
|
beforeEach(() => {
|
||||||
render(
|
jest.clearAllMocks();
|
||||||
<AllAttributesValue
|
|
||||||
filterKey="attribute1"
|
|
||||||
filterValue={['value1', 'value2']}
|
|
||||||
goToMetricsExploreWithAppliedAttribute={
|
|
||||||
mockGoToMetricsExploreWithAppliedAttribute
|
|
||||||
}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('value2')).toBeInTheDocument();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('loads more attributes when show more button is clicked', async () => {
|
it('shows All values button when there are more than 5 values', () => {
|
||||||
render(
|
render(
|
||||||
<AllAttributesValue
|
<AllAttributesValue
|
||||||
filterKey="attribute1"
|
filterKey="attribute1"
|
||||||
@@ -153,58 +157,59 @@ describe('AllAttributesValue', () => {
|
|||||||
/>,
|
/>,
|
||||||
);
|
);
|
||||||
expect(screen.queryByText('value6')).not.toBeInTheDocument();
|
expect(screen.queryByText('value6')).not.toBeInTheDocument();
|
||||||
await userEvent.click(screen.getByText('Show More'));
|
expect(screen.getByText('All values (6)')).toBeInTheDocument();
|
||||||
expect(screen.getByText('value6')).toBeInTheDocument();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('does not render show more button when there are no more attributes to show', () => {
|
it('All values popover shows values beyond the initial 5', async () => {
|
||||||
render(
|
const values = [
|
||||||
<AllAttributesValue
|
|
||||||
filterKey="attribute1"
|
|
||||||
filterValue={['value1', 'value2']}
|
|
||||||
goToMetricsExploreWithAppliedAttribute={
|
|
||||||
mockGoToMetricsExploreWithAppliedAttribute
|
|
||||||
}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
expect(screen.queryByText('Show More')).not.toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('copy button should copy the attribute value to the clipboard', async () => {
|
|
||||||
render(
|
|
||||||
<AllAttributesValue
|
|
||||||
filterKey="attribute1"
|
|
||||||
filterValue={['value1', 'value2']}
|
|
||||||
goToMetricsExploreWithAppliedAttribute={
|
|
||||||
mockGoToMetricsExploreWithAppliedAttribute
|
|
||||||
}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
|
||||||
await userEvent.click(screen.getByText('value1'));
|
|
||||||
expect(screen.getByText('Copy Attribute')).toBeInTheDocument();
|
|
||||||
await userEvent.click(screen.getByText('Copy Attribute'));
|
|
||||||
expect(mockUseCopyToClipboard).toHaveBeenCalledWith('value1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('explorer button should go to metrics explore with the attribute filter applied', async () => {
|
|
||||||
render(
|
|
||||||
<AllAttributesValue
|
|
||||||
filterKey="attribute1"
|
|
||||||
filterValue={['value1', 'value2']}
|
|
||||||
goToMetricsExploreWithAppliedAttribute={
|
|
||||||
mockGoToMetricsExploreWithAppliedAttribute
|
|
||||||
}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
expect(screen.getByText('value1')).toBeInTheDocument();
|
|
||||||
await userEvent.click(screen.getByText('value1'));
|
|
||||||
|
|
||||||
expect(screen.getByText('Open in Explorer')).toBeInTheDocument();
|
|
||||||
await userEvent.click(screen.getByText('Open in Explorer'));
|
|
||||||
expect(mockGoToMetricsExploreWithAppliedAttribute).toHaveBeenCalledWith(
|
|
||||||
'attribute1',
|
|
||||||
'value1',
|
'value1',
|
||||||
|
'value2',
|
||||||
|
'value3',
|
||||||
|
'value4',
|
||||||
|
'value5',
|
||||||
|
'value6',
|
||||||
|
'value7',
|
||||||
|
];
|
||||||
|
render(
|
||||||
|
<AllAttributesValue
|
||||||
|
filterKey="attribute1"
|
||||||
|
filterValue={values}
|
||||||
|
goToMetricsExploreWithAppliedAttribute={
|
||||||
|
mockGoToMetricsExploreWithAppliedAttribute
|
||||||
|
}
|
||||||
|
/>,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
await userEvent.click(screen.getByText('All values (7)'));
|
||||||
|
|
||||||
|
expect(screen.getByText('value6')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('value7')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('All values popover search filters the value list', async () => {
|
||||||
|
const values = [
|
||||||
|
'alpha',
|
||||||
|
'bravo',
|
||||||
|
'charlie',
|
||||||
|
'delta',
|
||||||
|
'echo',
|
||||||
|
'fig-special',
|
||||||
|
'golf-target',
|
||||||
|
];
|
||||||
|
render(
|
||||||
|
<AllAttributesValue
|
||||||
|
filterKey="attribute1"
|
||||||
|
filterValue={values}
|
||||||
|
goToMetricsExploreWithAppliedAttribute={
|
||||||
|
mockGoToMetricsExploreWithAppliedAttribute
|
||||||
|
}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
await userEvent.click(screen.getByText('All values (7)'));
|
||||||
|
await userEvent.type(screen.getByPlaceholderText('Search values'), 'golf');
|
||||||
|
|
||||||
|
expect(screen.getByText('golf-target')).toBeInTheDocument();
|
||||||
|
expect(screen.queryByText('fig-special')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -14,12 +14,8 @@ import {
|
|||||||
MOCK_METRIC_NAME,
|
MOCK_METRIC_NAME,
|
||||||
} from './testUtlls';
|
} from './testUtlls';
|
||||||
|
|
||||||
const mockSafeNavigate = jest.fn();
|
const mockWindowOpen = jest.fn();
|
||||||
jest.mock('hooks/useSafeNavigate', () => ({
|
Object.defineProperty(window, 'open', { value: mockWindowOpen });
|
||||||
useSafeNavigate: (): any => ({
|
|
||||||
safeNavigate: mockSafeNavigate,
|
|
||||||
}),
|
|
||||||
}));
|
|
||||||
const mockSetQuery = jest.fn();
|
const mockSetQuery = jest.fn();
|
||||||
const mockUrlQuery = {
|
const mockUrlQuery = {
|
||||||
set: mockSetQuery,
|
set: mockSetQuery,
|
||||||
@@ -43,6 +39,7 @@ describe('DashboardsAndAlertsPopover', () => {
|
|||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
useGetMetricAlertsMock.mockReturnValue(getMockAlertsData());
|
useGetMetricAlertsMock.mockReturnValue(getMockAlertsData());
|
||||||
useGetMetricDashboardsMock.mockReturnValue(getMockDashboardsData());
|
useGetMetricDashboardsMock.mockReturnValue(getMockDashboardsData());
|
||||||
|
mockWindowOpen.mockClear();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders the popover correctly with multiple dashboards and alerts', () => {
|
it('renders the popover correctly with multiple dashboards and alerts', () => {
|
||||||
@@ -140,9 +137,10 @@ describe('DashboardsAndAlertsPopover', () => {
|
|||||||
// Click on the first dashboard
|
// Click on the first dashboard
|
||||||
await userEvent.click(screen.getByText(MOCK_DASHBOARD_1.dashboardName));
|
await userEvent.click(screen.getByText(MOCK_DASHBOARD_1.dashboardName));
|
||||||
|
|
||||||
// Should navigate to the dashboard
|
// Should open dashboard in new tab
|
||||||
expect(mockSafeNavigate).toHaveBeenCalledWith(
|
expect(mockWindowOpen).toHaveBeenCalledWith(
|
||||||
`/dashboard/${MOCK_DASHBOARD_1.dashboardId}`,
|
`/dashboard/${MOCK_DASHBOARD_1.dashboardId}`,
|
||||||
|
'_blank',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -158,11 +156,12 @@ describe('DashboardsAndAlertsPopover', () => {
|
|||||||
// Click on the first alert rule
|
// Click on the first alert rule
|
||||||
await userEvent.click(screen.getByText(MOCK_ALERT_1.alertName));
|
await userEvent.click(screen.getByText(MOCK_ALERT_1.alertName));
|
||||||
|
|
||||||
// Should navigate to the alert rule
|
// Should open alert in new tab
|
||||||
expect(mockSetQuery).toHaveBeenCalledWith(
|
expect(mockSetQuery).toHaveBeenCalledWith(
|
||||||
QueryParams.ruleId,
|
QueryParams.ruleId,
|
||||||
MOCK_ALERT_1.alertId,
|
MOCK_ALERT_1.alertId,
|
||||||
);
|
);
|
||||||
|
expect(mockWindowOpen).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders unique dashboards even when there are duplicates', async () => {
|
it('renders unique dashboards even when there are duplicates', async () => {
|
||||||
|
|||||||
@@ -48,7 +48,7 @@ describe('Highlights', () => {
|
|||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should render loading state when data is loading', () => {
|
it('should show labels and loading text but not stale data values while loading', () => {
|
||||||
useGetMetricHighlightsMock.mockReturnValue(
|
useGetMetricHighlightsMock.mockReturnValue(
|
||||||
getMockMetricHighlightsData(
|
getMockMetricHighlightsData(
|
||||||
{},
|
{},
|
||||||
@@ -60,8 +60,19 @@ describe('Highlights', () => {
|
|||||||
|
|
||||||
render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
render(<Highlights metricName={MOCK_METRIC_NAME} />);
|
||||||
|
|
||||||
|
expect(screen.getByText('SAMPLES')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('TIME SERIES')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('LAST RECEIVED')).toBeInTheDocument();
|
||||||
|
expect(screen.getByText('Loading metric stats')).toBeInTheDocument();
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
screen.getByTestId('metric-highlights-loading-state'),
|
screen.queryByTestId('metric-highlights-data-points'),
|
||||||
).toBeInTheDocument();
|
).not.toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
screen.queryByTestId('metric-highlights-time-series-total'),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
screen.queryByTestId('metric-highlights-last-received'),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -324,6 +324,22 @@ describe('Metadata', () => {
|
|||||||
expect(editButton2).toBeInTheDocument();
|
expect(editButton2).toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should show section header with disabled edit while loading', () => {
|
||||||
|
render(
|
||||||
|
<Metadata
|
||||||
|
metricName={MOCK_METRIC_NAME}
|
||||||
|
metadata={null}
|
||||||
|
isErrorMetricMetadata={false}
|
||||||
|
isLoadingMetricMetadata
|
||||||
|
refetchMetricMetadata={mockRefetchMetricMetadata}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('Metadata')).toBeInTheDocument();
|
||||||
|
const editButton = screen.getByText('Edit').closest('button');
|
||||||
|
expect(editButton).toBeDisabled();
|
||||||
|
});
|
||||||
|
|
||||||
it('should not allow editing of unit if it is already set', async () => {
|
it('should not allow editing of unit if it is already set', async () => {
|
||||||
render(
|
render(
|
||||||
<Metadata
|
<Metadata
|
||||||
|
|||||||
@@ -24,6 +24,13 @@ jest.mock('react-router-dom', () => ({
|
|||||||
pathname: `${ROUTES.METRICS_EXPLORER}`,
|
pathname: `${ROUTES.METRICS_EXPLORER}`,
|
||||||
}),
|
}),
|
||||||
}));
|
}));
|
||||||
|
jest.mock('react-redux', () => ({
|
||||||
|
...jest.requireActual('react-redux'),
|
||||||
|
useSelector: jest.fn().mockReturnValue({
|
||||||
|
maxTime: 1700000000000000000,
|
||||||
|
minTime: 1699900000000000000,
|
||||||
|
}),
|
||||||
|
}));
|
||||||
jest.mock('hooks/useSafeNavigate', () => ({
|
jest.mock('hooks/useSafeNavigate', () => ({
|
||||||
useSafeNavigate: (): any => ({
|
useSafeNavigate: (): any => ({
|
||||||
safeNavigate: jest.fn(),
|
safeNavigate: jest.fn(),
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import {
|
|||||||
MetrictypesTemporalityDTO,
|
MetrictypesTemporalityDTO,
|
||||||
MetrictypesTypeDTO,
|
MetrictypesTypeDTO,
|
||||||
} from 'api/generated/services/sigNoz.schemas';
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { ATTRIBUTE_TYPES } from 'constants/queryBuilder';
|
||||||
|
|
||||||
import {
|
import {
|
||||||
determineIsMonotonic,
|
determineIsMonotonic,
|
||||||
@@ -139,7 +140,7 @@ describe('MetricDetails utils', () => {
|
|||||||
TEST_METRIC_NAME,
|
TEST_METRIC_NAME,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
||||||
MetrictypesTypeDTO.sum,
|
ATTRIBUTE_TYPES.SUM,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateOperator).toBe('rate');
|
expect(query.builder.queryData[0]?.aggregateOperator).toBe('rate');
|
||||||
expect(query.builder.queryData[0]?.timeAggregation).toBe('rate');
|
expect(query.builder.queryData[0]?.timeAggregation).toBe('rate');
|
||||||
@@ -156,7 +157,7 @@ describe('MetricDetails utils', () => {
|
|||||||
TEST_METRIC_NAME,
|
TEST_METRIC_NAME,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
||||||
MetrictypesTypeDTO.gauge,
|
ATTRIBUTE_TYPES.GAUGE,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateOperator).toBe('avg');
|
expect(query.builder.queryData[0]?.aggregateOperator).toBe('avg');
|
||||||
expect(query.builder.queryData[0]?.timeAggregation).toBe('avg');
|
expect(query.builder.queryData[0]?.timeAggregation).toBe('avg');
|
||||||
@@ -173,7 +174,7 @@ describe('MetricDetails utils', () => {
|
|||||||
TEST_METRIC_NAME,
|
TEST_METRIC_NAME,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
||||||
MetrictypesTypeDTO.summary,
|
ATTRIBUTE_TYPES.GAUGE,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
||||||
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
||||||
@@ -190,7 +191,7 @@ describe('MetricDetails utils', () => {
|
|||||||
TEST_METRIC_NAME,
|
TEST_METRIC_NAME,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
||||||
MetrictypesTypeDTO.histogram,
|
ATTRIBUTE_TYPES.HISTOGRAM,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
||||||
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
||||||
@@ -207,7 +208,7 @@ describe('MetricDetails utils', () => {
|
|||||||
TEST_METRIC_NAME,
|
TEST_METRIC_NAME,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
expect(query.builder.queryData[0]?.aggregateAttribute?.type).toBe(
|
||||||
MetrictypesTypeDTO.exponentialhistogram,
|
ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM,
|
||||||
);
|
);
|
||||||
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
expect(query.builder.queryData[0]?.aggregateOperator).toBe('noop');
|
||||||
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
expect(query.builder.queryData[0]?.timeAggregation).toBe('noop');
|
||||||
|
|||||||
@@ -34,6 +34,9 @@ export interface MetadataProps {
|
|||||||
export interface AllAttributesProps {
|
export interface AllAttributesProps {
|
||||||
metricName: string;
|
metricName: string;
|
||||||
metricType: MetrictypesTypeDTO | undefined;
|
metricType: MetrictypesTypeDTO | undefined;
|
||||||
|
isMonotonic?: boolean;
|
||||||
|
minTime?: number;
|
||||||
|
maxTime?: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface AllAttributesValueProps {
|
export interface AllAttributesValueProps {
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import {
|
|||||||
MetrictypesTypeDTO,
|
MetrictypesTypeDTO,
|
||||||
} from 'api/generated/services/sigNoz.schemas';
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
|
import { SpaceAggregation, TimeAggregation } from 'api/v5/v5';
|
||||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
import { initialQueriesMap, toAttributeType } from 'constants/queryBuilder';
|
||||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
|
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
|
||||||
@@ -87,15 +87,26 @@ export function getMetricDetailsQuery(
|
|||||||
metricType: MetrictypesTypeDTO | undefined,
|
metricType: MetrictypesTypeDTO | undefined,
|
||||||
filter?: { key: string; value: string },
|
filter?: { key: string; value: string },
|
||||||
groupBy?: string,
|
groupBy?: string,
|
||||||
|
limit?: number,
|
||||||
|
isMonotonic?: boolean,
|
||||||
): Query {
|
): Query {
|
||||||
let timeAggregation;
|
let timeAggregation;
|
||||||
let spaceAggregation;
|
let spaceAggregation;
|
||||||
let aggregateOperator;
|
let aggregateOperator;
|
||||||
|
const isNonMonotonicSum =
|
||||||
|
metricType === MetrictypesTypeDTO.sum && isMonotonic === false;
|
||||||
|
|
||||||
switch (metricType) {
|
switch (metricType) {
|
||||||
case MetrictypesTypeDTO.sum:
|
case MetrictypesTypeDTO.sum:
|
||||||
timeAggregation = 'rate';
|
if (isNonMonotonicSum) {
|
||||||
spaceAggregation = 'sum';
|
timeAggregation = 'avg';
|
||||||
aggregateOperator = 'rate';
|
spaceAggregation = 'avg';
|
||||||
|
aggregateOperator = 'avg';
|
||||||
|
} else {
|
||||||
|
timeAggregation = 'rate';
|
||||||
|
spaceAggregation = 'sum';
|
||||||
|
aggregateOperator = 'rate';
|
||||||
|
}
|
||||||
break;
|
break;
|
||||||
case MetrictypesTypeDTO.gauge:
|
case MetrictypesTypeDTO.gauge:
|
||||||
timeAggregation = 'avg';
|
timeAggregation = 'avg';
|
||||||
@@ -120,6 +131,8 @@ export function getMetricDetailsQuery(
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const attributeType = toAttributeType(metricType, isMonotonic);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
...initialQueriesMap[DataSource.METRICS],
|
...initialQueriesMap[DataSource.METRICS],
|
||||||
builder: {
|
builder: {
|
||||||
@@ -128,8 +141,8 @@ export function getMetricDetailsQuery(
|
|||||||
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
...initialQueriesMap[DataSource.METRICS].builder.queryData[0],
|
||||||
aggregateAttribute: {
|
aggregateAttribute: {
|
||||||
key: metricName,
|
key: metricName,
|
||||||
type: metricType ?? '',
|
type: attributeType,
|
||||||
id: `${metricName}----${metricType}---string--`,
|
id: `${metricName}----${attributeType}---string--`,
|
||||||
dataType: DataTypes.String,
|
dataType: DataTypes.String,
|
||||||
},
|
},
|
||||||
aggregations: [
|
aggregations: [
|
||||||
@@ -170,6 +183,7 @@ export function getMetricDetailsQuery(
|
|||||||
},
|
},
|
||||||
]
|
]
|
||||||
: [],
|
: [],
|
||||||
|
...(limit ? { limit } : {}),
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
queryFormulas: [],
|
queryFormulas: [],
|
||||||
|
|||||||
@@ -1,9 +1,7 @@
|
|||||||
import { useCallback } from 'react';
|
import { useCallback } from 'react';
|
||||||
import { Tooltip } from 'antd';
|
|
||||||
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
|
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
|
||||||
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
|
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
|
||||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||||
import { Info } from 'lucide-react';
|
|
||||||
import { DataSource } from 'types/common/queryBuilder';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
import { MetricsSearchProps } from './types';
|
import { MetricsSearchProps } from './types';
|
||||||
@@ -26,15 +24,17 @@ function MetricsSearch({
|
|||||||
onChange(currentQueryFilterExpression);
|
onChange(currentQueryFilterExpression);
|
||||||
}, [currentQueryFilterExpression, onChange]);
|
}, [currentQueryFilterExpression, onChange]);
|
||||||
|
|
||||||
|
const handleRunQuery = useCallback(
|
||||||
|
(expression: string): void => {
|
||||||
|
setCurrentQueryFilterExpression(expression);
|
||||||
|
onChange(expression);
|
||||||
|
},
|
||||||
|
[setCurrentQueryFilterExpression, onChange],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="metrics-search-container">
|
<div className="metrics-search-container">
|
||||||
<div data-testid="qb-search-container" className="qb-search-container">
|
<div data-testid="qb-search-container" className="qb-search-container">
|
||||||
<Tooltip
|
|
||||||
title="Use filters to refine metrics based on attributes. Example: service_name=api - Shows all metrics associated with the API service"
|
|
||||||
placement="right"
|
|
||||||
>
|
|
||||||
<Info size={16} />
|
|
||||||
</Tooltip>
|
|
||||||
<QuerySearch
|
<QuerySearch
|
||||||
onChange={handleOnChange}
|
onChange={handleOnChange}
|
||||||
dataSource={DataSource.METRICS}
|
dataSource={DataSource.METRICS}
|
||||||
@@ -45,8 +45,9 @@ function MetricsSearch({
|
|||||||
expression: currentQueryFilterExpression,
|
expression: currentQueryFilterExpression,
|
||||||
},
|
},
|
||||||
}}
|
}}
|
||||||
onRun={handleOnChange}
|
onRun={handleRunQuery}
|
||||||
showFilterSuggestionsWithoutMetric
|
showFilterSuggestionsWithoutMetric
|
||||||
|
placeholder="Try metric_name CONTAINS 'http.server' to view all HTTP Server metrics being sent"
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
<RunQueryBtn
|
<RunQueryBtn
|
||||||
|
|||||||
@@ -37,7 +37,7 @@
|
|||||||
|
|
||||||
.metrics-search-container {
|
.metrics-search-container {
|
||||||
display: flex;
|
display: flex;
|
||||||
gap: 16px;
|
gap: 8px;
|
||||||
align-items: center;
|
align-items: center;
|
||||||
|
|
||||||
.metrics-search-options {
|
.metrics-search-options {
|
||||||
@@ -51,10 +51,6 @@
|
|||||||
gap: 8px;
|
gap: 8px;
|
||||||
flex: 1;
|
flex: 1;
|
||||||
|
|
||||||
.lucide-info {
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
.query-builder-search-container {
|
.query-builder-search-container {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
}
|
}
|
||||||
@@ -66,8 +62,6 @@
|
|||||||
margin-left: -16px;
|
margin-left: -16px;
|
||||||
margin-right: -16px;
|
margin-right: -16px;
|
||||||
|
|
||||||
max-height: 500px;
|
|
||||||
overflow-y: auto;
|
|
||||||
.ant-table-thead > tr > th {
|
.ant-table-thead > tr > th {
|
||||||
padding: 12px;
|
padding: 12px;
|
||||||
font-weight: 500;
|
font-weight: 500;
|
||||||
|
|||||||
@@ -15,13 +15,12 @@ import {
|
|||||||
Querybuildertypesv5OrderByDTO,
|
Querybuildertypesv5OrderByDTO,
|
||||||
Querybuildertypesv5OrderDirectionDTO,
|
Querybuildertypesv5OrderDirectionDTO,
|
||||||
} from 'api/generated/services/sigNoz.schemas';
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
import {
|
import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
|
||||||
convertExpressionToFilters,
|
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||||
convertFiltersToExpression,
|
|
||||||
} from 'components/QueryBuilderV2/utils';
|
|
||||||
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
|
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
|
||||||
import NoLogs from 'container/NoLogs/NoLogs';
|
import NoLogs from 'container/NoLogs/NoLogs';
|
||||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||||
|
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
|
||||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||||
import { AppState } from 'store/reducers';
|
import { AppState } from 'store/reducers';
|
||||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
@@ -61,10 +60,13 @@ function Summary(): JSX.Element {
|
|||||||
heatmapView,
|
heatmapView,
|
||||||
setHeatmapView,
|
setHeatmapView,
|
||||||
] = useState<MetricsexplorertypesTreemapModeDTO>(
|
] = useState<MetricsexplorertypesTreemapModeDTO>(
|
||||||
MetricsexplorertypesTreemapModeDTO.timeseries,
|
MetricsexplorertypesTreemapModeDTO.samples,
|
||||||
);
|
);
|
||||||
|
|
||||||
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
|
||||||
|
|
||||||
|
useShareBuilderUrl({ defaultValue: initialQueriesMap[DataSource.METRICS] });
|
||||||
|
|
||||||
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
|
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
|
||||||
currentQuery,
|
currentQuery,
|
||||||
]);
|
]);
|
||||||
@@ -89,6 +91,15 @@ function Summary(): JSX.Element {
|
|||||||
setCurrentQueryFilterExpression,
|
setCurrentQueryFilterExpression,
|
||||||
] = useState<string>(query?.filter?.expression || '');
|
] = useState<string>(query?.filter?.expression || '');
|
||||||
|
|
||||||
|
const [appliedFilterExpression, setAppliedFilterExpression] = useState(
|
||||||
|
query?.filter?.expression || '',
|
||||||
|
);
|
||||||
|
|
||||||
|
const queryFilterExpression = useMemo(
|
||||||
|
() => ({ expression: appliedFilterExpression }),
|
||||||
|
[appliedFilterExpression],
|
||||||
|
);
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
logEvent(MetricsExplorerEvents.TabChanged, {
|
logEvent(MetricsExplorerEvents.TabChanged, {
|
||||||
[MetricsExplorerEventKeys.Tab]: 'summary',
|
[MetricsExplorerEventKeys.Tab]: 'summary',
|
||||||
@@ -100,11 +111,6 @@ function Summary(): JSX.Element {
|
|||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, []);
|
}, []);
|
||||||
|
|
||||||
const queryFilterExpression = useMemo(() => {
|
|
||||||
const filters = query.filters || { items: [], op: 'AND' };
|
|
||||||
return convertFiltersToExpression(filters);
|
|
||||||
}, [query.filters]);
|
|
||||||
|
|
||||||
const metricsListQuery: MetricsexplorertypesStatsRequestDTO = useMemo(() => {
|
const metricsListQuery: MetricsexplorertypesStatsRequestDTO = useMemo(() => {
|
||||||
return {
|
return {
|
||||||
start: convertNanoToMilliseconds(minTime),
|
start: convertNanoToMilliseconds(minTime),
|
||||||
@@ -187,6 +193,7 @@ function Summary(): JSX.Element {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
setCurrentQueryFilterExpression(expression);
|
setCurrentQueryFilterExpression(expression);
|
||||||
|
setAppliedFilterExpression(expression);
|
||||||
setCurrentPage(1);
|
setCurrentPage(1);
|
||||||
if (expression) {
|
if (expression) {
|
||||||
logEvent(MetricsExplorerEvents.FilterApplied, {
|
logEvent(MetricsExplorerEvents.FilterApplied, {
|
||||||
@@ -305,7 +312,9 @@ function Summary(): JSX.Element {
|
|||||||
/>
|
/>
|
||||||
{showFullScreenLoading ? (
|
{showFullScreenLoading ? (
|
||||||
<MetricsLoading />
|
<MetricsLoading />
|
||||||
) : isMetricsListDataEmpty && isMetricsTreeMapDataEmpty ? (
|
) : isMetricsListDataEmpty &&
|
||||||
|
isMetricsTreeMapDataEmpty &&
|
||||||
|
!appliedFilterExpression ? (
|
||||||
<NoLogs dataSource={DataSource.METRICS} />
|
<NoLogs dataSource={DataSource.METRICS} />
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ import {
|
|||||||
AggregatorFilter,
|
AggregatorFilter,
|
||||||
GroupByFilter,
|
GroupByFilter,
|
||||||
HavingFilter,
|
HavingFilter,
|
||||||
|
MetricNameSelector,
|
||||||
OperatorsSelect,
|
OperatorsSelect,
|
||||||
OrderByFilter,
|
OrderByFilter,
|
||||||
ReduceToFilter,
|
ReduceToFilter,
|
||||||
@@ -403,7 +404,7 @@ export const Query = memo(function Query({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<Col flex="auto">
|
<Col flex="auto">
|
||||||
<AggregatorFilter
|
<MetricNameSelector
|
||||||
onChange={handleChangeAggregatorAttribute}
|
onChange={handleChangeAggregatorAttribute}
|
||||||
query={query}
|
query={query}
|
||||||
/>
|
/>
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
.metric-name-selector {
|
||||||
|
.ant-select-selection-placeholder {
|
||||||
|
color: var(--bg-slate-200);
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,887 @@
|
|||||||
|
import { useEffect, useState } from 'react';
|
||||||
|
import { fireEvent, render, screen, within } from '@testing-library/react';
|
||||||
|
import {
|
||||||
|
MetricsexplorertypesListMetricDTO,
|
||||||
|
MetrictypesTypeDTO,
|
||||||
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||||
|
import { ATTRIBUTE_TYPES } from 'constants/queryBuilder';
|
||||||
|
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||||
|
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||||
|
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
|
import { MetricAggregation } from 'types/api/v5/queryRange';
|
||||||
|
import { DataSource, ReduceOperators } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
|
import { MetricNameSelector } from './MetricNameSelector';
|
||||||
|
|
||||||
|
const mockUseListMetrics = jest.fn();
|
||||||
|
jest.mock('api/generated/services/metrics', () => ({
|
||||||
|
useListMetrics: (...args: unknown[]): ReturnType<typeof mockUseListMetrics> =>
|
||||||
|
mockUseListMetrics(...args),
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('hooks/useDebounce', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: <T,>(value: T): T => value,
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('../QueryBuilderSearch/OptionRenderer', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: ({ value }: { value: string }): JSX.Element => <span>{value}</span>,
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Ref lets StatefulMetricQueryHarness wire handleSetQueryData to real state,
|
||||||
|
// while other tests keep the default no-op mock.
|
||||||
|
const handleSetQueryDataRef: {
|
||||||
|
current: (index: number, query: IBuilderQuery) => void;
|
||||||
|
} = {
|
||||||
|
current: jest.fn(),
|
||||||
|
};
|
||||||
|
|
||||||
|
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
|
||||||
|
useQueryBuilder: (): Record<string, unknown> => ({
|
||||||
|
handleSetQueryData: (index: number, query: IBuilderQuery): void =>
|
||||||
|
handleSetQueryDataRef.current(index, query),
|
||||||
|
handleSetTraceOperatorData: jest.fn(),
|
||||||
|
handleSetFormulaData: jest.fn(),
|
||||||
|
removeQueryBuilderEntityByIndex: jest.fn(),
|
||||||
|
panelType: 'TIME_SERIES',
|
||||||
|
initialDataSource: DataSource.METRICS,
|
||||||
|
currentQuery: {
|
||||||
|
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
|
||||||
|
queryType: 'builder',
|
||||||
|
},
|
||||||
|
setLastUsedQuery: jest.fn(),
|
||||||
|
redirectWithQueryBuilderData: jest.fn(),
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
function makeMetric(
|
||||||
|
overrides: Partial<MetricsexplorertypesListMetricDTO> = {},
|
||||||
|
): MetricsexplorertypesListMetricDTO {
|
||||||
|
return {
|
||||||
|
metricName: 'http_requests_total',
|
||||||
|
type: MetrictypesTypeDTO.sum,
|
||||||
|
isMonotonic: true,
|
||||||
|
description: '',
|
||||||
|
temporality: 'cumulative' as never,
|
||||||
|
unit: '',
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeQuery(overrides: Partial<IBuilderQuery> = {}): IBuilderQuery {
|
||||||
|
return {
|
||||||
|
dataSource: DataSource.METRICS,
|
||||||
|
queryName: 'A',
|
||||||
|
aggregateOperator: 'count',
|
||||||
|
aggregateAttribute: { key: '', type: '', dataType: DataTypes.Float64 },
|
||||||
|
timeAggregation: 'avg',
|
||||||
|
spaceAggregation: 'sum',
|
||||||
|
filter: { expression: '' },
|
||||||
|
aggregations: [],
|
||||||
|
functions: [],
|
||||||
|
filters: { items: [], op: 'AND' },
|
||||||
|
expression: 'A',
|
||||||
|
disabled: false,
|
||||||
|
stepInterval: null,
|
||||||
|
having: [],
|
||||||
|
limit: null,
|
||||||
|
orderBy: [],
|
||||||
|
groupBy: [],
|
||||||
|
legend: '',
|
||||||
|
reduceTo: ReduceOperators.AVG,
|
||||||
|
...overrides,
|
||||||
|
} as IBuilderQuery;
|
||||||
|
}
|
||||||
|
|
||||||
|
function returnMetrics(
|
||||||
|
metrics: MetricsexplorertypesListMetricDTO[],
|
||||||
|
overrides: Record<string, unknown> = {},
|
||||||
|
): void {
|
||||||
|
mockUseListMetrics.mockReturnValue({
|
||||||
|
isFetching: false,
|
||||||
|
isError: false,
|
||||||
|
data: { data: { metrics } },
|
||||||
|
queryKey: ['/api/v2/metrics'],
|
||||||
|
...overrides,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// snippet so tests can assert on them.
|
||||||
|
function MetricQueryHarness({ query }: { query: IBuilderQuery }): JSX.Element {
|
||||||
|
const {
|
||||||
|
handleChangeAggregatorAttribute,
|
||||||
|
operators,
|
||||||
|
spaceAggregationOptions,
|
||||||
|
} = useQueryOperations({
|
||||||
|
query,
|
||||||
|
index: 0,
|
||||||
|
entityVersion: ENTITY_VERSION_V5,
|
||||||
|
});
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div>
|
||||||
|
<MetricNameSelector
|
||||||
|
query={query}
|
||||||
|
onChange={handleChangeAggregatorAttribute}
|
||||||
|
/>
|
||||||
|
<ul data-testid="time-agg-options">
|
||||||
|
{operators.map((op) => (
|
||||||
|
<li key={op.value}>{op.label}</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
<ul data-testid="space-agg-options">
|
||||||
|
{spaceAggregationOptions.map((op) => (
|
||||||
|
<li key={op.value}>{op.label}</li>
|
||||||
|
))}
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getOptionLabels(testId: string): string[] {
|
||||||
|
const list = screen.getByTestId(testId);
|
||||||
|
const items = within(list).queryAllByRole('listitem');
|
||||||
|
return items.map((el) => el.textContent || '');
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('MetricNameSelector', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
handleSetQueryDataRef.current = jest.fn();
|
||||||
|
returnMetrics([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows metric names from API as dropdown options', () => {
|
||||||
|
returnMetrics([
|
||||||
|
makeMetric({ metricName: 'http_requests_total' }),
|
||||||
|
makeMetric({
|
||||||
|
metricName: 'cpu_usage_percent',
|
||||||
|
type: MetrictypesTypeDTO.gauge,
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
|
||||||
|
render(<MetricNameSelector query={makeQuery()} onChange={jest.fn()} />);
|
||||||
|
|
||||||
|
const input = screen.getByRole('combobox');
|
||||||
|
fireEvent.change(input, { target: { value: 'h' } });
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.getAllByText('http_requests_total').length,
|
||||||
|
).toBeGreaterThanOrEqual(1);
|
||||||
|
expect(
|
||||||
|
screen.getAllByText('cpu_usage_percent').length,
|
||||||
|
).toBeGreaterThanOrEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('retains typed metric name in input after blur', () => {
|
||||||
|
returnMetrics([makeMetric({ metricName: 'http_requests_total' })]);
|
||||||
|
|
||||||
|
render(<MetricNameSelector query={makeQuery()} onChange={jest.fn()} />);
|
||||||
|
|
||||||
|
const input = screen.getByRole('combobox');
|
||||||
|
fireEvent.change(input, { target: { value: 'http_requests_total' } });
|
||||||
|
fireEvent.blur(input);
|
||||||
|
|
||||||
|
expect(input).toHaveValue('http_requests_total');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows error message when API request fails', () => {
|
||||||
|
mockUseListMetrics.mockReturnValue({
|
||||||
|
isFetching: false,
|
||||||
|
isError: true,
|
||||||
|
data: undefined,
|
||||||
|
queryKey: ['/api/v2/metrics'],
|
||||||
|
});
|
||||||
|
|
||||||
|
render(<MetricNameSelector query={makeQuery()} onChange={jest.fn()} />);
|
||||||
|
|
||||||
|
const input = screen.getByRole('combobox');
|
||||||
|
fireEvent.focus(input);
|
||||||
|
fireEvent.change(input, { target: { value: 'test' } });
|
||||||
|
|
||||||
|
expect(screen.getByText('Failed to load metrics')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows loading spinner while fetching metrics', () => {
|
||||||
|
mockUseListMetrics.mockReturnValue({
|
||||||
|
isFetching: true,
|
||||||
|
isError: false,
|
||||||
|
data: undefined,
|
||||||
|
queryKey: ['/api/v2/metrics'],
|
||||||
|
});
|
||||||
|
|
||||||
|
const { container } = render(
|
||||||
|
<MetricNameSelector query={makeQuery()} onChange={jest.fn()} />,
|
||||||
|
);
|
||||||
|
|
||||||
|
const input = screen.getByRole('combobox');
|
||||||
|
fireEvent.change(input, { target: { value: 'test' } });
|
||||||
|
|
||||||
|
expect(container.querySelector('.ant-spin-spinning')).toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Each metric type resolves to a distinct pair of option lists
// (time aggregation / space aggregation). The harness renders the lists
// produced by useQueryOperations so the labels can be asserted after a
// metric name is committed (change + blur on the combobox).
describe('selecting a metric type updates the aggregation options', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		// Reset the shared query-update handler and start from an empty metric list.
		handleSetQueryDataRef.current = jest.fn();
		returnMetrics([]);
	});

	it('Sum metric shows Rate/Increase time options and Sum/Avg/Min/Max space options', () => {
		// Monotonic Sum is the counter case.
		returnMetrics([
			makeMetric({
				metricName: 'http_requests_total',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: true,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		// Typing + blur commits the metric without a dropdown selection.
		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'http_requests_total' } });
		fireEvent.blur(input);

		expect(getOptionLabels('time-agg-options')).toEqual(['Rate', 'Increase']);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'Sum',
			'Avg',
			'Min',
			'Max',
		]);
	});

	it('Gauge metric shows Latest/Sum/Avg/Min/Max/Count/Count Distinct time options and Sum/Avg/Min/Max space options', () => {
		returnMetrics([
			makeMetric({
				metricName: 'cpu_usage_percent',
				type: MetrictypesTypeDTO.gauge,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'cpu_usage_percent' } });
		fireEvent.blur(input);

		expect(getOptionLabels('time-agg-options')).toEqual([
			'Latest',
			'Sum',
			'Avg',
			'Min',
			'Max',
			'Count',
			'Count Distinct',
		]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'Sum',
			'Avg',
			'Min',
			'Max',
		]);
	});

	it('non-monotonic Sum metric is treated as Gauge', () => {
		// A non-monotonic Sum can decrease, so it gets the Gauge option set.
		returnMetrics([
			makeMetric({
				metricName: 'active_connections',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: false,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, {
			target: { value: 'active_connections' },
		});
		fireEvent.blur(input);

		expect(getOptionLabels('time-agg-options')).toEqual([
			'Latest',
			'Sum',
			'Avg',
			'Min',
			'Max',
			'Count',
			'Count Distinct',
		]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'Sum',
			'Avg',
			'Min',
			'Max',
		]);
	});

	it('Histogram metric shows no time options and P50–P99 space options', () => {
		returnMetrics([
			makeMetric({
				metricName: 'request_duration_seconds',
				type: MetrictypesTypeDTO.histogram,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, {
			target: { value: 'request_duration_seconds' },
		});
		fireEvent.blur(input);

		// Histograms expose only percentile space aggregations; no time options.
		expect(getOptionLabels('time-agg-options')).toEqual([]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'P50',
			'P75',
			'P90',
			'P95',
			'P99',
		]);
	});

	it('ExponentialHistogram metric shows no time options and P50–P99 space options', () => {
		returnMetrics([
			makeMetric({
				metricName: 'request_duration_exp',
				type: MetrictypesTypeDTO.exponentialhistogram,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, {
			target: { value: 'request_duration_exp' },
		});
		fireEvent.blur(input);

		expect(getOptionLabels('time-agg-options')).toEqual([]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'P50',
			'P75',
			'P90',
			'P95',
			'P99',
		]);
	});

	it('unknown metric (typed name not in API results) shows all time and space options', () => {
		returnMetrics([makeMetric({ metricName: 'known_metric' })]);

		render(<MetricQueryHarness query={makeQuery()} />);

		// The committed name is not in the API results, so its type is unresolved.
		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'unknown_metric' } });
		fireEvent.blur(input);

		// With no resolved type, every aggregation option stays available.
		expect(getOptionLabels('time-agg-options')).toEqual([
			'Max',
			'Min',
			'Sum',
			'Avg',
			'Count',
			'Rate',
			'Increase',
		]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'Sum',
			'Avg',
			'Min',
			'Max',
			'P50',
			'P75',
			'P90',
			'P95',
			'P99',
		]);
	});
});
|
||||||
|
|
||||||
|
// The tests below need the previous query state to survive across metric
// selections, so this harness holds the query in React state and routes
// handleSetQueryData updates back into it (letting useQueryOperations track
// previousMetricInfo across selections).
function StatefulMetricQueryHarness({
	initialQuery,
}: {
	initialQuery: IBuilderQuery;
}): JSX.Element {
	const [query, setQuery] = useState(initialQuery);

	useEffect(() => {
		// Redirect the mocked query-data setter into this component's state so
		// each onChange from the selector re-renders the harness with the new query.
		handleSetQueryDataRef.current = (
			_index: number,
			newQuery: IBuilderQuery,
		): void => {
			setQuery(newQuery);
		};
		return (): void => {
			// Restore a plain mock on unmount so other tests are unaffected.
			handleSetQueryDataRef.current = jest.fn();
		};
	}, []);

	const {
		handleChangeAggregatorAttribute,
		operators,
		spaceAggregationOptions,
	} = useQueryOperations({
		query,
		index: 0,
		entityVersion: ENTITY_VERSION_V5,
	});

	const currentAggregation = query.aggregations?.[0] as MetricAggregation;

	// Expose the option lists and the currently selected aggregation fields
	// as plain DOM nodes so tests can assert on them via data-testid.
	return (
		<div>
			<MetricNameSelector
				query={query}
				onChange={handleChangeAggregatorAttribute}
			/>
			<ul data-testid="time-agg-options">
				{operators.map((op) => (
					<li key={op.value}>{op.label}</li>
				))}
			</ul>
			<ul data-testid="space-agg-options">
				{spaceAggregationOptions.map((op) => (
					<li key={op.value}>{op.label}</li>
				))}
			</ul>
			<div data-testid="selected-time-agg">
				{currentAggregation?.timeAggregation || ''}
			</div>
			<div data-testid="selected-space-agg">
				{currentAggregation?.spaceAggregation || ''}
			</div>
			<div data-testid="selected-metric-name">
				{currentAggregation?.metricName || ''}
			</div>
		</div>
	);
}
|
||||||
|
|
||||||
|
// When the new metric has the same resolved type as the old one, the user's
// non-default aggregation choices must be carried over unchanged.
describe('switching between metrics of the same type preserves aggregation settings', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		handleSetQueryDataRef.current = jest.fn();
		returnMetrics([]);
	});

	it('Sum: preserves non-default increase/avg when switching to another Sum metric', () => {
		returnMetrics([
			makeMetric({
				metricName: 'metric_a',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: true,
			}),
			makeMetric({
				metricName: 'metric_b',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: true,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'metric_a',
						type: ATTRIBUTE_TYPES.SUM,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							// Non-default choices (Sum defaults are rate/sum).
							timeAggregation: 'increase',
							spaceAggregation: 'avg',
							metricName: 'metric_a',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('increase');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('avg');

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'metric_b' } });
		fireEvent.blur(input);

		// Same type (Sum): aggregations survive, only the metric name changes.
		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('increase');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('avg');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'metric_b',
		);
	});

	it('Gauge: preserves non-default min/max when switching to another Gauge metric', () => {
		returnMetrics([
			makeMetric({
				metricName: 'cpu_usage',
				type: MetrictypesTypeDTO.gauge,
			}),
			makeMetric({
				metricName: 'mem_usage',
				type: MetrictypesTypeDTO.gauge,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'cpu_usage',
						type: ATTRIBUTE_TYPES.GAUGE,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: 'min',
							spaceAggregation: 'max',
							metricName: 'cpu_usage',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('min');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('max');

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'mem_usage' } });
		fireEvent.blur(input);

		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('min');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('max');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'mem_usage',
		);
	});

	it('Histogram: preserves non-default p99 when switching to another Histogram metric', () => {
		returnMetrics([
			makeMetric({
				metricName: 'req_duration',
				type: MetrictypesTypeDTO.histogram,
			}),
			makeMetric({
				metricName: 'db_latency',
				type: MetrictypesTypeDTO.histogram,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'req_duration',
						type: ATTRIBUTE_TYPES.HISTOGRAM,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							// Histograms have no time aggregation; p99 is non-default (p90).
							timeAggregation: '',
							spaceAggregation: 'p99',
							metricName: 'req_duration',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('p99');

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'db_latency' } });
		fireEvent.blur(input);

		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('p99');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'db_latency',
		);
	});

	it('ExponentialHistogram: preserves non-default p75 when switching to another ExponentialHistogram metric', () => {
		returnMetrics([
			makeMetric({
				metricName: 'exp_hist_a',
				type: MetrictypesTypeDTO.exponentialhistogram,
			}),
			makeMetric({
				metricName: 'exp_hist_b',
				type: MetrictypesTypeDTO.exponentialhistogram,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'exp_hist_a',
						type: ATTRIBUTE_TYPES.EXPONENTIAL_HISTOGRAM,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: '',
							spaceAggregation: 'p75',
							metricName: 'exp_hist_a',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('p75');

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'exp_hist_b' } });
		fireEvent.blur(input);

		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('p75');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'exp_hist_b',
		);
	});
});
|
||||||
|
|
||||||
|
// When the new metric resolves to a different type, the previous aggregation
// choices no longer apply and must be replaced with the new type's defaults.
describe('switching to a different metric type resets aggregation to new defaults', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		handleSetQueryDataRef.current = jest.fn();
		returnMetrics([]);
	});

	it('Sum to Gauge: resets from increase/avg to the Gauge defaults avg/avg', () => {
		returnMetrics([
			makeMetric({
				metricName: 'sum_metric',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: true,
			}),
			makeMetric({
				metricName: 'gauge_metric',
				type: MetrictypesTypeDTO.gauge,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'sum_metric',
						type: ATTRIBUTE_TYPES.SUM,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: 'increase',
							spaceAggregation: 'avg',
							metricName: 'sum_metric',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'gauge_metric' } });
		fireEvent.blur(input);

		// Gauge defaults: avg time aggregation, avg space aggregation.
		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('avg');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('avg');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'gauge_metric',
		);
	});

	it('Gauge to Histogram: resets from min/max to the Histogram defaults (no time, p90 space)', () => {
		returnMetrics([
			makeMetric({
				metricName: 'gauge_metric',
				type: MetrictypesTypeDTO.gauge,
			}),
			makeMetric({
				metricName: 'hist_metric',
				type: MetrictypesTypeDTO.histogram,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'gauge_metric',
						type: ATTRIBUTE_TYPES.GAUGE,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: 'min',
							spaceAggregation: 'max',
							metricName: 'gauge_metric',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'hist_metric' } });
		fireEvent.blur(input);

		// Histogram defaults: no time aggregation, p90 space aggregation.
		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('p90');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'hist_metric',
		);
	});

	it('Histogram to Sum: resets from p99 to the Sum defaults rate/sum', () => {
		returnMetrics([
			makeMetric({
				metricName: 'hist_metric',
				type: MetrictypesTypeDTO.histogram,
			}),
			makeMetric({
				metricName: 'sum_metric',
				type: MetrictypesTypeDTO.sum,
				isMonotonic: true,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'hist_metric',
						type: ATTRIBUTE_TYPES.HISTOGRAM,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: '',
							spaceAggregation: 'p99',
							metricName: 'hist_metric',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'sum_metric' } });
		fireEvent.blur(input);

		// Sum defaults: rate time aggregation, sum space aggregation.
		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('rate');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('sum');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'sum_metric',
		);
	});
});
|
||||||
|
|
||||||
|
// A name typed by hand that is absent from the API results resolves with an
// empty type, which downstream treats as "unknown" and applies its defaults.
describe('typed metric not in search results is committed with unknown defaults', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		handleSetQueryDataRef.current = jest.fn();
		returnMetrics([]);
	});

	it('Gauge to unknown metric: resets from Gauge aggregations to unknown defaults (avg/avg)', () => {
		returnMetrics([
			makeMetric({
				metricName: 'cpu_usage',
				type: MetrictypesTypeDTO.gauge,
			}),
		]);

		render(
			<StatefulMetricQueryHarness
				initialQuery={makeQuery({
					aggregateAttribute: {
						key: 'cpu_usage',
						type: ATTRIBUTE_TYPES.GAUGE,
						dataType: DataTypes.Float64,
					},
					aggregations: [
						{
							timeAggregation: 'min',
							spaceAggregation: 'max',
							metricName: 'cpu_usage',
							temporality: '',
						},
					] as MetricAggregation[],
				})}
			/>,
		);

		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('min');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('max');

		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'unknown_metric' } });
		fireEvent.blur(input);

		// A metric not in the search results is committed with an empty type,
		// which resets the aggregations to the unknown defaults (avg/avg).
		expect(screen.getByTestId('selected-time-agg')).toHaveTextContent('avg');
		expect(screen.getByTestId('selected-space-agg')).toHaveTextContent('avg');
		expect(screen.getByTestId('selected-metric-name')).toHaveTextContent(
			'unknown_metric',
		);
	});
});
|
||||||
|
|
||||||
|
// Summary metrics get the same aggregation option set as Gauges.
describe('Summary metric type is treated as Gauge', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		handleSetQueryDataRef.current = jest.fn();
		returnMetrics([]);
	});

	it('selecting a Summary metric shows Gauge aggregation options', () => {
		returnMetrics([
			makeMetric({
				metricName: 'rpc_duration_summary',
				type: MetrictypesTypeDTO.summary,
			}),
		]);

		render(<MetricQueryHarness query={makeQuery()} />);

		const input = screen.getByRole('combobox');
		fireEvent.change(input, {
			target: { value: 'rpc_duration_summary' },
		});
		fireEvent.blur(input);

		// Identical lists to the Gauge cases above.
		expect(getOptionLabels('time-agg-options')).toEqual([
			'Latest',
			'Sum',
			'Avg',
			'Min',
			'Max',
			'Count',
			'Count Distinct',
		]);
		expect(getOptionLabels('space-agg-options')).toEqual([
			'Sum',
			'Avg',
			'Min',
			'Max',
		]);
	});
});
|
||||||
@@ -0,0 +1,266 @@
|
|||||||
|
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||||
|
import { flushSync } from 'react-dom';
|
||||||
|
import { AutoComplete, Spin, Typography } from 'antd';
|
||||||
|
import { useListMetrics } from 'api/generated/services/metrics';
|
||||||
|
import { MetricsexplorertypesListMetricDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { ATTRIBUTE_TYPES, toAttributeType } from 'constants/queryBuilder';
|
||||||
|
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
|
||||||
|
import useDebounce from 'hooks/useDebounce';
|
||||||
|
import {
|
||||||
|
BaseAutocompleteData,
|
||||||
|
DataTypes,
|
||||||
|
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||||
|
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
|
import { MetricAggregation } from 'types/api/v5/queryRange';
|
||||||
|
import { ExtendedSelectOption } from 'types/common/select';
|
||||||
|
import { popupContainer } from 'utils/selectPopupContainer';
|
||||||
|
|
||||||
|
import { selectStyle } from '../QueryBuilderSearch/config';
|
||||||
|
import OptionRenderer from '../QueryBuilderSearch/OptionRenderer';
|
||||||
|
|
||||||
|
import './MetricNameSelector.styles.scss';
|
||||||
|
|
||||||
|
// Props for MetricNameSelector.
export type MetricNameSelectorProps = {
	// Current builder query; supplies the committed metric name via
	// aggregations[0].metricName or aggregateAttribute.key.
	query: IBuilderQuery;
	// Called when a metric name is committed. isEditMode=true is used to
	// backfill the attribute type for saved queries without resetting
	// aggregation options.
	onChange: (value: BaseAutocompleteData, isEditMode?: boolean) => void;
	// Disables the underlying AutoComplete input.
	disabled?: boolean;
	// Initial input value used when the query carries no metric name yet.
	defaultValue?: string;
	// Optional extra callback fired only on a dropdown selection.
	onSelect?: (value: BaseAutocompleteData) => void;
	// 'meter' filters the API to meter metrics; '' (or undefined) means all.
	signalSource?: 'meter' | '';
};
|
||||||
|
|
||||||
|
// Maps an API metric DTO to the query-builder attribute type. isMonotonic is
// forwarded so toAttributeType can classify non-monotonic Sums as Gauges.
// Returns '' when the type cannot be resolved.
function getAttributeType(
	metric: MetricsexplorertypesListMetricDTO,
): ATTRIBUTE_TYPES | '' {
	return toAttributeType(metric.type, metric.isMonotonic);
}
|
||||||
|
|
||||||
|
function createAutocompleteData(
|
||||||
|
metricName: string,
|
||||||
|
type: string,
|
||||||
|
): BaseAutocompleteData {
|
||||||
|
return { key: metricName, type, dataType: DataTypes.Float64 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// N.B. on the metric name selector behaviour.
//
// Metric aggregation options resolution:
// The component maintains a ref (metricsRef) of the latest API results.
// When the user commits a metric name (via dropdown select, blur, or Cmd+Enter),
// resolveMetricFromText looks up the metric in metricsRef to determine its type
// (Sum, Gauge, Histogram, etc.). If the metric isn't found (e.g. the user typed
// a name before the debounced search returned), the type is empty and downstream
// treats it as unknown.
//
// Selection handling:
// - Dropdown select: user picks from the dropdown; type is always resolved
//   since the option came from the current API results.
// - Blur: user typed a name and tabbed/clicked away without selecting from
//   the dropdown. If the name differs from the current metric, it's resolved
//   and committed. If the input is empty, it resets to the current metric name.
// - Cmd/Ctrl+Enter: resolves the typed name and commits it using flushSync
//   so the state update is processed synchronously before QueryBuilderV2's
//   onKeyDownCapture fires handleRunQuery. Uses document-level capture phase
//   to run before React's root-level event dispatch. One case remains
//   unhandled: pressing Enter before the network request completes.
//   TODO(srikanthccv): enter before n/w req completion
//
// Edit mode:
// When a saved query is loaded, the metric name may be set via aggregations
// but aggregateAttribute.type may be missing. Once the API returns metric data,
// the component calls onChange with isEditMode=true to backfill the type without
// resetting aggregation options.
//
// Signal source:
// When signalSource is 'meter', the API is filtered to meter metrics only.
// Changing signalSource clears the input and search text.

export const MetricNameSelector = memo(function MetricNameSelector({
	query,
	onChange,
	disabled,
	defaultValue,
	onSelect,
	signalSource,
}: MetricNameSelectorProps): JSX.Element {
	// The committed name lives in aggregations[0]; fall back to the
	// (possibly legacy) aggregateAttribute key.
	const currentMetricName =
		(query.aggregations?.[0] as MetricAggregation)?.metricName ||
		query.aggregateAttribute?.key ||
		'';

	// inputValue is what the AutoComplete displays; searchText drives the
	// (debounced) API search. They are intentionally separate so selecting an
	// option can clear the search without clearing the displayed value.
	const [inputValue, setInputValue] = useState<string>(
		currentMetricName || defaultValue || '',
	);
	const [searchText, setSearchText] = useState<string>(currentMetricName);

	// Latest API results, readable from stable callbacks without re-binding.
	const metricsRef = useRef<MetricsexplorertypesListMetricDTO[]>([]);
	// Set on dropdown selection so the subsequent blur does not double-commit.
	const selectedFromDropdownRef = useRef(false);
	const prevSignalSourceRef = useRef(signalSource);

	// Keep the displayed value in sync when the query changes externally.
	useEffect(() => {
		setInputValue(currentMetricName || defaultValue || '');
	}, [defaultValue, currentMetricName]);

	// Switching signal source invalidates the current input and search.
	useEffect(() => {
		if (prevSignalSourceRef.current !== signalSource) {
			prevSignalSourceRef.current = signalSource;
			setSearchText('');
			setInputValue('');
		}
	}, [signalSource]);

	const debouncedValue = useDebounce(searchText, DEBOUNCE_DELAY);

	// NOTE(review): the cast to Record<string, unknown> sidesteps the generated
	// params type; 'source' may not yet be in the generated client — confirm.
	const { isFetching, isError, data: listMetricsData } = useListMetrics(
		{
			searchText: debouncedValue,
			limit: 100,
			source: signalSource || undefined,
		} as Record<string, unknown>,
		{
			query: {
				keepPreviousData: false,
				retry: 2,
			},
		},
	);

	const metrics = useMemo(() => listMetricsData?.data?.metrics ?? [], [
		listMetricsData,
	]);

	// Mirror the latest results into the ref for resolveMetricFromText.
	useEffect(() => {
		metricsRef.current = metrics;
	}, [metrics]);

	// Dropdown options: one entry per metric, rendered with its resolved type.
	const optionsData = useMemo((): ExtendedSelectOption[] => {
		if (!metrics.length) {
			return [];
		}

		return metrics.map((metric) => ({
			label: (
				<OptionRenderer
					label={metric.metricName}
					value={metric.metricName}
					dataType={DataTypes.Float64}
					type={getAttributeType(metric) || ''}
				/>
			),
			value: metric.metricName,
			key: metric.metricName,
		}));
	}, [metrics]);

	// Edit-mode backfill: a loaded saved query may have a metric name but no
	// aggregateAttribute.type. Once results arrive, backfill the type with
	// isEditMode=true so aggregation options are not reset.
	useEffect(() => {
		const metricName = (query.aggregations?.[0] as MetricAggregation)?.metricName;
		const hasAggregateAttributeType = query.aggregateAttribute?.type;

		if (metricName && !hasAggregateAttributeType && metrics.length > 0) {
			const found = metrics.find((m) => m.metricName === metricName);
			if (found) {
				onChange(
					createAutocompleteData(found.metricName, getAttributeType(found)),
					true,
				);
			}
		}
	}, [metrics, query.aggregations, query.aggregateAttribute?.type, onChange]);

	// Resolves a typed name against the latest results; unknown names get an
	// empty type so downstream applies the "unknown" defaults.
	const resolveMetricFromText = useCallback(
		(text: string): BaseAutocompleteData => {
			const found = metricsRef.current.find((m) => m.metricName === text);
			if (found) {
				return createAutocompleteData(found.metricName, getAttributeType(found));
			}
			return createAutocompleteData(text, '');
		},
		[],
	);

	const placeholder = useMemo(() => {
		if (signalSource === 'meter') {
			return 'Search for a meter metric...';
		}
		return 'Search for a metric...';
	}, [signalSource]);

	const handleChange = useCallback((value: string): void => {
		setInputValue(value);
	}, []);

	const handleSearch = useCallback((value: string): void => {
		setSearchText(value);
		// Typing again invalidates any earlier dropdown selection.
		selectedFromDropdownRef.current = false;
	}, []);

	// Dropdown selection: always resolvable since the option came from the
	// current results. Clears the search so the full list shows next time.
	const handleSelect = useCallback(
		(value: string): void => {
			selectedFromDropdownRef.current = true;
			const resolved = resolveMetricFromText(value);
			onChange(resolved);
			if (onSelect) {
				onSelect(resolved);
			}
			setSearchText('');
		},
		[onChange, onSelect, resolveMetricFromText],
	);

	// Blur commits a typed name (if it changed) or restores the committed name
	// when the input was left empty. Skipped right after a dropdown selection.
	const handleBlur = useCallback(() => {
		if (selectedFromDropdownRef.current) {
			selectedFromDropdownRef.current = false;
			return;
		}

		const typedValue = inputValue?.trim() || '';
		if (typedValue && typedValue !== currentMetricName) {
			onChange(resolveMetricFromText(typedValue));
		} else if (!typedValue && currentMetricName) {
			setInputValue(currentMetricName);
		}
	}, [inputValue, currentMetricName, onChange, resolveMetricFromText]);

	// Cmd/Ctrl+Enter: commit synchronously (flushSync) so the query state is
	// updated before QueryBuilderV2's key handler runs the query. Registered
	// on document in the capture phase to precede React's root dispatch.
	// NOTE(review): this listener is document-wide and not scoped to this
	// input's focus; with multiple selectors mounted, each will evaluate the
	// shortcut — verify this is intended.
	useEffect(() => {
		const handleKeyDown = (e: KeyboardEvent): void => {
			if (e.key === 'Enter' && (e.metaKey || e.ctrlKey)) {
				const typedValue = inputValue?.trim() || '';
				if (typedValue && typedValue !== currentMetricName) {
					flushSync(() => {
						onChange(resolveMetricFromText(typedValue));
					});
				}
			}
		};

		document.addEventListener('keydown', handleKeyDown, true);
		return (): void => {
			document.removeEventListener('keydown', handleKeyDown, true);
		};
	}, [inputValue, currentMetricName, onChange, resolveMetricFromText]);

	return (
		<AutoComplete
			className="metric-name-selector"
			getPopupContainer={popupContainer}
			style={selectStyle}
			// Server-side search: never filter client-side.
			filterOption={false}
			placeholder={placeholder}
			onSearch={handleSearch}
			onChange={handleChange}
			notFoundContent={
				isFetching ? (
					<Spin size="small" />
				) : isError ? (
					<Typography.Text type="danger" style={{ fontSize: 12 }}>
						Failed to load metrics
					</Typography.Text>
				) : null
			}
			options={optionsData}
			value={inputValue}
			onBlur={handleBlur}
			onSelect={handleSelect}
			disabled={disabled}
		/>
	);
});
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
export type { MetricNameSelectorProps } from './MetricNameSelector';
|
||||||
|
export { MetricNameSelector } from './MetricNameSelector';
|
||||||
@@ -2,6 +2,7 @@ export { AggregatorFilter } from './AggregatorFilter';
|
|||||||
export { BuilderUnitsFilter } from './BuilderUnitsFilter';
|
export { BuilderUnitsFilter } from './BuilderUnitsFilter';
|
||||||
export { GroupByFilter } from './GroupByFilter';
|
export { GroupByFilter } from './GroupByFilter';
|
||||||
export { HavingFilter } from './HavingFilter';
|
export { HavingFilter } from './HavingFilter';
|
||||||
|
export { MetricNameSelector } from './MetricNameSelector';
|
||||||
export { OperatorsSelect } from './OperatorsSelect';
|
export { OperatorsSelect } from './OperatorsSelect';
|
||||||
export { OrderByFilter } from './OrderByFilter';
|
export { OrderByFilter } from './OrderByFilter';
|
||||||
export { ReduceToFilter } from './ReduceToFilter';
|
export { ReduceToFilter } from './ReduceToFilter';
|
||||||
|
|||||||
@@ -257,7 +257,9 @@ function TimeSeriesView({
|
|||||||
chartData[0]?.length === 0 &&
|
chartData[0]?.length === 0 &&
|
||||||
!isLoading &&
|
!isLoading &&
|
||||||
!isError &&
|
!isError &&
|
||||||
dataSource === DataSource.METRICS && <EmptyMetricsSearch />}
|
dataSource === DataSource.METRICS && (
|
||||||
|
<EmptyMetricsSearch hasQueryResult={data !== undefined} />
|
||||||
|
)}
|
||||||
|
|
||||||
{!isLoading &&
|
{!isLoading &&
|
||||||
!isError &&
|
!isError &&
|
||||||
|
|||||||
@@ -248,19 +248,12 @@ export const useQueryOperations: UseQueryOperations = ({
|
|||||||
);
|
);
|
||||||
|
|
||||||
const handleChangeAggregatorAttribute = useCallback(
|
const handleChangeAggregatorAttribute = useCallback(
|
||||||
(
|
(value: BaseAutocompleteData, isEditMode?: boolean): void => {
|
||||||
value: BaseAutocompleteData,
|
|
||||||
isEditMode?: boolean,
|
|
||||||
attributeKeys?: BaseAutocompleteData[],
|
|
||||||
): void => {
|
|
||||||
const newQuery: IBuilderQuery = {
|
const newQuery: IBuilderQuery = {
|
||||||
...query,
|
...query,
|
||||||
aggregateAttribute: value,
|
aggregateAttribute: value,
|
||||||
};
|
};
|
||||||
|
|
||||||
const getAttributeKeyFromMetricName = (metricName: string): string =>
|
|
||||||
attributeKeys?.find((key) => key.key === metricName)?.type || '';
|
|
||||||
|
|
||||||
if (
|
if (
|
||||||
newQuery.dataSource === DataSource.METRICS &&
|
newQuery.dataSource === DataSource.METRICS &&
|
||||||
entityVersion === ENTITY_VERSION_V4
|
entityVersion === ENTITY_VERSION_V4
|
||||||
@@ -311,9 +304,7 @@ export const useQueryOperations: UseQueryOperations = ({
|
|||||||
// Get current metric info
|
// Get current metric info
|
||||||
const currentMetricType = newQuery.aggregateAttribute?.type || '';
|
const currentMetricType = newQuery.aggregateAttribute?.type || '';
|
||||||
|
|
||||||
const prevMetricType = previousMetricInfo?.type
|
const prevMetricType = previousMetricInfo?.type || '';
|
||||||
? previousMetricInfo.type
|
|
||||||
: getAttributeKeyFromMetricName(previousMetricInfo?.name || '');
|
|
||||||
|
|
||||||
// Check if metric type has changed by comparing with tracked previous values
|
// Check if metric type has changed by comparing with tracked previous values
|
||||||
const metricTypeChanged =
|
const metricTypeChanged =
|
||||||
@@ -374,7 +365,7 @@ export const useQueryOperations: UseQueryOperations = ({
|
|||||||
|
|
||||||
// Handled query with unknown metric to avoid 400 and 500 errors
|
// Handled query with unknown metric to avoid 400 and 500 errors
|
||||||
// With metric value typed and not available then - time - 'avg', space - 'avg'
|
// With metric value typed and not available then - time - 'avg', space - 'avg'
|
||||||
// If not typed - time - 'rate', space - 'sum', op - 'count'
|
// If not typed - time - 'avg', space - 'sum'
|
||||||
if (isEmpty(newQuery.aggregateAttribute?.type)) {
|
if (isEmpty(newQuery.aggregateAttribute?.type)) {
|
||||||
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
|
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
|
||||||
newQuery.aggregations = [
|
newQuery.aggregations = [
|
||||||
@@ -388,7 +379,7 @@ export const useQueryOperations: UseQueryOperations = ({
|
|||||||
} else {
|
} else {
|
||||||
newQuery.aggregations = [
|
newQuery.aggregations = [
|
||||||
{
|
{
|
||||||
timeAggregation: MetricAggregateOperator.COUNT,
|
timeAggregation: MetricAggregateOperator.AVG,
|
||||||
metricName: newQuery.aggregateAttribute?.key || '',
|
metricName: newQuery.aggregateAttribute?.key || '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
spaceAggregation: MetricAggregateOperator.SUM,
|
spaceAggregation: MetricAggregateOperator.SUM,
|
||||||
@@ -408,6 +399,29 @@ export const useQueryOperations: UseQueryOperations = ({
|
|||||||
];
|
];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Override with safe defaults when metric type is unknown to avoid 400/500 errors
|
||||||
|
if (isEmpty(newQuery.aggregateAttribute?.type)) {
|
||||||
|
if (!isEmpty(newQuery.aggregateAttribute?.key)) {
|
||||||
|
newQuery.aggregations = [
|
||||||
|
{
|
||||||
|
timeAggregation: MetricAggregateOperator.AVG,
|
||||||
|
metricName: newQuery.aggregateAttribute?.key || '',
|
||||||
|
temporality: '',
|
||||||
|
spaceAggregation: MetricAggregateOperator.AVG,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
} else {
|
||||||
|
newQuery.aggregations = [
|
||||||
|
{
|
||||||
|
timeAggregation: MetricAggregateOperator.AVG,
|
||||||
|
metricName: newQuery.aggregateAttribute?.key || '',
|
||||||
|
temporality: '',
|
||||||
|
spaceAggregation: MetricAggregateOperator.SUM,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ export const useHandleExplorerTabChange = (): {
|
|||||||
type: string,
|
type: string,
|
||||||
querySearchParameters?: ICurrentQueryData,
|
querySearchParameters?: ICurrentQueryData,
|
||||||
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
||||||
|
newTab?: boolean,
|
||||||
) => void;
|
) => void;
|
||||||
} => {
|
} => {
|
||||||
const {
|
const {
|
||||||
@@ -63,6 +64,7 @@ export const useHandleExplorerTabChange = (): {
|
|||||||
type: string,
|
type: string,
|
||||||
currentQueryData?: ICurrentQueryData,
|
currentQueryData?: ICurrentQueryData,
|
||||||
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
||||||
|
newTab?: boolean,
|
||||||
) => {
|
) => {
|
||||||
const newPanelType = type as PANEL_TYPES;
|
const newPanelType = type as PANEL_TYPES;
|
||||||
|
|
||||||
@@ -81,13 +83,21 @@ export const useHandleExplorerTabChange = (): {
|
|||||||
[QueryParams.viewKey]: currentQueryData?.id || viewKey,
|
[QueryParams.viewKey]: currentQueryData?.id || viewKey,
|
||||||
},
|
},
|
||||||
redirectToUrl,
|
redirectToUrl,
|
||||||
|
undefined,
|
||||||
|
newTab,
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
redirectWithQueryBuilderData(query, {
|
redirectWithQueryBuilderData(
|
||||||
[QueryParams.panelTypes]: newPanelType,
|
query,
|
||||||
[QueryParams.viewName]: currentQueryData?.name || viewName,
|
{
|
||||||
[QueryParams.viewKey]: currentQueryData?.id || viewKey,
|
[QueryParams.panelTypes]: newPanelType,
|
||||||
});
|
[QueryParams.viewName]: currentQueryData?.name || viewName,
|
||||||
|
[QueryParams.viewKey]: currentQueryData?.id || viewKey,
|
||||||
|
},
|
||||||
|
undefined,
|
||||||
|
undefined,
|
||||||
|
newTab,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[panelType, getUpdateQuery, redirectWithQueryBuilderData, viewName, viewKey],
|
[panelType, getUpdateQuery, redirectWithQueryBuilderData, viewName, viewKey],
|
||||||
|
|||||||
@@ -54,7 +54,7 @@ export const stepIntervalUnchanged = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
@@ -177,7 +177,7 @@ export const replaceVariables = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
@@ -267,7 +267,7 @@ export const defaultOutput = {
|
|||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
filter: { expression: '' },
|
filter: { expression: '' },
|
||||||
@@ -392,7 +392,7 @@ export const outputWithFunctions = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
@@ -429,7 +429,7 @@ export const outputWithFunctions = {
|
|||||||
{
|
{
|
||||||
metricName: '',
|
metricName: '',
|
||||||
temporality: '',
|
temporality: '',
|
||||||
timeAggregation: 'count',
|
timeAggregation: 'avg',
|
||||||
spaceAggregation: 'sum',
|
spaceAggregation: 'sum',
|
||||||
reduceTo: ReduceOperators.AVG,
|
reduceTo: ReduceOperators.AVG,
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -947,6 +947,7 @@ export function QueryBuilderProvider({
|
|||||||
searchParams?: Record<string, unknown>,
|
searchParams?: Record<string, unknown>,
|
||||||
redirectingUrl?: typeof ROUTES[keyof typeof ROUTES],
|
redirectingUrl?: typeof ROUTES[keyof typeof ROUTES],
|
||||||
shouldNotStringify?: boolean,
|
shouldNotStringify?: boolean,
|
||||||
|
newTab?: boolean,
|
||||||
) => {
|
) => {
|
||||||
const queryType =
|
const queryType =
|
||||||
!query.queryType || !Object.values(EQueryType).includes(query.queryType)
|
!query.queryType || !Object.values(EQueryType).includes(query.queryType)
|
||||||
@@ -1013,7 +1014,7 @@ export function QueryBuilderProvider({
|
|||||||
? `${redirectingUrl}?${urlQuery}`
|
? `${redirectingUrl}?${urlQuery}`
|
||||||
: `${location.pathname}?${urlQuery}`;
|
: `${location.pathname}?${urlQuery}`;
|
||||||
|
|
||||||
safeNavigate(generatedUrl);
|
safeNavigate(generatedUrl, { newTab });
|
||||||
},
|
},
|
||||||
[location.pathname, safeNavigate, urlQuery],
|
[location.pathname, safeNavigate, urlQuery],
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -72,7 +72,6 @@ export type UseQueryOperations = (
|
|||||||
handleChangeAggregatorAttribute: (
|
handleChangeAggregatorAttribute: (
|
||||||
value: BaseAutocompleteData,
|
value: BaseAutocompleteData,
|
||||||
isEditMode?: boolean,
|
isEditMode?: boolean,
|
||||||
attributeKeys?: BaseAutocompleteData[],
|
|
||||||
) => void;
|
) => void;
|
||||||
handleChangeDataSource: (newSource: DataSource) => void;
|
handleChangeDataSource: (newSource: DataSource) => void;
|
||||||
handleDeleteQuery: () => void;
|
handleDeleteQuery: () => void;
|
||||||
|
|||||||
@@ -278,6 +278,7 @@ export type QueryBuilderContextType = {
|
|||||||
searchParams?: Record<string, unknown>,
|
searchParams?: Record<string, unknown>,
|
||||||
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
redirectToUrl?: typeof ROUTES[keyof typeof ROUTES],
|
||||||
shallStringify?: boolean,
|
shallStringify?: boolean,
|
||||||
|
newTab?: boolean,
|
||||||
) => void;
|
) => void;
|
||||||
handleRunQuery: () => void;
|
handleRunQuery: () => void;
|
||||||
resetQuery: (newCurrentQuery?: QueryState) => void;
|
resetQuery: (newCurrentQuery?: QueryState) => void;
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||||
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
|
||||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||||
@@ -56,11 +57,81 @@ func NewModule(ts telemetrystore.TelemetryStore, telemetryMetadataStore telemetr
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO(srikanthccv): use metadata store to fetch metric metadata
|
||||||
func (m *module) ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
|
func (m *module) ListMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
|
||||||
if err := params.Validate(); err != nil {
|
if err := params.Validate(); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if params.Source == "meter" {
|
||||||
|
return m.listMeterMetrics(ctx, params)
|
||||||
|
}
|
||||||
|
return m.listMetrics(ctx, orgID, params)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *module) listMeterMetrics(ctx context.Context, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
|
||||||
|
sb := sqlbuilder.NewSelectBuilder()
|
||||||
|
sb.Select(
|
||||||
|
"metric_name",
|
||||||
|
"any(description) AS description",
|
||||||
|
"any(type) AS metric_type",
|
||||||
|
"any(unit) AS metric_unit",
|
||||||
|
"argMax(temporality, unix_milli) AS temporality",
|
||||||
|
"any(is_monotonic) AS is_monotonic",
|
||||||
|
)
|
||||||
|
sb.From(fmt.Sprintf("%s.%s", telemetrymeter.DBName, telemetrymeter.SamplesTableName))
|
||||||
|
|
||||||
|
if params.Start != nil && params.End != nil {
|
||||||
|
sb.Where(sb.Between("unix_milli", *params.Start, *params.End))
|
||||||
|
}
|
||||||
|
|
||||||
|
if params.Search != "" {
|
||||||
|
searchLower := strings.ToLower(params.Search)
|
||||||
|
searchLower = strings.ReplaceAll(searchLower, "%", "\\%")
|
||||||
|
searchLower = strings.ReplaceAll(searchLower, "_", "\\_")
|
||||||
|
sb.Where(sb.Like("lower(metric_name)", fmt.Sprintf("%%%s%%", searchLower)))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.GroupBy("metric_name")
|
||||||
|
sb.OrderBy("metric_name ASC")
|
||||||
|
sb.Limit(params.Limit)
|
||||||
|
|
||||||
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
|
valueCtx := ctxtypes.SetClickhouseMaxThreads(ctx, m.config.TelemetryStore.Threads)
|
||||||
|
db := m.telemetryStore.ClickhouseDB()
|
||||||
|
rows, err := db.Query(valueCtx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to list meter metrics")
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
metrics := make([]metricsexplorertypes.ListMetric, 0)
|
||||||
|
for rows.Next() {
|
||||||
|
var metric metricsexplorertypes.ListMetric
|
||||||
|
if err := rows.Scan(
|
||||||
|
&metric.MetricName,
|
||||||
|
&metric.Description,
|
||||||
|
&metric.MetricType,
|
||||||
|
&metric.MetricUnit,
|
||||||
|
&metric.Temporality,
|
||||||
|
&metric.IsMonotonic,
|
||||||
|
); err != nil {
|
||||||
|
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan meter metric")
|
||||||
|
}
|
||||||
|
metrics = append(metrics, metric)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating meter metrics")
|
||||||
|
}
|
||||||
|
|
||||||
|
return &metricsexplorertypes.ListMetricsResponse{
|
||||||
|
Metrics: metrics,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *module) listMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
|
||||||
sb := sqlbuilder.NewSelectBuilder()
|
sb := sqlbuilder.NewSelectBuilder()
|
||||||
sb.Select("DISTINCT metric_name")
|
sb.Select("DISTINCT metric_name")
|
||||||
|
|
||||||
@@ -715,9 +786,8 @@ func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter,
|
|||||||
Logger: m.logger,
|
Logger: m.logger,
|
||||||
FieldMapper: m.fieldMapper,
|
FieldMapper: m.fieldMapper,
|
||||||
ConditionBuilder: m.condBuilder,
|
ConditionBuilder: m.condBuilder,
|
||||||
FullTextColumn: &telemetrytypes.TelemetryFieldKey{
|
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "metric_name", FieldContext: telemetrytypes.FieldContextMetric},
|
||||||
Name: "labels"},
|
FieldKeys: keys,
|
||||||
FieldKeys: keys,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
startNs := querybuilder.ToNanoSecs(uint64(startMillis))
|
startNs := querybuilder.ToNanoSecs(uint64(startMillis))
|
||||||
|
|||||||
@@ -1631,8 +1631,10 @@ func (t *telemetryMetaStore) FetchTemporalityAndTypeMulti(ctx context.Context, q
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
// TODO: return error after table migration are run
|
meterMetricsTemporality, meterMetricsTypes, err := t.fetchMeterSourceMetricsTemporalityAndType(ctx, metricNames...)
|
||||||
meterMetricsTemporality, meterMetricsTypes, _ := t.fetchMeterSourceMetricsTemporalityAndType(ctx, metricNames...)
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
// For metrics not found in the database, set to Unknown
|
// For metrics not found in the database, set to Unknown
|
||||||
for _, metricName := range metricNames {
|
for _, metricName := range metricNames {
|
||||||
@@ -1728,6 +1730,7 @@ func (t *telemetryMetaStore) fetchMeterSourceMetricsTemporalityAndType(ctx conte
|
|||||||
"metric_name",
|
"metric_name",
|
||||||
"argMax(temporality, unix_milli) as temporality",
|
"argMax(temporality, unix_milli) as temporality",
|
||||||
"any(type) AS type",
|
"any(type) AS type",
|
||||||
|
"any(is_monotonic) as is_monotonic",
|
||||||
).From(t.meterDBName + "." + t.meterFieldsTblName)
|
).From(t.meterDBName + "." + t.meterFieldsTblName)
|
||||||
|
|
||||||
// Filter by metric names (in the temporality column due to data mix-up)
|
// Filter by metric names (in the temporality column due to data mix-up)
|
||||||
|
|||||||
@@ -301,6 +301,7 @@ type ListMetricsParams struct {
|
|||||||
End *int64 `query:"end"`
|
End *int64 `query:"end"`
|
||||||
Limit int `query:"limit"`
|
Limit int `query:"limit"`
|
||||||
Search string `query:"searchText"`
|
Search string `query:"searchText"`
|
||||||
|
Source string `query:"source"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// Validate ensures ListMetricsParams contains acceptable values.
|
// Validate ensures ListMetricsParams contains acceptable values.
|
||||||
|
|||||||
@@ -447,7 +447,7 @@ type MetricAggregation struct {
|
|||||||
// space aggregation to apply to the query
|
// space aggregation to apply to the query
|
||||||
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
|
SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
|
||||||
// param for space aggregation if needed
|
// param for space aggregation if needed
|
||||||
ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam"`
|
ComparisonSpaceAggregationParam *metrictypes.ComparisonSpaceAggregationParam `json:"comparisonSpaceAggregationParam,omitempty"`
|
||||||
// table hints to use for the query
|
// table hints to use for the query
|
||||||
TableHints *metrictypes.MetricTableHints `json:"-"`
|
TableHints *metrictypes.MetricTableHints `json:"-"`
|
||||||
// value filter to apply to the query
|
// value filter to apply to the query
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ pytest_plugins = [
|
|||||||
"fixtures.logs",
|
"fixtures.logs",
|
||||||
"fixtures.traces",
|
"fixtures.traces",
|
||||||
"fixtures.metrics",
|
"fixtures.metrics",
|
||||||
|
"fixtures.meter",
|
||||||
"fixtures.driver",
|
"fixtures.driver",
|
||||||
"fixtures.idp",
|
"fixtures.idp",
|
||||||
"fixtures.idputils",
|
"fixtures.idputils",
|
||||||
|
|||||||
121
tests/integration/fixtures/meter.py
Normal file
121
tests/integration/fixtures/meter.py
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Any, Callable, Generator, List
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from fixtures import types
|
||||||
|
|
||||||
|
|
||||||
|
class MeterSample:
|
||||||
|
temporality: str
|
||||||
|
metric_name: str
|
||||||
|
description: str
|
||||||
|
unit: str
|
||||||
|
type: str
|
||||||
|
is_monotonic: bool
|
||||||
|
labels: str
|
||||||
|
fingerprint: np.uint64
|
||||||
|
unix_milli: np.int64
|
||||||
|
value: np.float64
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
metric_name: str,
|
||||||
|
labels: dict[str, str],
|
||||||
|
timestamp: datetime,
|
||||||
|
value: float,
|
||||||
|
temporality: str = "Delta",
|
||||||
|
description: str = "",
|
||||||
|
unit: str = "",
|
||||||
|
type_: str = "Sum",
|
||||||
|
is_monotonic: bool = True,
|
||||||
|
) -> None:
|
||||||
|
self.temporality = temporality
|
||||||
|
self.metric_name = metric_name
|
||||||
|
self.description = description
|
||||||
|
self.unit = unit
|
||||||
|
self.type = type_
|
||||||
|
self.is_monotonic = is_monotonic
|
||||||
|
self.labels = json.dumps(labels, separators=(",", ":"))
|
||||||
|
self.unix_milli = np.int64(int(timestamp.timestamp() * 1e3))
|
||||||
|
self.value = np.float64(value)
|
||||||
|
|
||||||
|
fingerprint_str = metric_name + self.labels
|
||||||
|
self.fingerprint = np.uint64(
|
||||||
|
int(hashlib.md5(fingerprint_str.encode()).hexdigest()[:16], 16)
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_samples_row(self) -> list:
|
||||||
|
return [
|
||||||
|
self.temporality,
|
||||||
|
self.metric_name,
|
||||||
|
self.description,
|
||||||
|
self.unit,
|
||||||
|
self.type,
|
||||||
|
self.is_monotonic,
|
||||||
|
self.labels,
|
||||||
|
self.fingerprint,
|
||||||
|
self.unix_milli,
|
||||||
|
self.value,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def make_meter_samples(
|
||||||
|
metric_name: str,
|
||||||
|
labels: dict[str, str],
|
||||||
|
now: datetime,
|
||||||
|
count: int = 60,
|
||||||
|
base_value: float = 100.0,
|
||||||
|
**kwargs,
|
||||||
|
) -> List[MeterSample]:
|
||||||
|
samples = []
|
||||||
|
for i in range(count):
|
||||||
|
ts = now - timedelta(minutes=count - i)
|
||||||
|
samples.append(
|
||||||
|
MeterSample(
|
||||||
|
metric_name=metric_name,
|
||||||
|
labels=labels,
|
||||||
|
timestamp=ts,
|
||||||
|
value=base_value + i,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return samples
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="insert_meter_samples", scope="function")
|
||||||
|
def insert_meter_samples(
|
||||||
|
clickhouse: types.TestContainerClickhouse,
|
||||||
|
) -> Generator[Callable[[List[MeterSample]], None], Any, None]:
|
||||||
|
def _insert_meter_samples(samples: List[MeterSample]) -> None:
|
||||||
|
if len(samples) == 0:
|
||||||
|
return
|
||||||
|
|
||||||
|
clickhouse.conn.insert(
|
||||||
|
database="signoz_meter",
|
||||||
|
table="distributed_samples",
|
||||||
|
column_names=[
|
||||||
|
"temporality",
|
||||||
|
"metric_name",
|
||||||
|
"description",
|
||||||
|
"unit",
|
||||||
|
"type",
|
||||||
|
"is_monotonic",
|
||||||
|
"labels",
|
||||||
|
"fingerprint",
|
||||||
|
"unix_milli",
|
||||||
|
"value",
|
||||||
|
],
|
||||||
|
data=[s.to_samples_row() for s in samples],
|
||||||
|
)
|
||||||
|
|
||||||
|
yield _insert_meter_samples
|
||||||
|
|
||||||
|
cluster = clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER"]
|
||||||
|
for table in ["samples", "samples_agg_1d"]:
|
||||||
|
clickhouse.conn.query(
|
||||||
|
f"TRUNCATE TABLE signoz_meter.{table} ON CLUSTER '{cluster}' SYNC"
|
||||||
|
)
|
||||||
@@ -54,6 +54,7 @@ def build_builder_query(
|
|||||||
*,
|
*,
|
||||||
comparisonSpaceAggregationParam: Optional[Dict] = None,
|
comparisonSpaceAggregationParam: Optional[Dict] = None,
|
||||||
temporality: Optional[str] = None,
|
temporality: Optional[str] = None,
|
||||||
|
source: Optional[str] = None,
|
||||||
step_interval: int = DEFAULT_STEP_INTERVAL,
|
step_interval: int = DEFAULT_STEP_INTERVAL,
|
||||||
group_by: Optional[List[str]] = None,
|
group_by: Optional[List[str]] = None,
|
||||||
filter_expression: Optional[str] = None,
|
filter_expression: Optional[str] = None,
|
||||||
@@ -73,10 +74,14 @@ def build_builder_query(
|
|||||||
"stepInterval": step_interval,
|
"stepInterval": step_interval,
|
||||||
"disabled": disabled,
|
"disabled": disabled,
|
||||||
}
|
}
|
||||||
|
if source:
|
||||||
|
spec["source"] = source
|
||||||
if temporality:
|
if temporality:
|
||||||
spec["aggregations"][0]["temporality"] = temporality
|
spec["aggregations"][0]["temporality"] = temporality
|
||||||
if comparisonSpaceAggregationParam:
|
if comparisonSpaceAggregationParam:
|
||||||
spec["aggregations"][0]["comparisonSpaceAggregationParam"] = comparisonSpaceAggregationParam
|
spec["aggregations"][0][
|
||||||
|
"comparisonSpaceAggregationParam"
|
||||||
|
] = comparisonSpaceAggregationParam
|
||||||
if group_by:
|
if group_by:
|
||||||
spec["groupBy"] = [
|
spec["groupBy"] = [
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -2,7 +2,6 @@
|
|||||||
Look at the histogram_data_1h.jsonl file for the relevant data
|
Look at the histogram_data_1h.jsonl file for the relevant data
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import random
|
|
||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from typing import Callable, List
|
from typing import Callable, List
|
||||||
@@ -22,6 +21,7 @@ from fixtures.utils import get_testdata_file_path
|
|||||||
|
|
||||||
FILE = get_testdata_file_path("histogram_data_1h.jsonl")
|
FILE = get_testdata_file_path("histogram_data_1h.jsonl")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"threshold, operator, first_value, last_value",
|
"threshold, operator, first_value, last_value",
|
||||||
[
|
[
|
||||||
@@ -29,12 +29,22 @@ FILE = get_testdata_file_path("histogram_data_1h.jsonl")
|
|||||||
(100, "<=", 1.1, 6.9),
|
(100, "<=", 1.1, 6.9),
|
||||||
(7500, "<=", 16.75, 74.75),
|
(7500, "<=", 16.75, 74.75),
|
||||||
(8000, "<=", 17, 75),
|
(8000, "<=", 17, 75),
|
||||||
(80000, "<=", 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
"<=",
|
||||||
|
17,
|
||||||
|
75,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
(1000, ">", 7, 7),
|
(1000, ">", 7, 7),
|
||||||
(100, ">", 16.9, 69.1),
|
(100, ">", 16.9, 69.1),
|
||||||
(7500, ">", 1.25, 1.25),
|
(7500, ">", 1.25, 1.25),
|
||||||
(8000, ">", 1, 1),
|
(8000, ">", 1, 1),
|
||||||
(80000, ">", 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
">",
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_histogram_count_for_one_endpoint(
|
def test_histogram_count_for_one_endpoint(
|
||||||
@@ -65,10 +75,7 @@ def test_histogram_count_for_one_endpoint(
|
|||||||
metric_name,
|
metric_name,
|
||||||
"increase",
|
"increase",
|
||||||
"count",
|
"count",
|
||||||
comparisonSpaceAggregationParam={
|
comparisonSpaceAggregationParam={"threshold": threshold, "operator": operator},
|
||||||
"threshold": threshold,
|
|
||||||
"operator": operator
|
|
||||||
},
|
|
||||||
filter_expression='endpoint = "/health"',
|
filter_expression='endpoint = "/health"',
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -81,6 +88,7 @@ def test_histogram_count_for_one_endpoint(
|
|||||||
assert result_values[0]["value"] == first_value
|
assert result_values[0]["value"] == first_value
|
||||||
assert result_values[-1]["value"] == last_value
|
assert result_values[-1]["value"] == last_value
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"threshold, operator, first_value, last_value",
|
"threshold, operator, first_value, last_value",
|
||||||
[
|
[
|
||||||
@@ -88,12 +96,22 @@ def test_histogram_count_for_one_endpoint(
|
|||||||
(100, "<=", 2.2, 13.8),
|
(100, "<=", 2.2, 13.8),
|
||||||
(7500, "<=", 33.5, 149.5),
|
(7500, "<=", 33.5, 149.5),
|
||||||
(8000, "<=", 34, 150),
|
(8000, "<=", 34, 150),
|
||||||
(80000, "<=", 34, 150), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
"<=",
|
||||||
|
34,
|
||||||
|
150,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
(1000, ">", 14, 14),
|
(1000, ">", 14, 14),
|
||||||
(100, ">", 33.8, 138.2),
|
(100, ">", 33.8, 138.2),
|
||||||
(7500, ">", 2.5, 2.5),
|
(7500, ">", 2.5, 2.5),
|
||||||
(8000, ">", 2, 2),
|
(8000, ">", 2, 2),
|
||||||
(80000, ">", 2, 2), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
">",
|
||||||
|
2,
|
||||||
|
2,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_histogram_count_for_one_service(
|
def test_histogram_count_for_one_service(
|
||||||
@@ -124,10 +142,7 @@ def test_histogram_count_for_one_service(
|
|||||||
metric_name,
|
metric_name,
|
||||||
"increase",
|
"increase",
|
||||||
"count",
|
"count",
|
||||||
comparisonSpaceAggregationParam={
|
comparisonSpaceAggregationParam={"threshold": threshold, "operator": operator},
|
||||||
"threshold": threshold,
|
|
||||||
"operator": operator
|
|
||||||
},
|
|
||||||
filter_expression='service = "api"',
|
filter_expression='service = "api"',
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -140,6 +155,7 @@ def test_histogram_count_for_one_service(
|
|||||||
assert result_values[0]["value"] == first_value
|
assert result_values[0]["value"] == first_value
|
||||||
assert result_values[-1]["value"] == last_value
|
assert result_values[-1]["value"] == last_value
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"threshold, operator, zeroth_value, first_value, last_value",
|
"threshold, operator, zeroth_value, first_value, last_value",
|
||||||
[
|
[
|
||||||
@@ -147,12 +163,24 @@ def test_histogram_count_for_one_service(
|
|||||||
(100, "<=", 1234.5, 1.1, 6.9),
|
(100, "<=", 1234.5, 1.1, 6.9),
|
||||||
(7500, "<=", 12345, 16.75, 74.75),
|
(7500, "<=", 12345, 16.75, 74.75),
|
||||||
(8000, "<=", 12345, 17, 75),
|
(8000, "<=", 12345, 17, 75),
|
||||||
(80000, "<=", 12345, 17, 75), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
"<=",
|
||||||
|
12345,
|
||||||
|
17,
|
||||||
|
75,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
(1000, ">", 0, 7, 7),
|
(1000, ">", 0, 7, 7),
|
||||||
(100, ">", 11110.5, 16.9, 69.1),
|
(100, ">", 11110.5, 16.9, 69.1),
|
||||||
(7500, ">", 0, 1.25, 1.25),
|
(7500, ">", 0, 1.25, 1.25),
|
||||||
(8000, ">", 0, 1, 1),
|
(8000, ">", 0, 1, 1),
|
||||||
(80000, ">", 0, 1, 1), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
(
|
||||||
|
80000,
|
||||||
|
">",
|
||||||
|
0,
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
), ## cuz we don't know the max value in infinity, all numbers beyond the biggest finite bucket will report the same answer
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
def test_histogram_count_for_delta_service(
|
def test_histogram_count_for_delta_service(
|
||||||
@@ -184,10 +212,7 @@ def test_histogram_count_for_delta_service(
|
|||||||
metric_name,
|
metric_name,
|
||||||
"increase",
|
"increase",
|
||||||
"count",
|
"count",
|
||||||
comparisonSpaceAggregationParam={
|
comparisonSpaceAggregationParam={"threshold": threshold, "operator": operator},
|
||||||
"threshold": threshold,
|
|
||||||
"operator": operator
|
|
||||||
},
|
|
||||||
filter_expression='service = "web"',
|
filter_expression='service = "web"',
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -196,11 +221,16 @@ def test_histogram_count_for_delta_service(
|
|||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
||||||
assert len(result_values) == 60 ## in delta, the value at 10:01 will also be reported
|
assert (
|
||||||
|
len(result_values) == 60
|
||||||
|
) ## in delta, the value at 10:01 will also be reported
|
||||||
assert result_values[0]["value"] == zeroth_value
|
assert result_values[0]["value"] == zeroth_value
|
||||||
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
assert (
|
||||||
|
result_values[1]["value"] == first_value
|
||||||
|
) ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
||||||
assert result_values[-1]["value"] == last_value
|
assert result_values[-1]["value"] == last_value
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"threshold, operator, zeroth_value, first_value, last_value",
|
"threshold, operator, zeroth_value, first_value, last_value",
|
||||||
[
|
[
|
||||||
@@ -245,10 +275,7 @@ def test_histogram_count_for_all_services(
|
|||||||
metric_name,
|
metric_name,
|
||||||
"increase",
|
"increase",
|
||||||
"count",
|
"count",
|
||||||
comparisonSpaceAggregationParam={
|
comparisonSpaceAggregationParam={"threshold": threshold, "operator": operator},
|
||||||
"threshold": threshold,
|
|
||||||
"operator": operator
|
|
||||||
},
|
|
||||||
## no services filter, this tests for multitemporality handling as well
|
## no services filter, this tests for multitemporality handling as well
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -257,11 +284,16 @@ def test_histogram_count_for_all_services(
|
|||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
||||||
assert len(result_values) == 60 ## in delta, the value at 10:01 will also be reported
|
assert (
|
||||||
|
len(result_values) == 60
|
||||||
|
) ## in delta, the value at 10:01 will also be reported
|
||||||
assert result_values[0]["value"] == zeroth_value
|
assert result_values[0]["value"] == zeroth_value
|
||||||
assert result_values[1]["value"] == first_value ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
assert (
|
||||||
|
result_values[1]["value"] == first_value
|
||||||
|
) ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
||||||
assert result_values[-1]["value"] == last_value
|
assert result_values[-1]["value"] == last_value
|
||||||
|
|
||||||
|
|
||||||
def test_histogram_count_no_param(
|
def test_histogram_count_no_param(
|
||||||
signoz: types.SigNoz,
|
signoz: types.SigNoz,
|
||||||
create_user_admin: None, # pylint: disable=unused-argument
|
create_user_admin: None, # pylint: disable=unused-argument
|
||||||
@@ -308,8 +340,26 @@ def test_histogram_count_no_param(
|
|||||||
set(le_buckets.keys()) == expected_buckets
|
set(le_buckets.keys()) == expected_buckets
|
||||||
), f"Expected endpoints {expected_buckets}, got {set(le_buckets.keys())}"
|
), f"Expected endpoints {expected_buckets}, got {set(le_buckets.keys())}"
|
||||||
|
|
||||||
first_values = {"1000": 33, "1500": 36, "2000": 39, "4000": 42, "5000": 45, "6000": 48, "8000": 51, "+Inf": 54}
|
first_values = {
|
||||||
last_values = {"1000": 207, "1500": 210, "2000": 213, "4000": 216, "5000": 219, "6000": 222, "8000": 225, "+Inf": 228}
|
"1000": 33,
|
||||||
|
"1500": 36,
|
||||||
|
"2000": 39,
|
||||||
|
"4000": 42,
|
||||||
|
"5000": 45,
|
||||||
|
"6000": 48,
|
||||||
|
"8000": 51,
|
||||||
|
"+Inf": 54,
|
||||||
|
}
|
||||||
|
last_values = {
|
||||||
|
"1000": 207,
|
||||||
|
"1500": 210,
|
||||||
|
"2000": 213,
|
||||||
|
"4000": 216,
|
||||||
|
"5000": 219,
|
||||||
|
"6000": 222,
|
||||||
|
"8000": 225,
|
||||||
|
"+Inf": 228,
|
||||||
|
}
|
||||||
for le, values in le_buckets.items():
|
for le, values in le_buckets.items():
|
||||||
assert len(values) == 60
|
assert len(values) == 60
|
||||||
|
|
||||||
@@ -318,5 +368,7 @@ def test_histogram_count_no_param(
|
|||||||
v["value"] >= 0
|
v["value"] >= 0
|
||||||
), f"Count for {le} should not be negative: {v['value']}"
|
), f"Count for {le} should not be negative: {v['value']}"
|
||||||
assert values[0]["value"] == 12345
|
assert values[0]["value"] == 12345
|
||||||
assert values[1]["value"] == first_values[le] ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
assert (
|
||||||
assert values[-1]["value"] == last_values[le]
|
values[1]["value"] == first_values[le]
|
||||||
|
) ## to keep parallel to the cumulative test cases, first_value refers to the value at 10:02
|
||||||
|
assert values[-1]["value"] == last_values[le]
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import random
|
|
||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from typing import Callable, List
|
from typing import Callable, List
|
||||||
@@ -10,7 +9,6 @@ from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
|||||||
from fixtures.metrics import Metrics
|
from fixtures.metrics import Metrics
|
||||||
from fixtures.querier import (
|
from fixtures.querier import (
|
||||||
build_builder_query,
|
build_builder_query,
|
||||||
get_all_series,
|
|
||||||
get_series_values,
|
get_series_values,
|
||||||
make_query_request,
|
make_query_request,
|
||||||
)
|
)
|
||||||
@@ -18,6 +16,7 @@ from fixtures.utils import get_testdata_file_path
|
|||||||
|
|
||||||
FILE = get_testdata_file_path("gauge_data_1h.jsonl")
|
FILE = get_testdata_file_path("gauge_data_1h.jsonl")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"time_agg, space_agg, service, num_elements, start_val, first_val, twentieth_min_val, after_twentieth_min_val",
|
"time_agg, space_agg, service, num_elements, start_val, first_val, twentieth_min_val, after_twentieth_min_val",
|
||||||
[
|
[
|
||||||
@@ -50,7 +49,7 @@ def test_for_one_service(
|
|||||||
start_val: float,
|
start_val: float,
|
||||||
first_val: float,
|
first_val: float,
|
||||||
twentieth_min_val: float,
|
twentieth_min_val: float,
|
||||||
after_twentieth_min_val: float ## web service has a gap of 10 mins after the 20th minute
|
after_twentieth_min_val: float, ## web service has a gap of 10 mins after the 20th minute
|
||||||
) -> None:
|
) -> None:
|
||||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||||
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||||
@@ -84,6 +83,7 @@ def test_for_one_service(
|
|||||||
assert result_values[19]["value"] == twentieth_min_val
|
assert result_values[19]["value"] == twentieth_min_val
|
||||||
assert result_values[20]["value"] == after_twentieth_min_val
|
assert result_values[20]["value"] == after_twentieth_min_val
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"time_agg, space_agg, start_val, first_val, twentieth_min_val, twenty_first_min_val, thirty_first_min_val",
|
"time_agg, space_agg, start_val, first_val, twentieth_min_val, twenty_first_min_val, thirty_first_min_val",
|
||||||
[
|
[
|
||||||
@@ -105,8 +105,8 @@ def test_for_multiple_aggregations(
|
|||||||
start_val: float,
|
start_val: float,
|
||||||
first_val: float,
|
first_val: float,
|
||||||
twentieth_min_val: float,
|
twentieth_min_val: float,
|
||||||
twenty_first_min_val: float, ## web service has a gap of 10 mins after the 20th minute
|
twenty_first_min_val: float, ## web service has a gap of 10 mins after the 20th minute
|
||||||
thirty_first_min_val: float
|
thirty_first_min_val: float,
|
||||||
) -> None:
|
) -> None:
|
||||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||||
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||||
@@ -138,4 +138,4 @@ def test_for_multiple_aggregations(
|
|||||||
assert result_values[1]["value"] == first_val
|
assert result_values[1]["value"] == first_val
|
||||||
assert result_values[19]["value"] == twentieth_min_val
|
assert result_values[19]["value"] == twentieth_min_val
|
||||||
assert result_values[20]["value"] == twenty_first_min_val
|
assert result_values[20]["value"] == twenty_first_min_val
|
||||||
assert result_values[30]["value"] == thirty_first_min_val
|
assert result_values[30]["value"] == thirty_first_min_val
|
||||||
|
|||||||
@@ -53,7 +53,9 @@ def test_rate_with_steady_values_and_reset(
|
|||||||
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
||||||
assert len(result_values) == 60 ## total 61 minutes covered, and 30th minute is missing
|
assert (
|
||||||
|
len(result_values) == 60
|
||||||
|
) ## total 61 minutes covered, and 30th minute is missing
|
||||||
assert (
|
assert (
|
||||||
result_values[30]["value"] == 0.0333
|
result_values[30]["value"] == 0.0333
|
||||||
) # reset happens and [30] is for 31st minute. 2/60 cuz delta divides by step interval
|
) # reset happens and [30] is for 31st minute. 2/60 cuz delta divides by step interval
|
||||||
@@ -61,9 +63,7 @@ def test_rate_with_steady_values_and_reset(
|
|||||||
result_values[31]["value"] == 0.133
|
result_values[31]["value"] == 0.133
|
||||||
) # i.e 8/60 i.e 31st to 32nd minute changes
|
) # i.e 8/60 i.e 31st to 32nd minute changes
|
||||||
count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
|
count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
|
||||||
assert (
|
assert count_of_steady_rate == 58 # 1 reset + 1 high rate are excluded
|
||||||
count_of_steady_rate == 58
|
|
||||||
) # 1 reset + 1 high rate are excluded
|
|
||||||
# All rates should be non-negative (stale periods = 0 rate)
|
# All rates should be non-negative (stale periods = 0 rate)
|
||||||
for v in result_values:
|
for v in result_values:
|
||||||
assert v["value"] >= 0, f"Rate should not be negative: {v['value']}"
|
assert v["value"] >= 0, f"Rate should not be negative: {v['value']}"
|
||||||
|
|||||||
110
tests/integration/src/querier/11_cost_meter.py
Normal file
110
tests/integration/src/querier/11_cost_meter.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from http import HTTPStatus
|
||||||
|
from typing import Callable, List
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from fixtures import types
|
||||||
|
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||||
|
from fixtures.meter import MeterSample, make_meter_samples
|
||||||
|
from fixtures.querier import (
|
||||||
|
build_builder_query,
|
||||||
|
get_series_values,
|
||||||
|
make_query_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_query_range_cost_meter(
|
||||||
|
signoz: types.SigNoz,
|
||||||
|
create_user_admin: None, # pylint: disable=unused-argument
|
||||||
|
get_token: Callable[[str, str], str],
|
||||||
|
insert_meter_samples: Callable[[List[MeterSample]], None],
|
||||||
|
) -> None:
|
||||||
|
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||||
|
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||||
|
end_ms = int(now.timestamp() * 1000)
|
||||||
|
|
||||||
|
metric_name = "signoz_cost_test_query_range"
|
||||||
|
labels = {"service": "test-service", "environment": "production"}
|
||||||
|
|
||||||
|
samples = make_meter_samples(
|
||||||
|
metric_name,
|
||||||
|
labels,
|
||||||
|
now,
|
||||||
|
count=60,
|
||||||
|
base_value=100.0,
|
||||||
|
temporality="Delta",
|
||||||
|
type_="Sum",
|
||||||
|
is_monotonic=True,
|
||||||
|
)
|
||||||
|
insert_meter_samples(samples)
|
||||||
|
|
||||||
|
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||||
|
query = build_builder_query(
|
||||||
|
"A",
|
||||||
|
metric_name,
|
||||||
|
"sum",
|
||||||
|
"sum",
|
||||||
|
source="meter",
|
||||||
|
temporality="delta",
|
||||||
|
)
|
||||||
|
|
||||||
|
response = make_query_request(signoz, token, start_ms, end_ms, [query])
|
||||||
|
assert response.status_code == HTTPStatus.OK
|
||||||
|
|
||||||
|
data = response.json()
|
||||||
|
result_values = get_series_values(data, "A")
|
||||||
|
assert len(result_values) > 0, f"Expected non-empty results, got: {data}"
|
||||||
|
|
||||||
|
for val in result_values:
|
||||||
|
assert val["value"] >= 0, f"Expected non-negative value, got: {val['value']}"
|
||||||
|
|
||||||
|
|
||||||
|
def test_list_meter_metric_names(
|
||||||
|
signoz: types.SigNoz,
|
||||||
|
create_user_admin: None, # pylint: disable=unused-argument
|
||||||
|
get_token: Callable[[str, str], str],
|
||||||
|
insert_meter_samples: Callable[[List[MeterSample]], None],
|
||||||
|
) -> None:
|
||||||
|
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||||
|
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||||
|
end_ms = int(now.timestamp() * 1000)
|
||||||
|
|
||||||
|
metric_name = "cost_test_list_metrics"
|
||||||
|
labels = {"service": "billing-service"}
|
||||||
|
|
||||||
|
samples = make_meter_samples(
|
||||||
|
metric_name,
|
||||||
|
labels,
|
||||||
|
now,
|
||||||
|
count=5,
|
||||||
|
base_value=50.0,
|
||||||
|
temporality="Delta",
|
||||||
|
type_="Sum",
|
||||||
|
is_monotonic=True,
|
||||||
|
)
|
||||||
|
insert_meter_samples(samples)
|
||||||
|
|
||||||
|
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||||
|
|
||||||
|
response = requests.get(
|
||||||
|
signoz.self.host_configs["8080"].get("/api/v2/metrics"),
|
||||||
|
params={
|
||||||
|
"start": start_ms,
|
||||||
|
"end": end_ms,
|
||||||
|
"limit": 100,
|
||||||
|
"searchText": "cost_test_list",
|
||||||
|
"source": "meter",
|
||||||
|
},
|
||||||
|
headers={"authorization": f"Bearer {token}"},
|
||||||
|
timeout=30,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == HTTPStatus.OK
|
||||||
|
|
||||||
|
data = response.json()
|
||||||
|
metrics = data.get("data", {}).get("metrics", [])
|
||||||
|
metric_names = [m["metricName"] for m in metrics]
|
||||||
|
assert (
|
||||||
|
metric_name in metric_names
|
||||||
|
), f"Expected {metric_name} in metric names, got: {metric_names}"
|
||||||
Reference in New Issue
Block a user