mirror of
https://github.com/SigNoz/signoz.git
synced 2026-05-14 14:10:32 +01:00
Compare commits
42 Commits
fix/resour
...
nv/tags
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
29e14ce9c6 | ||
|
|
d7dc789a58 | ||
|
|
4f263304f0 | ||
|
|
2680f7163f | ||
|
|
d2d129eea9 | ||
|
|
9e1704615f | ||
|
|
db06557c12 | ||
|
|
1475e2b53a | ||
|
|
e58119a416 | ||
|
|
d354044fbe | ||
|
|
fe6cbc3c0c | ||
|
|
45fa0c739c | ||
|
|
e1527dd148 | ||
|
|
5063be6467 | ||
|
|
b453655dea | ||
|
|
b47bc6bcc4 | ||
|
|
5321e9ee87 | ||
|
|
e82f568a27 | ||
|
|
c2aac3a278 | ||
|
|
ddfec3e5f7 | ||
|
|
af623f66e8 | ||
|
|
10ecb7524c | ||
|
|
8fc21ca6b9 | ||
|
|
422149369d | ||
|
|
2063697350 | ||
|
|
685faa9211 | ||
|
|
de6fcb9fbb | ||
|
|
828619a9e6 | ||
|
|
d87e7241c0 | ||
|
|
ae184315a9 | ||
|
|
29f782a3a0 | ||
|
|
71d8dafce1 | ||
|
|
412320d7d9 | ||
|
|
9b5d78b5a0 | ||
|
|
444464ae15 | ||
|
|
d5841f8daa | ||
|
|
c344cd256f | ||
|
|
5e61be1606 | ||
|
|
173037d3be | ||
|
|
d12c846212 | ||
|
|
4e5bd7cf6f | ||
|
|
bd11e985e1 |
@@ -5,9 +5,15 @@ cd frontend && pnpm run commitlint --edit $1
|
||||
|
||||
branch="$(git rev-parse --abbrev-ref HEAD)"
|
||||
|
||||
color_red="$(tput setaf 1)"
|
||||
bold="$(tput bold)"
|
||||
reset="$(tput sgr0)"
|
||||
if [ -n "$TERM" ] && [ "$TERM" != "dumb" ]; then
|
||||
color_red="$(tput setaf 1)"
|
||||
bold="$(tput bold)"
|
||||
reset="$(tput sgr0)"
|
||||
else
|
||||
color_red=""
|
||||
bold=""
|
||||
reset=""
|
||||
fi
|
||||
|
||||
if [ "$branch" = "main" ]; then
|
||||
echo "${color_red}${bold}You can't commit directly to the main branch${reset}"
|
||||
|
||||
@@ -53,7 +53,7 @@
|
||||
"@signozhq/design-tokens": "2.1.4",
|
||||
"@signozhq/icons": "0.4.0",
|
||||
"@signozhq/resizable": "0.0.2",
|
||||
"@signozhq/ui": "0.0.18",
|
||||
"@signozhq/ui": "0.0.19",
|
||||
"@tanstack/react-table": "8.21.3",
|
||||
"@tanstack/react-virtual": "3.13.22",
|
||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||
|
||||
76
frontend/pnpm-lock.yaml
generated
76
frontend/pnpm-lock.yaml
generated
@@ -89,8 +89,8 @@ importers:
|
||||
specifier: 0.0.2
|
||||
version: 0.0.2(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
|
||||
'@signozhq/ui':
|
||||
specifier: 0.0.18
|
||||
version: 0.0.18(@emotion/is-prop-valid@1.2.0)(@signozhq/icons@0.4.0)(@types/react-dom@18.0.10)(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react-router-dom@5.3.4(react@18.2.0))(react-router@6.27.0(react@18.2.0))(react@18.2.0)
|
||||
specifier: 0.0.19
|
||||
version: 0.0.19(@emotion/is-prop-valid@1.2.0)(@signozhq/icons@0.4.0)(@types/react-dom@18.0.10)(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react-router-dom@5.3.4(react@18.2.0))(react-router@6.27.0(react@18.2.0))(react@18.2.0)
|
||||
'@tanstack/react-table':
|
||||
specifier: 8.21.3
|
||||
version: 8.21.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
|
||||
@@ -1907,89 +1907,105 @@ packages:
|
||||
resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linux-arm@1.2.4':
|
||||
resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linux-ppc64@1.2.4':
|
||||
resolution: {integrity: sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linux-riscv64@1.2.4':
|
||||
resolution: {integrity: sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linux-s390x@1.2.4':
|
||||
resolution: {integrity: sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linux-x64@1.2.4':
|
||||
resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-libvips-linuxmusl-arm64@1.2.4':
|
||||
resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@img/sharp-libvips-linuxmusl-x64@1.2.4':
|
||||
resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@img/sharp-linux-arm64@0.34.5':
|
||||
resolution: {integrity: sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linux-arm@0.34.5':
|
||||
resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linux-ppc64@0.34.5':
|
||||
resolution: {integrity: sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linux-riscv64@0.34.5':
|
||||
resolution: {integrity: sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linux-s390x@0.34.5':
|
||||
resolution: {integrity: sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linux-x64@0.34.5':
|
||||
resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@img/sharp-linuxmusl-arm64@0.34.5':
|
||||
resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@img/sharp-linuxmusl-x64@0.34.5':
|
||||
resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==}
|
||||
engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@img/sharp-wasm32@0.34.5':
|
||||
resolution: {integrity: sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==}
|
||||
@@ -2344,48 +2360,56 @@ packages:
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxfmt/binding-linux-arm64-musl@0.47.0':
|
||||
resolution: {integrity: sha512-IxtQC/sbBi4ubbY+MdwdanRWrG9InQJVZqyMsBa5IUaQcnSg86gQme574HxXMC1p4bo4YhV99zQ+wNnGCvEgzw==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxfmt/binding-linux-ppc64-gnu@0.47.0':
|
||||
resolution: {integrity: sha512-EWXEhOMbWO0q6eJSbu0QLkU8cKi0ljlYLngeDs2Ocu/pm1rrLwyQiYzlFbdnMRURI4w9ndr1sI9rSbhlJ5o23Q==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxfmt/binding-linux-riscv64-gnu@0.47.0':
|
||||
resolution: {integrity: sha512-tZrjS11TUiDuEpRaqdk8K9F9xETRyKXfuZKmdeW+Gj7coBnm7+8sBEfyt033EAFEQSlkniAXvBLh+Qja2ioGBQ==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxfmt/binding-linux-riscv64-musl@0.47.0':
|
||||
resolution: {integrity: sha512-KBFy+2CFKUCZzYwX2ZOPQKck1vjQbz+hextuc19G4r0WRJwadfAeuQMQRQvB+Ivc8brlbOVg7et8K7E467440g==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxfmt/binding-linux-s390x-gnu@0.47.0':
|
||||
resolution: {integrity: sha512-REUPFKVGSiK99B+9eaPhluEVglzaoj/SMykNC5SUiV2RSsBfV5lWN7Y0iCIc251Wz3GaeAGZsJ/zj3gjarxdFg==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxfmt/binding-linux-x64-gnu@0.47.0':
|
||||
resolution: {integrity: sha512-KVftVSVEDeIfRW3TIeLe3aNI/iY4m1fu5mDwHcisKMZSCMKLkrhFsjowC7o9RoqNPxbbglm2+/6KAKBIts2t0Q==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxfmt/binding-linux-x64-musl@0.47.0':
|
||||
resolution: {integrity: sha512-DTsmGEaA2860Aq5VUyDO8/MT9NFxwVL93RnRYmpMwK6DsSkThmvEpqoUDDljziEpAedMRG19SCogrNbINSbLUQ==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxfmt/binding-openharmony-arm64@0.47.0':
|
||||
resolution: {integrity: sha512-8r5BDro7fLOBoq1JXHLVSs55OlrxQhEso4HVo0TcY7OXJUPYfjPoOaYL5us+yIwqyP9rQwN+rxuiNFSmaxSuOQ==}
|
||||
@@ -2488,48 +2512,56 @@ packages:
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxlint/binding-linux-arm64-musl@1.62.0':
|
||||
resolution: {integrity: sha512-8eCy3FCDuWUM5hWujAv6heMvfZPbcCOU3SdQUAkixZLu5bSzOkNfirJiLGoQFO943xceOKkiQRMQNzH++jM3WA==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxlint/binding-linux-ppc64-gnu@1.62.0':
|
||||
resolution: {integrity: sha512-NjQ7K7tpTPDe9J+yq8p/s/J0E7lRCkK2uDBDqvT4XIT6f4Z0tlnr59OBg/WcrmVHER1AbrcfyxhGTXgcG8ytWg==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxlint/binding-linux-riscv64-gnu@1.62.0':
|
||||
resolution: {integrity: sha512-oKZed9gmSwze29dEt3/Wnsv6l/Ygw/FUst+8Kfpv2SGeS/glEoTGZAMQw37SVyzFV76UTHJN2snGgxK2t2+8ow==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxlint/binding-linux-riscv64-musl@1.62.0':
|
||||
resolution: {integrity: sha512-gBjBxQ+9lGpAYq+ELqw0w8QXsBnkZclFc7GRX2r0LnEVn3ZTEqeIKpKcGjucmp76Q53bvJD0i4qBWBhcfhSfGA==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxlint/binding-linux-s390x-gnu@1.62.0':
|
||||
resolution: {integrity: sha512-Ew2Kxs9EQ9/mbAIJ2hvocMC0wsOu6YKzStI2eFBDt+Td5O8seVC/oxgRIHqCcl5sf5ratA1nozQBAuv7tphkHg==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxlint/binding-linux-x64-gnu@1.62.0':
|
||||
resolution: {integrity: sha512-5z25jcAA0gfKyVwz71A0VXgaPlocPoTAxhlv/hgoK6tlCrfoNuw7haWbDHvGMfjXhdic4EqVXGRv5XsTqFnbRQ==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@oxlint/binding-linux-x64-musl@1.62.0':
|
||||
resolution: {integrity: sha512-IWpHmMB6ZDllPvqWDkG6AmXrN7JF5e/c4g/0PuURsmlK+vHoYZPB70rr4u1bn3I4LsKCSpqqfveyx6UCOC8wdg==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@oxlint/binding-openharmony-arm64@1.62.0':
|
||||
resolution: {integrity: sha512-fjlSxxrD5pA594vkyikCS9MnPRjQawW6/BLgyTYkO+73wwPlYjkcZ7LSd974l0Q2zkHQmu4DPvJFLYA7o8xrxQ==}
|
||||
@@ -2584,36 +2616,42 @@ packages:
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@parcel/watcher-linux-arm-musl@2.5.1':
|
||||
resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@parcel/watcher-linux-arm64-glibc@2.5.1':
|
||||
resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@parcel/watcher-linux-arm64-musl@2.5.1':
|
||||
resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@parcel/watcher-linux-x64-glibc@2.5.1':
|
||||
resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@parcel/watcher-linux-x64-musl@2.5.1':
|
||||
resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@parcel/watcher-win32-arm64@2.5.1':
|
||||
resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==}
|
||||
@@ -3474,24 +3512,28 @@ packages:
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@rolldown/binding-linux-arm64-musl@1.0.0-beta.53':
|
||||
resolution: {integrity: sha512-bGe5EBB8FVjHBR1mOLOPEFg1Lp3//7geqWkU5NIhxe+yH0W8FVrQ6WRYOap4SUTKdklD/dC4qPLREkMMQ855FA==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@rolldown/binding-linux-x64-gnu@1.0.0-beta.53':
|
||||
resolution: {integrity: sha512-qL+63WKVQs1CMvFedlPt0U9PiEKJOAL/bsHMKUDS6Vp2Q+YAv/QLPu8rcvkfIMvQ0FPU2WL0aX4eWwF6e/GAnA==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@rolldown/binding-linux-x64-musl@1.0.0-beta.53':
|
||||
resolution: {integrity: sha512-VGl9JIGjoJh3H8Mb+7xnVqODajBmrdOOb9lxWXdcmxyI+zjB2sux69br0hZJDTyLJfvBoYm439zPACYbCjGRmw==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@rolldown/binding-openharmony-arm64@1.0.0-beta.53':
|
||||
resolution: {integrity: sha512-B4iIserJXuSnNzA5xBLFUIjTfhNy7d9sq4FUMQY3GhQWGVhS2RWWzzDnkSU6MUt7/aHUrep0CdQfXUJI9D3W7A==}
|
||||
@@ -3644,8 +3686,8 @@ packages:
|
||||
peerDependencies:
|
||||
react: ^18.2.0
|
||||
|
||||
'@signozhq/ui@0.0.18':
|
||||
resolution: {integrity: sha512-1p3ALh76kafiz5yX7ReNKVcHDt2od7CcZD/Vx9i2adTwTeynkLJcEfVoXoJD3oh1kKTleooOiOjRyxlA7VzmSA==}
|
||||
'@signozhq/ui@0.0.19':
|
||||
resolution: {integrity: sha512-2q6aRxN/PR4PlR2xJZAREEuvLPiDFggfFKzCW2Z5vHVVbrgnvZHWD1jPUuwszfEg0ceH3UvkwqceO7wN4uRJAA==}
|
||||
peerDependencies:
|
||||
'@signozhq/icons': 0.3.0
|
||||
react: ^18.2.0
|
||||
@@ -4266,41 +4308,49 @@ packages:
|
||||
resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@unrs/resolver-binding-linux-arm64-musl@1.11.1':
|
||||
resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@unrs/resolver-binding-linux-ppc64-gnu@1.11.1':
|
||||
resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@unrs/resolver-binding-linux-riscv64-gnu@1.11.1':
|
||||
resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@unrs/resolver-binding-linux-riscv64-musl@1.11.1':
|
||||
resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@unrs/resolver-binding-linux-s390x-gnu@1.11.1':
|
||||
resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@unrs/resolver-binding-linux-x64-gnu@1.11.1':
|
||||
resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
'@unrs/resolver-binding-linux-x64-musl@1.11.1':
|
||||
resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
'@unrs/resolver-binding-wasm32-wasi@1.11.1':
|
||||
resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==}
|
||||
@@ -4367,7 +4417,7 @@ packages:
|
||||
resolution: {integrity: sha512-VIcFLdRi/VYRU8OL/puL7QXMYafHmqOnwTZY50U1JPlCNj30PxCMx65c494b1K9be9hX83KVt0+gTEwTWLqToA==}
|
||||
engines: {node: ^20.19.0 || >=22.12.0}
|
||||
peerDependencies:
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0
|
||||
|
||||
'@webassemblyjs/ast@1.14.1':
|
||||
resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==}
|
||||
@@ -7194,24 +7244,28 @@ packages:
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
lightningcss-linux-arm64-musl@1.31.1:
|
||||
resolution: {integrity: sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
lightningcss-linux-x64-gnu@1.31.1:
|
||||
resolution: {integrity: sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [glibc]
|
||||
|
||||
lightningcss-linux-x64-musl@1.31.1:
|
||||
resolution: {integrity: sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
libc: [musl]
|
||||
|
||||
lightningcss-win32-arm64-msvc@1.31.1:
|
||||
resolution: {integrity: sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w==}
|
||||
@@ -10239,7 +10293,7 @@ packages:
|
||||
oxlint: '>=1'
|
||||
stylelint: '>=16'
|
||||
typescript: '*'
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: '>=5.4.21'
|
||||
vls: '*'
|
||||
vti: '*'
|
||||
vue-tsc: ~2.2.10 || ^3.0.0
|
||||
@@ -10268,12 +10322,12 @@ packages:
|
||||
vite-plugin-compression@0.5.1:
|
||||
resolution: {integrity: sha512-5QJKBDc+gNYVqL/skgFAP81Yuzo9R+EAf19d+EtsMF/i8kFUpNi3J/H01QD3Oo8zBQn+NzoCIFkpPLynoOzaJg==}
|
||||
peerDependencies:
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: '>=2.0.0'
|
||||
|
||||
vite-plugin-html@3.2.2:
|
||||
resolution: {integrity: sha512-vb9C9kcdzcIo/Oc3CLZVS03dL5pDlOFuhGlZYDCJ840BhWl/0nGeZWf3Qy7NlOayscY4Cm/QRgULCQkEZige5Q==}
|
||||
peerDependencies:
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: '>=2.0.0'
|
||||
|
||||
vite-plugin-image-optimizer@2.0.3:
|
||||
resolution: {integrity: sha512-1vrFOTcpSvv6DCY7h8UXab4wqMAjTJB/ndOzG/Kmj1oDOuPF6mbjkNQoGzzCEYeWGe7qU93jc8oQqvoJ57al3A==}
|
||||
@@ -10281,7 +10335,7 @@ packages:
|
||||
peerDependencies:
|
||||
sharp: '>=0.34.0'
|
||||
svgo: '>=4'
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: '>=5'
|
||||
peerDependenciesMeta:
|
||||
sharp:
|
||||
optional: true
|
||||
@@ -10291,7 +10345,7 @@ packages:
|
||||
vite-tsconfig-paths@6.1.1:
|
||||
resolution: {integrity: sha512-2cihq7zliibCCZ8P9cKJrQBkfgdvcFkOOc3Y02o3GWUDLgqjWsZudaoiuOwO/gzTzy17cS5F7ZPo4bsnS4DGkg==}
|
||||
peerDependencies:
|
||||
vite: npm:rolldown-vite@7.3.1
|
||||
vite: '*'
|
||||
|
||||
void-elements@3.1.0:
|
||||
resolution: {integrity: sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==}
|
||||
@@ -13901,7 +13955,7 @@ snapshots:
|
||||
- react-dom
|
||||
- tailwindcss
|
||||
|
||||
'@signozhq/ui@0.0.18(@emotion/is-prop-valid@1.2.0)(@signozhq/icons@0.4.0)(@types/react-dom@18.0.10)(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react-router-dom@5.3.4(react@18.2.0))(react-router@6.27.0(react@18.2.0))(react@18.2.0)':
|
||||
'@signozhq/ui@0.0.19(@emotion/is-prop-valid@1.2.0)(@signozhq/icons@0.4.0)(@types/react-dom@18.0.10)(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react-router-dom@5.3.4(react@18.2.0))(react-router@6.27.0(react@18.2.0))(react@18.2.0)':
|
||||
dependencies:
|
||||
'@chenglou/pretext': 0.0.5
|
||||
'@radix-ui/react-checkbox': 1.3.3(@types/react-dom@18.0.10)(@types/react@18.0.26)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { Dot, Sparkles } from '@signozhq/icons';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import { Popover } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import {
|
||||
@@ -97,7 +97,7 @@ function HeaderRightSection({
|
||||
</span>
|
||||
) : null}
|
||||
|
||||
<Tooltip title="AI Assistant">
|
||||
<TooltipSimple title="AI Assistant">
|
||||
<Button
|
||||
variant="solid"
|
||||
color="secondary"
|
||||
@@ -113,7 +113,7 @@ function HeaderRightSection({
|
||||
>
|
||||
AI Assistant
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useCallback } from 'react';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import { Drawer } from 'antd';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { Maximize2, MessageSquare, Plus, X } from '@signozhq/icons';
|
||||
@@ -52,7 +52,7 @@ export default function AIAssistantDrawer(): JSX.Element {
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Tooltip title="New conversation">
|
||||
<TooltipSimple title="New conversation">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -62,9 +62,9 @@ export default function AIAssistantDrawer(): JSX.Element {
|
||||
>
|
||||
<Plus size={16} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Open full screen">
|
||||
<TooltipSimple title="Open full screen">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -75,9 +75,9 @@ export default function AIAssistantDrawer(): JSX.Element {
|
||||
>
|
||||
<Maximize2 size={16} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Close">
|
||||
<TooltipSimple title="Close">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -87,7 +87,7 @@ export default function AIAssistantDrawer(): JSX.Element {
|
||||
>
|
||||
<X size={16} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useCallback, useEffect, useState } from 'react';
|
||||
import { createPortal } from 'react-dom';
|
||||
import { useHistory } from 'react-router-dom';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { History, Maximize2, Minus, Plus, Sparkles, X } from '@signozhq/icons';
|
||||
|
||||
@@ -132,7 +132,7 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
</div>
|
||||
|
||||
<div className={styles.actions}>
|
||||
<Tooltip title={showHistory ? 'Back to chat' : 'Conversations'}>
|
||||
<TooltipSimple title={showHistory ? 'Back to chat' : 'Conversations'}>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -142,9 +142,9 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
>
|
||||
<History size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="New conversation">
|
||||
<TooltipSimple title="New conversation">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -153,9 +153,9 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
>
|
||||
<Plus size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Open full screen">
|
||||
<TooltipSimple title="Open full screen">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -165,9 +165,9 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
>
|
||||
<Maximize2 size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Minimize to side panel">
|
||||
<TooltipSimple title="Minimize to side panel">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -176,9 +176,9 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
>
|
||||
<Minus size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Close">
|
||||
<TooltipSimple title="Close">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -187,7 +187,7 @@ export default function AIAssistantModal(): JSX.Element | null {
|
||||
>
|
||||
<X size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useCallback, useLayoutEffect, useRef, useState } from 'react';
|
||||
import { matchPath, useHistory, useLocation } from 'react-router-dom';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { History, Maximize2, Plus, Sparkles, X } from '@signozhq/icons';
|
||||
|
||||
@@ -125,7 +125,7 @@ export default function AIAssistantPanel(): JSX.Element | null {
|
||||
</div>
|
||||
|
||||
<div className={styles.actions}>
|
||||
<Tooltip title={showHistory ? 'Back to chat' : 'Conversations'}>
|
||||
<TooltipSimple title={showHistory ? 'Back to chat' : 'Conversations'}>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -135,9 +135,9 @@ export default function AIAssistantPanel(): JSX.Element | null {
|
||||
>
|
||||
<History size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="New conversation">
|
||||
<TooltipSimple title="New conversation">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -147,9 +147,9 @@ export default function AIAssistantPanel(): JSX.Element | null {
|
||||
>
|
||||
<Plus size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Open full screen">
|
||||
<TooltipSimple title="Open full screen">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -160,9 +160,9 @@ export default function AIAssistantPanel(): JSX.Element | null {
|
||||
>
|
||||
<Maximize2 size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Close">
|
||||
<TooltipSimple title="Close">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -172,7 +172,7 @@ export default function AIAssistantPanel(): JSX.Element | null {
|
||||
>
|
||||
<X size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { matchPath, useLocation } from 'react-router-dom';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { Bot } from '@signozhq/icons';
|
||||
|
||||
@@ -30,7 +30,7 @@ export default function AIAssistantTrigger(): JSX.Element | null {
|
||||
}
|
||||
|
||||
return (
|
||||
<Tooltip title="AI Assistant">
|
||||
<TooltipSimple title="AI Assistant">
|
||||
<Button
|
||||
variant="solid"
|
||||
color="primary"
|
||||
@@ -40,6 +40,6 @@ export default function AIAssistantTrigger(): JSX.Element | null {
|
||||
>
|
||||
<Bot size={20} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
import cx from 'classnames';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import type { MessageActionDTO } from 'api/ai-assistant/sigNozAIAssistantAPI.schemas';
|
||||
import {
|
||||
ApplyFilterSignalDTO,
|
||||
@@ -524,9 +524,9 @@ export default function ActionsSection({
|
||||
);
|
||||
|
||||
return tooltip ? (
|
||||
<Tooltip key={key} title={tooltip}>
|
||||
<TooltipSimple key={key} title={tooltip}>
|
||||
{chip}
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
) : (
|
||||
<span key={key}>{chip}</span>
|
||||
);
|
||||
|
||||
@@ -5,7 +5,7 @@ import { Badge } from '@signozhq/ui/badge';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Input } from '@signozhq/ui/input';
|
||||
import { Popover, PopoverContent, PopoverTrigger } from '@signozhq/ui/popover';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import type { UploadFile } from 'antd';
|
||||
import {
|
||||
getListRulesQueryKey,
|
||||
@@ -899,7 +899,7 @@ export default function ChatInput({
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<Tooltip title="Voice input">
|
||||
<TooltipSimple title="Voice input">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
@@ -910,11 +910,11 @@ export default function ChatInput({
|
||||
>
|
||||
<Mic size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
))}
|
||||
|
||||
{isStreaming && onCancel ? (
|
||||
<Tooltip title="Stop generating">
|
||||
<TooltipSimple title="Stop generating">
|
||||
<Button
|
||||
variant="solid"
|
||||
size="icon"
|
||||
@@ -924,7 +924,7 @@ export default function ChatInput({
|
||||
>
|
||||
<Square size={10} fill="currentColor" strokeWidth={0} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
) : (
|
||||
<Button
|
||||
variant="solid"
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useEffect, useMemo, useState } from 'react';
|
||||
import cx from 'classnames';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Input } from '@signozhq/ui/input';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import { Plus, Search } from '@signozhq/icons';
|
||||
|
||||
import { useAIAssistantStore } from '../../store/useAIAssistantStore';
|
||||
@@ -157,7 +157,7 @@ export default function ConversationsList({
|
||||
{isLoadingThreads && <HeaderLoadingDots />}
|
||||
|
||||
{!isLoadingThreads && showAddNewConversation && (
|
||||
<Tooltip title="New conversation">
|
||||
<TooltipSimple title="New conversation">
|
||||
<Button
|
||||
variant="solid"
|
||||
size="sm"
|
||||
@@ -167,7 +167,7 @@ export default function ConversationsList({
|
||||
>
|
||||
<Plus size={12} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
)}
|
||||
</div>
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import cx from 'classnames';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { DialogWrapper } from '@signozhq/ui/dialog';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import { Check, Copy, RefreshCw, ThumbsDown, ThumbsUp } from '@signozhq/icons';
|
||||
import { useTimezone } from 'providers/Timezone';
|
||||
@@ -126,7 +126,7 @@ export default function MessageFeedback({
|
||||
<>
|
||||
<div className={cx(styles.feedback, { [styles.visible]: isLastAssistant })}>
|
||||
<div className={styles.actions}>
|
||||
<Tooltip title={copied ? 'Copied!' : 'Copy'}>
|
||||
<TooltipSimple title={copied ? 'Copied!' : 'Copy'}>
|
||||
<Button
|
||||
className={styles.btn}
|
||||
size="icon"
|
||||
@@ -136,9 +136,9 @@ export default function MessageFeedback({
|
||||
>
|
||||
{copied ? <Check size={12} /> : <Copy size={12} />}
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Good response">
|
||||
<TooltipSimple title="Good response">
|
||||
<Button
|
||||
className={cx(styles.btn, { [styles.votedUp]: vote === 'positive' })}
|
||||
size="icon"
|
||||
@@ -148,9 +148,9 @@ export default function MessageFeedback({
|
||||
>
|
||||
<ThumbsUp size={12} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
<Tooltip title="Bad response">
|
||||
<TooltipSimple title="Bad response">
|
||||
<Button
|
||||
className={cx(styles.btn, {
|
||||
[styles.votedDown]: vote === 'negative',
|
||||
@@ -162,10 +162,10 @@ export default function MessageFeedback({
|
||||
>
|
||||
<ThumbsDown size={12} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
|
||||
{onRegenerate && (
|
||||
<Tooltip title="Regenerate">
|
||||
<TooltipSimple title="Regenerate">
|
||||
<Button
|
||||
className={styles.btn}
|
||||
size="icon"
|
||||
@@ -175,7 +175,7 @@ export default function MessageFeedback({
|
||||
>
|
||||
<RefreshCw size={12} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
)}
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { useCallback, useState } from 'react';
|
||||
import { useCopyToClipboard } from 'react-use';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple } from '@signozhq/ui/tooltip';
|
||||
import { Check, Copy } from '@signozhq/icons';
|
||||
|
||||
import { Message } from '../../types';
|
||||
@@ -32,7 +32,7 @@ export default function UserMessageActions({
|
||||
|
||||
return (
|
||||
<div className={styles.actions}>
|
||||
<Tooltip title={copied ? 'Copied!' : 'Copy'}>
|
||||
<TooltipSimple title={copied ? 'Copied!' : 'Copy'}>
|
||||
<Button
|
||||
className={styles.btn}
|
||||
size="icon"
|
||||
@@ -42,7 +42,7 @@ export default function UserMessageActions({
|
||||
>
|
||||
{copied ? <Check size={12} /> : <Copy size={12} />}
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import { Tooltip, TooltipProvider } from '@signozhq/ui/tooltip';
|
||||
import { TooltipSimple, TooltipProvider } from '@signozhq/ui/tooltip';
|
||||
import { Copy } from '@signozhq/icons';
|
||||
import './CopyIconButton.styles.scss';
|
||||
|
||||
@@ -20,7 +20,7 @@ function CopyIconButton({
|
||||
|
||||
return (
|
||||
<TooltipProvider>
|
||||
<Tooltip title={tooltipTitle}>
|
||||
<TooltipSimple title={tooltipTitle}>
|
||||
<span>
|
||||
<Button
|
||||
color="secondary"
|
||||
@@ -33,7 +33,7 @@ function CopyIconButton({
|
||||
onClick={onCopy}
|
||||
/>
|
||||
</span>
|
||||
</Tooltip>
|
||||
</TooltipSimple>
|
||||
</TooltipProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
TabsTrigger,
|
||||
} from '@signozhq/ui/tabs';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -497,7 +497,7 @@ function SpanDetailsPanel({
|
||||
key: 'dock-toggle',
|
||||
component: (
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -515,7 +515,7 @@ function SpanDetailsPanel({
|
||||
<TooltipContent className="dock-toggle-tooltip">
|
||||
{isDocked ? 'Open as floating panel' : 'Dock at the bottom'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
),
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -149,7 +149,7 @@ export function SpanHoverCard({
|
||||
|
||||
return (
|
||||
<TooltipProvider>
|
||||
<Tooltip open={hoverCardData !== null} onOpenChange={onOpenChange}>
|
||||
<TooltipRoot open={hoverCardData !== null} onOpenChange={onOpenChange}>
|
||||
<TooltipTrigger asChild>
|
||||
<div
|
||||
className="span-hover-card-anchor"
|
||||
@@ -168,7 +168,7 @@ export function SpanHoverCard({
|
||||
>
|
||||
{hoverCardData && <SpanTooltipContent {...hoverCardData.tooltip} />}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useCallback, useState } from 'react';
|
||||
import { useParams } from 'react-router-dom';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -145,7 +145,7 @@ function TraceDetailsHeader({
|
||||
{!isFilterExpanded && (
|
||||
<>
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -158,7 +158,7 @@ function TraceDetailsHeader({
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>Analytics</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
<TraceOptionsMenu
|
||||
showTraceDetails={showTraceDetails}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -22,7 +22,7 @@ export default function SpanLineActionButtons({
|
||||
return (
|
||||
<div className="span-line-action-buttons">
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -37,7 +37,7 @@ export default function SpanLineActionButtons({
|
||||
<TooltipContent className="span-line-action-tooltip">
|
||||
Copy Span Link
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -15,7 +15,7 @@ import { ToggleGroup, ToggleGroupItem } from '@signozhq/ui/toggle-group';
|
||||
import { toast } from '@signozhq/ui/sonner';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -269,7 +269,7 @@ function Filters({
|
||||
<>
|
||||
{isFetching && <Loader className="animate-spin" />}
|
||||
{error && (
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<span className="filter-status filter-status--error">
|
||||
<Info />
|
||||
@@ -279,7 +279,7 @@ function Filters({
|
||||
<TooltipContent>
|
||||
{(error as AxiosError)?.message || 'Something went wrong'}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
)}
|
||||
{!error && noData && (
|
||||
<Typography.Text className="filter-status">
|
||||
@@ -304,7 +304,7 @@ function Filters({
|
||||
<TooltipProvider>
|
||||
<div className="trace-v3-filter-row collapsed">
|
||||
{expression ? (
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>{pill}</TooltipTrigger>
|
||||
<TooltipContent side="bottom" align="start">
|
||||
<div className="filter-pill-popover">
|
||||
@@ -328,7 +328,7 @@ function Filters({
|
||||
<div className="filter-pill-popover__expression">{expression}</div>
|
||||
</div>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
) : (
|
||||
pill
|
||||
)}
|
||||
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
import { Badge } from '@signozhq/ui/badge';
|
||||
import { Button } from '@signozhq/ui/button';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipRoot,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
@@ -112,9 +112,9 @@ const LazyEventDotPopover = memo(function LazyEventDotPopover({
|
||||
|
||||
return (
|
||||
<TooltipProvider>
|
||||
<Tooltip
|
||||
<TooltipRoot
|
||||
open
|
||||
onOpenChange={(open): void => {
|
||||
onOpenChange={(open: boolean): void => {
|
||||
if (!open) {
|
||||
setShowPopover(false);
|
||||
}
|
||||
@@ -129,7 +129,7 @@ const LazyEventDotPopover = memo(function LazyEventDotPopover({
|
||||
attributeMap={event.attributeMap || {}}
|
||||
/>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
);
|
||||
});
|
||||
@@ -329,7 +329,7 @@ const SpanOverview = memo(function SpanOverview({
|
||||
{/* Action buttons — shown on hover via CSS, right-aligned */}
|
||||
<span className="span-row-actions">
|
||||
<TooltipProvider delayDuration={200}>
|
||||
<Tooltip>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -344,8 +344,8 @@ const SpanOverview = memo(function SpanOverview({
|
||||
<TooltipContent className="span-action-tooltip">
|
||||
Copy Span Link
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
<Tooltip>
|
||||
</TooltipRoot>
|
||||
<TooltipRoot>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
@@ -360,7 +360,7 @@ const SpanOverview = memo(function SpanOverview({
|
||||
<TooltipContent className="span-action-tooltip">
|
||||
Add to Trace Funnel
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipRoot>
|
||||
</TooltipProvider>
|
||||
</span>
|
||||
</div>
|
||||
|
||||
76
pkg/modules/tag/impltag/module.go
Normal file
76
pkg/modules/tag/impltag/module.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package impltag
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/modules/tag"
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// module is the default tag.Module implementation. Persistence is delegated
// to a tagtypes.Store; this layer adds input canonicalization and
// transaction orchestration.
type module struct {
	store tagtypes.Store
}

// NewModule returns a tag.Module backed by the given store.
func NewModule(store tagtypes.Store) tag.Module {
	return &module{store: store}
}
|
||||
|
||||
// SyncTags canonicalizes the postable tags (creating rows for pairs that do
// not yet exist) and reconciles the resource's tag links to exactly that set.
// Everything runs inside a single store transaction, so a failure leaves both
// the tag table and the relation table untouched.
//
// NOTE(review): syncLinksForResource opens its own RunInTx inside this
// transaction — presumably the store's RunInTx joins an in-flight transaction
// via the context rather than starting an independent one; confirm against
// the sqlstore implementation.
func (m *module) SyncTags(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID, postable []tagtypes.PostableTag) ([]*tagtypes.Tag, error) {
	var tags []*tagtypes.Tag
	err := m.store.RunInTx(ctx, func(ctx context.Context) error {
		resolved, err := m.createMany(ctx, orgID, kind, postable)
		if err != nil {
			return err
		}
		// Link the resource to every resolved tag ID (existing and newly created).
		tagIDs := make([]valuer.UUID, len(resolved))
		for i, t := range resolved {
			tagIDs[i] = t.ID
		}
		if err := m.syncLinksForResource(ctx, orgID, kind, resourceID, tagIDs); err != nil {
			return err
		}
		// Capture the result outside the closure only once everything succeeded.
		tags = resolved
		return nil
	})
	if err != nil {
		return nil, err
	}
	return tags, nil
}
|
||||
|
||||
func (m *module) createMany(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, postable []tagtypes.PostableTag) ([]*tagtypes.Tag, error) {
|
||||
if len(postable) == 0 {
|
||||
return []*tagtypes.Tag{}, nil
|
||||
}
|
||||
|
||||
toCreate, matched, err := m.resolve(ctx, orgID, kind, postable)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
created, err := m.store.CreateOrGet(ctx, toCreate)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return append(matched, created...), nil
|
||||
}
|
||||
|
||||
// syncLinksForResource makes tagIDs the exact link set for (kind, resourceID):
// missing relations are inserted, and any relation pointing at an org tag
// outside tagIDs is removed. Runs in a store transaction so the insert and
// the cleanup are atomic.
func (m *module) syncLinksForResource(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID, tagIDs []valuer.UUID) error {
	return m.store.RunInTx(ctx, func(ctx context.Context) error {
		if err := m.store.CreateRelations(ctx, tagtypes.NewTagRelations(kind, resourceID, tagIDs)); err != nil {
			return err
		}
		return m.store.DeleteRelationsExcept(ctx, orgID, kind, resourceID, tagIDs)
	})
}
|
||||
|
||||
// ListForResource returns all tags linked to a single resource. Thin
// pass-through to the store.
func (m *module) ListForResource(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID) ([]*tagtypes.Tag, error) {
	return m.store.ListByResource(ctx, orgID, kind, resourceID)
}
|
||||
|
||||
// ListForResources returns tags for many resources at once, grouped by
// resource ID. Resources with no tags are absent from the map (see the store
// contract). Thin pass-through to the store.
func (m *module) ListForResources(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceIDs []valuer.UUID) (map[valuer.UUID][]*tagtypes.Tag, error) {
	return m.store.ListByResources(ctx, orgID, kind, resourceIDs)
}
|
||||
59
pkg/modules/tag/impltag/resolve.go
Normal file
59
pkg/modules/tag/impltag/resolve.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package impltag
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// resolve canonicalizes a batch of user-supplied (key, value) tag pairs against
|
||||
// the existing tags for an org. Lookup is case-insensitive on both key and
|
||||
// value (matching the storage uniqueness rule); when an existing row matches,
|
||||
// its display casing is reused. Inputs are deduped on (LOWER(key), LOWER(value));
|
||||
// the first input's casing wins on collisions. Returns:
|
||||
// - toCreate: new Tag rows the caller should insert (with pre-generated IDs)
|
||||
// - matched: existing rows the caller's input already pointed to. They
|
||||
// already carry authoritative IDs from the store.
|
||||
func (m *module) resolve(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, postable []tagtypes.PostableTag) ([]*tagtypes.Tag, []*tagtypes.Tag, error) {
|
||||
if len(postable) == 0 {
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
existing, err := m.store.List(ctx, orgID, kind)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
lowercaseTagsMap := make(map[string]*tagtypes.Tag, len(existing))
|
||||
for _, t := range existing {
|
||||
mapKey := strings.ToLower(t.Key) + "\x00" + strings.ToLower(t.Value)
|
||||
lowercaseTagsMap[mapKey] = t
|
||||
}
|
||||
|
||||
seenInRequestAlready := make(map[string]struct{}, len(postable)) // postable can have the same tag multiple times
|
||||
toCreate := make([]*tagtypes.Tag, 0)
|
||||
matched := make([]*tagtypes.Tag, 0)
|
||||
|
||||
for _, p := range postable {
|
||||
key, value, err := tagtypes.ValidatePostableTag(p)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
lookup := strings.ToLower(key) + "\x00" + strings.ToLower(value)
|
||||
if _, dup := seenInRequestAlready[lookup]; dup {
|
||||
continue
|
||||
}
|
||||
seenInRequestAlready[lookup] = struct{}{}
|
||||
|
||||
if existingTag, ok := lowercaseTagsMap[lookup]; ok {
|
||||
matched = append(matched, existingTag)
|
||||
continue
|
||||
}
|
||||
toCreate = append(toCreate, tagtypes.NewTag(orgID, kind, key, value))
|
||||
}
|
||||
|
||||
return toCreate, matched, nil
|
||||
}
|
||||
112
pkg/modules/tag/impltag/resolve_test.go
Normal file
112
pkg/modules/tag/impltag/resolve_test.go
Normal file
@@ -0,0 +1,112 @@
|
||||
package impltag
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes/tagtypestest"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// testKind pins every subtest to one resource kind; resolve treats kind as an
// opaque scoping value, so a single kind suffices.
var testKind = coretypes.KindDashboard

// TestModule_Resolve exercises the pure canonicalization logic in
// module.resolve against the in-memory tagtypestest.Store.
func TestModule_Resolve(t *testing.T) {
	t.Run("empty input does not hit store", func(t *testing.T) {
		store := tagtypestest.NewStore()
		m := &module{store: store}

		toCreate, matched, err := m.resolve(context.Background(), valuer.GenerateUUID(), testKind, nil)
		require.NoError(t, err)
		assert.Empty(t, toCreate)
		assert.Empty(t, matched)
		assert.Zero(t, store.ListCallCount, "should not hit store when input is empty")
	})

	t.Run("creates missing pairs and reuses existing", func(t *testing.T) {
		orgID := valuer.GenerateUUID()
		dbTag := tagtypes.NewTag(orgID, testKind, "team", "Pulse")
		dbTag2 := tagtypes.NewTag(orgID, testKind, "Database", "redis")
		store := tagtypestest.NewStore()
		store.Tags = []*tagtypes.Tag{dbTag, dbTag2}
		m := &module{store: store}

		toCreate, matched, err := m.resolve(context.Background(), orgID, testKind, []tagtypes.PostableTag{
			{Key: "team", Value: "events"}, // new
			{Key: "DATABASE", Value: "REDIS"}, // case-only conflict
			{Key: "Brand", Value: "New"}, // new
		})
		require.NoError(t, err)

		// Compare on the same case-folded composite key the implementation uses.
		createdLowerKVs := []string{}
		for _, tg := range toCreate {
			createdLowerKVs = append(createdLowerKVs, strings.ToLower(tg.Key)+"\x00"+strings.ToLower(tg.Value))
		}
		assert.ElementsMatch(t, []string{"team\x00events", "brand\x00new"}, createdLowerKVs,
			"only the two missing pairs should be returned for insertion")

		require.Len(t, matched, 1, "DATABASE:REDIS should hit the existing 'Database:redis' tag")
		assert.Same(t, dbTag2, matched[0], "matched should return the existing pointer with its authoritative ID")
	})

	t.Run("dedupes inputs that map to the same lower(key)+lower(value)", func(t *testing.T) {
		orgID := valuer.GenerateUUID()
		store := tagtypestest.NewStore()
		m := &module{store: store}

		toCreate, matched, err := m.resolve(context.Background(), orgID, testKind, []tagtypes.PostableTag{
			{Key: "Foo", Value: "Bar"},
			{Key: "foo", Value: "bar"},
			{Key: "FOO", Value: "BAR"},
		})
		require.NoError(t, err)

		require.Empty(t, matched)
		require.Len(t, toCreate, 1, "duplicate inputs must collapse into a single insert")
		assert.Equal(t, "Foo", toCreate[0].Key, "first input's casing wins")
		assert.Equal(t, "Bar", toCreate[0].Value, "first input's casing wins")
	})

	t.Run("preserves existing casing on case-only match", func(t *testing.T) {
		orgID := valuer.GenerateUUID()
		dbTag := tagtypes.NewTag(orgID, testKind, "Team", "Pulse")
		store := tagtypestest.NewStore()
		store.Tags = []*tagtypes.Tag{dbTag}
		m := &module{store: store}

		toCreate, matched, err := m.resolve(context.Background(), orgID, testKind, []tagtypes.PostableTag{
			{Key: "team", Value: "PULSE"},
		})
		require.NoError(t, err)

		assert.Empty(t, toCreate)
		require.Len(t, matched, 1)
		assert.Equal(t, "Team", matched[0].Key)
		assert.Equal(t, "Pulse", matched[0].Value)
	})

	t.Run("propagates validation error from any input", func(t *testing.T) {
		store := tagtypestest.NewStore()
		m := &module{store: store}

		// Empty key must fail validation even when an earlier input is valid.
		_, _, err := m.resolve(context.Background(), valuer.GenerateUUID(), testKind, []tagtypes.PostableTag{
			{Key: "team", Value: "pulse"},
			{Key: "", Value: "x"},
		})
		require.Error(t, err)
	})

	t.Run("propagates regex validation error", func(t *testing.T) {
		store := tagtypestest.NewStore()
		m := &module{store: store}

		// "!" is outside the allowed key character set per ValidatePostableTag.
		_, _, err := m.resolve(context.Background(), valuer.GenerateUUID(), testKind, []tagtypes.PostableTag{
			{Key: "team!eng", Value: "pulse"},
		})
		require.Error(t, err)
	})
}
|
||||
148
pkg/modules/tag/impltag/store.go
Normal file
148
pkg/modules/tag/impltag/store.go
Normal file
@@ -0,0 +1,148 @@
|
||||
package impltag
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
// store is the SQL-backed tagtypes.Store implementation built on bun via the
// shared sqlstore abstraction.
type store struct {
	sqlstore sqlstore.SQLStore
}

// NewStore returns a tagtypes.Store backed by the given SQL store.
func NewStore(sqlstore sqlstore.SQLStore) tagtypes.Store {
	return &store{sqlstore: sqlstore}
}
|
||||
|
||||
func (s *store) List(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind) ([]*tagtypes.Tag, error) {
|
||||
tags := make([]*tagtypes.Tag, 0)
|
||||
err := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&tags).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("kind = ?", kind).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
func (s *store) ListByResource(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID) ([]*tagtypes.Tag, error) {
|
||||
tags := make([]*tagtypes.Tag, 0)
|
||||
err := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&tags).
|
||||
Join("JOIN tag_relation AS tr ON tr.tag_id = tag.id").
|
||||
Where("tr.kind = ?", kind).
|
||||
Where("tr.resource_id = ?", resourceID).
|
||||
Where("tag.org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return tags, nil
|
||||
}
|
||||
|
||||
func (s *store) ListByResources(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceIDs []valuer.UUID) (map[valuer.UUID][]*tagtypes.Tag, error) {
|
||||
if len(resourceIDs) == 0 {
|
||||
return map[valuer.UUID][]*tagtypes.Tag{}, nil
|
||||
}
|
||||
|
||||
type joinedRow struct {
|
||||
tagtypes.Tag `bun:",extend"`
|
||||
ResourceID valuer.UUID `bun:"resource_id"`
|
||||
}
|
||||
|
||||
rows := make([]*joinedRow, 0)
|
||||
err := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
Model(&rows).
|
||||
ColumnExpr("tag.*, tr.resource_id").
|
||||
Join("JOIN tag_relation AS tr ON tr.tag_id = tag.id").
|
||||
Where("tr.kind = ?", kind).
|
||||
Where("tr.resource_id IN (?)", bun.In(resourceIDs)).
|
||||
Where("tag.org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
out := make(map[valuer.UUID][]*tagtypes.Tag)
|
||||
for _, r := range rows {
|
||||
tag := r.Tag
|
||||
out[r.ResourceID] = append(out[r.ResourceID], &tag)
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// CreateOrGet inserts the given tags and returns the same slice with the
// stored rows' values populated via RETURNING.
//
// DO UPDATE on a self-set is a deliberate no-op write whose only purpose
// is to make RETURNING fire on conflicting rows. Without it, RETURNING is
// silent on the conflict path and we'd have to refetch by (key, value) to
// learn the existing rows' IDs after a concurrent-insert race. Setting
// key = tag.key (the existing row's value) preserves the first writer's
// casing on case-only collisions.
//
// NOTE(review): the On("CONFLICT ...") clause is commented out (the unique
// index does not exist yet — see the disabled store tests), which leaves
// Set("key = tag.key") dangling: without an ON CONFLICT DO UPDATE target
// the self-set described above is inert, and once the index lands a
// duplicate insert will surface as a constraint error instead of returning
// the existing row. Re-enable On() together with the index migration.
func (s *store) CreateOrGet(ctx context.Context, tags []*tagtypes.Tag, ) ([]*tagtypes.Tag, error) {
	if len(tags) == 0 {
		return tags, nil
	}
	err := s.sqlstore.
		BunDBCtx(ctx).
		NewInsert().
		Model(&tags).
		// On("CONFLICT (org_id, kind, (LOWER(key)), (LOWER(value))) DO UPDATE").
		Set("key = tag.key").
		Returning("*").
		Scan(ctx)
	if err != nil {
		return nil, err
	}
	return tags, nil
}
|
||||
|
||||
func (s *store) CreateRelations(ctx context.Context, relations []*tagtypes.TagRelation) error {
|
||||
if len(relations) == 0 {
|
||||
return nil
|
||||
}
|
||||
_, err := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewInsert().
|
||||
Model(&relations).
|
||||
On("CONFLICT (kind, resource_id, tag_id) DO NOTHING").
|
||||
Exec(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *store) DeleteRelationsExcept(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID, keepTagIDs []valuer.UUID) error {
|
||||
// Scope the delete to the caller's org via a subquery on tag — bun's
|
||||
// DELETE-with-JOIN syntax isn't uniformly portable across Postgres/SQLite.
|
||||
tagIDsToDelete := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewSelect().
|
||||
TableExpr("tag").
|
||||
Column("id").
|
||||
Where("org_id = ?", orgID)
|
||||
if len(keepTagIDs) > 0 {
|
||||
tagIDsToDelete = tagIDsToDelete.Where("id NOT IN (?)", bun.In(keepTagIDs))
|
||||
}
|
||||
_, err := s.sqlstore.
|
||||
BunDBCtx(ctx).
|
||||
NewDelete().
|
||||
Model((*tagtypes.TagRelation)(nil)).
|
||||
Where("kind = ?", kind).
|
||||
Where("resource_id = ?", resourceID).
|
||||
Where("tag_id IN (?)", tagIDsToDelete).
|
||||
Exec(ctx)
|
||||
return err
|
||||
}
|
||||
|
||||
// RunInTx executes cb inside a database transaction (nil options means the
// sqlstore defaults). Thin pass-through to the underlying SQL store.
func (s *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
	return s.sqlstore.RunInTxCtx(ctx, nil, cb)
}
|
||||
149
pkg/modules/tag/impltag/store_test.go
Normal file
149
pkg/modules/tag/impltag/store_test.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package impltag
|
||||
|
||||
import (
|
||||
"context"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory/factorytest"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
// newTestStore spins up a throwaway on-disk SQLite store (one file per test
// via t.TempDir), creates the tag table, and adds the functional unique
// index the production conflict handling relies on.
func newTestStore(t *testing.T) sqlstore.SQLStore {
	t.Helper()
	dbPath := filepath.Join(t.TempDir(), "test.db")
	store, err := sqlitesqlstore.New(context.Background(), factorytest.NewSettings(), sqlstore.Config{
		Provider: "sqlite",
		Connection: sqlstore.ConnectionConfig{
			MaxOpenConns:    1,
			MaxConnLifetime: 0,
		},
		Sqlite: sqlstore.SqliteConfig{
			Path:            dbPath,
			Mode:            "wal",
			BusyTimeout:     5 * time.Second,
			TransactionMode: "deferred",
		},
	})
	require.NoError(t, err)

	_, err = store.BunDB().NewCreateTable().
		Model((*tagtypes.Tag)(nil)).
		IfNotExists().
		Exec(context.Background())
	require.NoError(t, err)

	// Case-insensitive uniqueness on (org, kind, key, value) — mirrors the
	// production constraint that CreateOrGet's conflict clause targets.
	_, err = store.BunDB().Exec(`CREATE UNIQUE INDEX IF NOT EXISTS uq_tag_org_kind_lower_key_lower_value ON tag (org_id, kind, LOWER(key), LOWER(value))`)
	require.NoError(t, err)
	return store
}
|
||||
|
||||
var dashboardKind = coretypes.KindDashboard
|
||||
|
||||
func tagsByLowerKeyValue(t *testing.T, db *bun.DB) map[string]*tagtypes.Tag {
|
||||
t.Helper()
|
||||
all := make([]*tagtypes.Tag, 0)
|
||||
require.NoError(t, db.NewSelect().Model(&all).Scan(context.Background()))
|
||||
out := map[string]*tagtypes.Tag{}
|
||||
for _, tag := range all {
|
||||
out[strings.ToLower(tag.Key)+"\x00"+strings.ToLower(tag.Value)] = tag
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// TestStore_Create_PopulatesIDsOnFreshInsert verifies the non-conflicting
// path of CreateOrGet: pre-generated IDs survive the insert, the returned
// slice is the one passed in, and the rows actually land in the DB.
func TestStore_Create_PopulatesIDsOnFreshInsert(t *testing.T) {
	ctx := context.Background()
	sqlstore := newTestStore(t)
	s := NewStore(sqlstore)

	orgID := valuer.GenerateUUID()
	tagA := tagtypes.NewTag(orgID, dashboardKind, "tag", "Database")
	tagB := tagtypes.NewTag(orgID, dashboardKind, "team", "BLR")
	preIDA := tagA.ID
	preIDB := tagB.ID

	got, err := s.CreateOrGet(ctx, []*tagtypes.Tag{tagA, tagB})
	require.NoError(t, err)
	require.Len(t, got, 2)

	// No race → pre-generated IDs stand. The slice is what we passed in,
	// confirming Scan didn't reallocate.
	assert.Equal(t, preIDA, got[0].ID)
	assert.Equal(t, preIDB, got[1].ID)

	// And the rows are in the DB.
	stored := tagsByLowerKeyValue(t, sqlstore.BunDB())
	require.Contains(t, stored, "tag\x00database")
	require.Contains(t, stored, "team\x00blr")
	assert.Equal(t, preIDA, stored["tag\x00database"].ID)
	assert.Equal(t, preIDB, stored["team\x00blr"].ID)
}
|
||||
|
||||
// todo (@namanverma): uncomment once unique index is there.
|
||||
//
|
||||
// func TestStore_Create_ConflictReturnsExistingRowID(t *testing.T) {
|
||||
// ctx := context.Background()
|
||||
// sqlstore := newTestStore(t)
|
||||
// s := NewStore(sqlstore)
|
||||
|
||||
// orgID := valuer.GenerateUUID()
|
||||
|
||||
// // Simulate a concurrent insert: someone else has already inserted "tag:Database".
|
||||
// winner := tagtypes.NewTag(orgID, dashboardKind, "tag", "Database")
|
||||
// _, err := s.CreateOrGet(ctx, []*tagtypes.Tag{winner})
|
||||
// require.NoError(t, err)
|
||||
// winnerID := winner.ID
|
||||
|
||||
// // Now our request runs with a different pre-generated ID for the same
|
||||
// // (key, value) — case differs but the functional unique index collapses
|
||||
// // them. RETURNING should overwrite our stale ID with winner's ID.
|
||||
// loser := tagtypes.NewTag(orgID, dashboardKind, "TAG", "DATABASE")
|
||||
// loserPreID := loser.ID
|
||||
// require.NotEqual(t, winnerID, loserPreID, "pre-generated IDs must differ for this test to be meaningful")
|
||||
|
||||
// got, err := s.CreateOrGet(ctx, []*tagtypes.Tag{loser})
|
||||
// require.NoError(t, err)
|
||||
// require.Len(t, got, 1)
|
||||
|
||||
// assert.Equal(t, winnerID, got[0].ID, "returned slice should carry the existing row's ID, not our stale one")
|
||||
// assert.Equal(t, winnerID, loser.ID, "input slice element is mutated in place")
|
||||
|
||||
// // And the DB still has exactly one row for that (lower(key), lower(value)) — winner's, with winner's casing.
|
||||
// stored := tagsByLowerKeyValue(t, sqlstore.BunDB())
|
||||
// require.Len(t, stored, 1)
|
||||
// assert.Equal(t, winnerID, stored["tag\x00database"].ID)
|
||||
// assert.Equal(t, "tag", stored["tag\x00database"].Key, "winner's casing preserved in key")
|
||||
// assert.Equal(t, "Database", stored["tag\x00database"].Value, "winner's casing preserved in value")
|
||||
// }
|
||||
|
||||
// func TestStore_Create_MixedFreshAndConflict(t *testing.T) {
|
||||
// ctx := context.Background()
|
||||
// sqlstore := newTestStore(t)
|
||||
// s := NewStore(sqlstore)
|
||||
|
||||
// orgID := valuer.GenerateUUID()
|
||||
// pre := tagtypes.NewTag(orgID, dashboardKind, "tag", "Database")
|
||||
// _, err := s.CreateOrGet(ctx, []*tagtypes.Tag{pre})
|
||||
// require.NoError(t, err)
|
||||
// preExistingID := pre.ID
|
||||
|
||||
// conflict := tagtypes.NewTag(orgID, dashboardKind, "tag", "Database")
|
||||
// fresh := tagtypes.NewTag(orgID, dashboardKind, "team", "BLR")
|
||||
// freshPreID := fresh.ID
|
||||
|
||||
// got, err := s.CreateOrGet(ctx, []*tagtypes.Tag{conflict, fresh})
|
||||
// require.NoError(t, err)
|
||||
// require.Len(t, got, 2)
|
||||
|
||||
// assert.Equal(t, preExistingID, got[0].ID, "conflicting row's ID overwritten with the existing row's")
|
||||
// assert.Equal(t, freshPreID, got[1].ID, "fresh row's pre-generated ID is preserved")
|
||||
// }
|
||||
20
pkg/modules/tag/tag.go
Normal file
20
pkg/modules/tag/tag.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package tag

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/coretypes"
	"github.com/SigNoz/signoz/pkg/types/tagtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Module is the tag domain's public surface: org-scoped (key, value) tags and
// their links to resources of a given kind (currently dashboards; intended to
// be shared by other resource kinds — TODO confirm with callers).
type Module interface {
	// SyncTags resolves the given postable tags (creating new rows as needed)
	// and reconciles the resource's links to exactly that set, all in one transaction.
	SyncTags(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID, postable []tagtypes.PostableTag) ([]*tagtypes.Tag, error)

	// ListForResource returns the tags currently linked to a single resource.
	ListForResource(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID) ([]*tagtypes.Tag, error)

	// ListForResources returns tags for many resources, keyed by resource ID.
	// Resources with no tags are absent from the returned map.
	ListForResources(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceIDs []valuer.UUID) (map[valuer.UUID][]*tagtypes.Tag, error)
}
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention/implretention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tag/impltag"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
@@ -45,6 +46,7 @@ func TestNewHandlers(t *testing.T) {
|
||||
emailing := emailingtest.New()
|
||||
queryParser := queryparser.New(providerSettings)
|
||||
require.NoError(t, err)
|
||||
tagModule := impltag.NewModule(impltag.NewStore(sqlstore))
|
||||
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
|
||||
|
||||
flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
|
||||
@@ -53,8 +55,9 @@ func TestNewHandlers(t *testing.T) {
|
||||
userRoleStore := impluser.NewUserRoleStore(sqlstore, providerSettings)
|
||||
|
||||
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings), userRoleStore, flagger)
|
||||
|
||||
retentionGetter := implretention.NewGetter(implretention.NewStore(sqlstore))
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil, retentionGetter, flagger)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, nil, nil, retentionGetter, flagger, tagModule)
|
||||
|
||||
querierHandler := querier.NewHandler(providerSettings, nil, nil)
|
||||
registryHandler := factory.NewHandler(nil)
|
||||
|
||||
@@ -45,6 +45,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/spanmapper/implspanmapper"
|
||||
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
|
||||
"github.com/SigNoz/signoz/pkg/modules/spanpercentile/implspanpercentile"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tag"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracedetail"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracedetail/impltracedetail"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
|
||||
@@ -88,6 +89,7 @@ type Modules struct {
|
||||
TraceDetail tracedetail.Module
|
||||
SpanMapper spanmapper.Module
|
||||
LLMPricingRule llmpricingrule.Module
|
||||
Tag tag.Module
|
||||
}
|
||||
|
||||
func NewModules(
|
||||
@@ -113,6 +115,7 @@ func NewModules(
|
||||
cloudIntegrationModule cloudintegration.Module,
|
||||
retentionGetter retention.Getter,
|
||||
fl flagger.Flagger,
|
||||
tagModule tag.Module,
|
||||
) Modules {
|
||||
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
|
||||
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
|
||||
@@ -145,5 +148,6 @@ func NewModules(
|
||||
TraceDetail: impltracedetail.NewModule(impltracedetail.NewTraceStore(telemetryStore), providerSettings, config.TraceDetail),
|
||||
SpanMapper: implspanmapper.NewModule(implspanmapper.NewStore(sqlstore)),
|
||||
LLMPricingRule: impllmpricingrule.NewModule(impllmpricingrule.NewStore(sqlstore)),
|
||||
Tag: tagModule,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/retention/implretention"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tag/impltag"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/sharder"
|
||||
@@ -46,6 +47,7 @@ func TestNewModules(t *testing.T) {
|
||||
emailing := emailingtest.New()
|
||||
queryParser := queryparser.New(providerSettings)
|
||||
require.NoError(t, err)
|
||||
tagModule := impltag.NewModule(impltag.NewStore(sqlstore))
|
||||
dashboardModule := impldashboard.NewModule(impldashboard.NewStore(sqlstore), providerSettings, nil, orgGetter, queryParser)
|
||||
|
||||
flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
|
||||
@@ -58,7 +60,8 @@ func TestNewModules(t *testing.T) {
|
||||
serviceAccount := implserviceaccount.NewModule(implserviceaccount.NewStore(sqlstore), nil, nil, nil, providerSettings, serviceaccount.Config{})
|
||||
|
||||
retentionGetter := implretention.NewGetter(implretention.NewStore(sqlstore))
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule(), retentionGetter, flagger)
|
||||
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, nil, nil, nil, nil, nil, nil, nil, queryParser, Config{}, dashboardModule, userGetter, userRoleStore, serviceAccount, implcloudintegration.NewModule(), retentionGetter, flagger, tagModule)
|
||||
|
||||
reflectVal := reflect.ValueOf(modules)
|
||||
for i := 0; i < reflectVal.NumField(); i++ {
|
||||
|
||||
@@ -201,6 +201,7 @@ func NewSQLMigrationProviderFactories(
|
||||
sqlmigration.NewAddSpanMapperFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewAddLLMPricingRulesFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewMigrateMetaresourcesTuplesFactory(sqlstore),
|
||||
sqlmigration.NewAddTagsFactory(sqlstore, sqlschema),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/serviceaccount/implserviceaccount"
|
||||
"github.com/SigNoz/signoz/pkg/modules/tag/impltag"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
@@ -332,6 +333,11 @@ func New(
|
||||
// Initialize query parser (needed for dashboard module)
|
||||
queryParser := queryparser.New(providerSettings)
|
||||
|
||||
// Initialize tag module — shared across modules that link entities to tags
|
||||
// (currently dashboard; future: alerts, RBAC). Built once here and injected
|
||||
// where needed.
|
||||
tagModule := impltag.NewModule(impltag.NewStore(sqlstore))
|
||||
|
||||
// Initialize dashboard module
|
||||
dashboard := dashboardModuleCallback(sqlstore, providerSettings, analytics, orgGetter, queryParser, querier, licensing)
|
||||
|
||||
@@ -455,7 +461,7 @@ func New(
|
||||
}
|
||||
|
||||
// Initialize all modules
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule, retentionGetter, flagger)
|
||||
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard, userGetter, userRoleStore, serviceAccount, cloudIntegrationModule, retentionGetter, flagger, tagModule)
|
||||
|
||||
// Initialize ruler from the variant-specific provider factories
|
||||
rulerInstance, err := factory.NewProviderFromNamedMap(ctx, providerSettings, config.Ruler, rulerProviderFactories(cache, alertmanager, sqlstore, telemetrystore, telemetryMetadataStore, prometheus, orgGetter, modules.RuleStateHistory, querier, queryParser), "signoz")
|
||||
|
||||
106
pkg/sqlmigration/082_add_tags.go
Normal file
106
pkg/sqlmigration/082_add_tags.go
Normal file
@@ -0,0 +1,106 @@
|
||||
package sqlmigration
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
// addTags is the SQL migration that introduces the `tag` and `tag_relation`
// tables (see Up for the schema).
type addTags struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}

// NewAddTagsFactory registers this migration as a provider factory under the
// name "add_tags".
func NewAddTagsFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_tags"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
		return &addTags{
			sqlstore:  sqlstore,
			sqlschema: sqlschema,
		}, nil
	})
}

// Register adds this migration's Up/Down pair to the bun migration set.
func (migration *addTags) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
|
||||
|
||||
// Up creates the `tag` table (one row per org-scoped key/value pair of a
// kind) and the `tag_relation` join table linking tags to resources, then a
// unique index preventing duplicate links. All DDL runs in one transaction so
// a partial failure leaves the schema untouched.
func (migration *addTags) Up(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	// Rollback after a successful Commit is a no-op, so the deferred call is
	// safe on every path; its error is deliberately discarded.
	defer func() {
		_ = tx.Rollback()
	}()

	sqls := [][]byte{}

	tagTableSQLs := migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
		Name: "tag",
		Columns: []*sqlschema.Column{
			{Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "key", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "value", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "kind", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
			{Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
		},
		PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}},
		ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
			{
				ReferencingColumnName: sqlschema.ColumnName("org_id"),
				ReferencedTableName:   sqlschema.TableName("organizations"),
				ReferencedColumnName:  sqlschema.ColumnName("id"),
			},
		},
	})
	sqls = append(sqls, tagTableSQLs...)

	// TODO (@namanverma): add a unique index for tags: (org_id, kind, (LOWER(key)), (LOWER(value)))

	tagRelationsTableSQLs := migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
		Name: "tag_relation",
		Columns: []*sqlschema.Column{
			{Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "kind", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "resource_id", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "tag_id", DataType: sqlschema.DataTypeText, Nullable: false},
			{Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
		},
		PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}},
		ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
			{
				ReferencingColumnName: sqlschema.ColumnName("tag_id"),
				ReferencedTableName:   sqlschema.TableName("tag"),
				ReferencedColumnName:  sqlschema.ColumnName("id"),
			},
		},
	})
	sqls = append(sqls, tagRelationsTableSQLs...)

	// At most one link per (kind, resource, tag) — duplicates are rejected at
	// the database level rather than in application code.
	tagRelationUniqueIndexSQLs := migration.sqlschema.Operator().CreateIndex(
		&sqlschema.UniqueIndex{
			TableName:   "tag_relation",
			ColumnNames: []sqlschema.ColumnName{"kind", "resource_id", "tag_id"},
		},
	)
	sqls = append(sqls, tagRelationUniqueIndexSQLs...)

	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}

	return tx.Commit()
}
|
||||
|
||||
// Down is intentionally a no-op: this migration is not reverted.
func (migration *addTags) Down(_ context.Context, _ *bun.DB) error {
	return nil
}
|
||||
@@ -41,7 +41,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
// TODO(Piyush): Update this to support multiple JSON columns based on evolutions
|
||||
for _, column := range columns {
|
||||
// TODO(Tushar): thread orgID here to evaluate correctly
|
||||
if column.Type.GetType() == schema.ColumnTypeEnumJSON && key.FieldContext == telemetrytypes.FieldContextBody && c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) && key.Name != messageSubField {
|
||||
if column.Type.GetType() == schema.ColumnTypeEnumJSON && c.fl.BooleanOrEmpty(ctx, flagger.FeatureUseJSONBody, featuretypes.NewFlaggerEvaluationContext(valuer.UUID{})) && key.Name != messageSubField {
|
||||
valueType, value := InferDataType(value, operator, key)
|
||||
cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
|
||||
if err != nil {
|
||||
|
||||
@@ -33,7 +33,7 @@ func (t TestExpected) GetQuery() string {
|
||||
}
|
||||
|
||||
func TestJSONStmtBuilder_TimeSeries(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, false)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, false)
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
@@ -171,7 +171,7 @@ func TestStmtBuilderTimeSeriesBodyGroupByPromoted(t *testing.T) {
|
||||
*/
|
||||
|
||||
func TestJSONStmtBuilder_PrimitivePaths(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, false)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, false)
|
||||
cases := []struct {
|
||||
name string
|
||||
filter string
|
||||
@@ -494,7 +494,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
|
||||
*/
|
||||
|
||||
func TestJSONStmtBuilder_ArrayPaths(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, false)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, false)
|
||||
cases := []struct {
|
||||
name string
|
||||
filter string
|
||||
@@ -799,7 +799,7 @@ func TestJSONStmtBuilder_ArrayPaths(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestJSONStmtBuilder_IndexedPaths(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, true)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, true)
|
||||
cases := []struct {
|
||||
name string
|
||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
|
||||
@@ -918,7 +918,7 @@ func TestJSONStmtBuilder_IndexedPaths(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestJSONStmtBuilder_SelectField(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, false)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, false)
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
@@ -1006,7 +1006,7 @@ func TestJSONStmtBuilder_SelectField(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestJSONStmtBuilder_OrderBy(t *testing.T) {
|
||||
statementBuilder, _ := buildJSONTestStatementBuilder(t, false)
|
||||
statementBuilder := buildJSONTestStatementBuilder(t, false)
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
@@ -1082,69 +1082,6 @@ func TestJSONStmtBuilder_OrderBy(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestResourceAggrAndGroupBy_WithJSONEnabled(t *testing.T) {
|
||||
statementBuilder, metadataStore := buildJSONTestStatementBuilder(t, false)
|
||||
releaseTime := time.Date(2024, 1, 15, 10, 0, 0, 0, time.UTC)
|
||||
keysMap := buildCompleteFieldKeyMap(releaseTime)
|
||||
for _, keys := range keysMap {
|
||||
for _, key := range keys {
|
||||
metadataStore.SetKey(key)
|
||||
}
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
|
||||
expected qbtypes.Statement
|
||||
expectedErrContains string
|
||||
}{
|
||||
{
|
||||
name: "resource_aggregation_and_group_by_with_json_enabled",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "region",
|
||||
},
|
||||
},
|
||||
},
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "user.name exists",
|
||||
},
|
||||
Aggregations: []qbtypes.LogAggregation{
|
||||
{
|
||||
Expression: "count_distinct(service.name)",
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`region`::String IS NOT NULL, resource.`region`::String, NULL)) AS `region`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE ((dynamicElement(body_v2.`user.name`, 'String') IS NOT NULL) OR mapContains(attributes_string, 'user.name') = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY ts, `region`",
|
||||
Args: []any{true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
|
||||
Warnings: []string{"Key `user.name` is ambiguous, found 2 different combinations of field context / data type: [name=user.name,context=body,datatype=string name=user.name,context=attribute,datatype=string]."},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
|
||||
if c.expectedErrContains != "" {
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErrContains)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func buildTestTelemetryMetadataStore(t *testing.T, addIndexes bool) *telemetrytypestest.MockMetadataStore {
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.SetStaticFields(IntrinsicFields)
|
||||
@@ -1186,7 +1123,7 @@ func buildTestTelemetryMetadataStore(t *testing.T, addIndexes bool) *telemetryty
|
||||
return mockMetadataStore
|
||||
}
|
||||
|
||||
func buildJSONTestStatementBuilder(t *testing.T, addIndexes bool) (*logQueryStatementBuilder, *telemetrytypestest.MockMetadataStore) {
|
||||
func buildJSONTestStatementBuilder(t *testing.T, addIndexes bool) *logQueryStatementBuilder {
|
||||
t.Helper()
|
||||
|
||||
mockMetadataStore := buildTestTelemetryMetadataStore(t, addIndexes)
|
||||
@@ -1207,5 +1144,5 @@ func buildJSONTestStatementBuilder(t *testing.T, addIndexes bool) (*logQueryStat
|
||||
fl,
|
||||
)
|
||||
|
||||
return statementBuilder, mockMetadataStore
|
||||
return statementBuilder
|
||||
}
|
||||
|
||||
30
pkg/types/tagtypes/store.go
Normal file
30
pkg/types/tagtypes/store.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package tagtypes

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/coretypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Store is the persistence contract for tags and tag-resource relations.
type Store interface {
	// List returns every tag of the given kind within the org.
	List(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind) ([]*Tag, error)

	// ListByResource returns the tags linked to a single resource.
	ListByResource(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID) ([]*Tag, error)

	// ListByResources returns tags for many resources, keyed by resource ID.
	ListByResources(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceIDs []valuer.UUID) (map[valuer.UUID][]*Tag, error)

	// CreateOrGet upserts the given tags and returns them with authoritative IDs.
	// On conflict on (org_id, kind, LOWER(key), LOWER(value)) — which
	// happens only when a concurrent insert raced ours, including casing-only
	// collisions — the returned entry carries the existing row's ID rather
	// than the pre-generated one in the input.
	CreateOrGet(ctx context.Context, tags []*Tag) ([]*Tag, error)

	// CreateRelations inserts tag-resource relations. Conflicts on the composite primary key are ignored.
	CreateRelations(ctx context.Context, relations []*TagRelation) error

	// DeleteRelationsExcept removes the resource's tag links whose tag ID is
	// not in keepTagIDs.
	DeleteRelationsExcept(ctx context.Context, orgID valuer.UUID, kind coretypes.Kind, resourceID valuer.UUID, keepTagIDs []valuer.UUID) error

	// RunInTx runs cb inside a transaction carried via the context.
	RunInTx(ctx context.Context, cb func(ctx context.Context) error) error
}
|
||||
113
pkg/types/tagtypes/tag.go
Normal file
113
pkg/types/tagtypes/tag.go
Normal file
@@ -0,0 +1,113 @@
|
||||
package tagtypes
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
const (
	// Maximum rune lengths for a tag's key and value, enforced by
	// ValidatePostableTag.
	MAX_LEN_TAG_KEY   = 32
	MAX_LEN_TAG_VALUE = 32
)

var (
	// Keys must start with a letter or one of $ _ @ { #; values may
	// additionally contain . + = and may start with a digit.
	tagKeyRegex   = regexp.MustCompile(`^[a-zA-Z$_@{#][a-zA-Z0-9$_@#{}:/-]*$`)
	tagValueRegex = regexp.MustCompile(`^[a-zA-Z0-9$_@#{}:.+=/-]*$`)

	// Error codes surfaced by tag validation and lookups.
	ErrCodeTagInvalidKey   = errors.MustNewCode("tag_invalid_key")
	ErrCodeTagInvalidValue = errors.MustNewCode("tag_invalid_value")
	ErrCodeTagNotFound     = errors.MustNewCode("tag_not_found")
)
|
||||
|
||||
// Tag is a persisted, org-scoped (key, value) label for resources of one kind.
type Tag struct {
	bun.BaseModel `bun:"table:tag,alias:tag"`

	types.Identifiable
	types.TimeAuditable
	Key   string         `json:"key" required:"true" bun:"key,type:text,notnull"`
	Value string         `json:"value" required:"true" bun:"value,type:text,notnull"`
	OrgID valuer.UUID    `json:"orgId" required:"true" bun:"org_id,type:text,notnull"`
	Kind  coretypes.Kind `json:"kind" required:"true" bun:"kind,type:text,notnull"`
}
|
||||
|
||||
// PostableTag is the user-supplied (key, value) pair accepted by the API.
type PostableTag struct {
	Key   string `json:"key" required:"true"`
	Value string `json:"value" required:"true"`
}

// GettableTag is the API response shape; identical to PostableTag by design.
type GettableTag = PostableTag
|
||||
|
||||
func NewGettableTagFromTag(tag *Tag) *GettableTag {
|
||||
return &GettableTag{Key: tag.Key, Value: tag.Value}
|
||||
}
|
||||
|
||||
func NewGettableTagsFromTags(tags []*Tag) []*GettableTag {
|
||||
out := make([]*GettableTag, len(tags))
|
||||
for i, t := range tags {
|
||||
out[i] = NewGettableTagFromTag(t)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func NewPostableTagFromTag(tag *Tag) PostableTag {
|
||||
return PostableTag{Key: tag.Key, Value: tag.Value}
|
||||
}
|
||||
|
||||
func NewPostableTagsFromTags(tags []*Tag) []PostableTag {
|
||||
out := make([]PostableTag, len(tags))
|
||||
for i, t := range tags {
|
||||
out[i] = NewPostableTagFromTag(t)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func NewTag(orgID valuer.UUID, kind coretypes.Kind, key, value string) *Tag {
|
||||
now := time.Now()
|
||||
return &Tag{
|
||||
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||
TimeAuditable: types.TimeAuditable{
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
},
|
||||
Key: key,
|
||||
Value: value,
|
||||
OrgID: orgID,
|
||||
Kind: kind,
|
||||
}
|
||||
}
|
||||
|
||||
// ValidatePostableTag trims and validates a user-supplied (key, value) pair.
|
||||
// Returns the cleaned values on success. Entity-specific reserved-key checks
|
||||
// (e.g. dashboard column names that would collide with the list-query DSL) are
|
||||
// the caller's responsibility — perform them before calling into the tag module.
|
||||
func ValidatePostableTag(p PostableTag) (string, string, error) {
|
||||
key := strings.TrimSpace(p.Key)
|
||||
value := strings.TrimSpace(p.Value)
|
||||
if key == "" {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidKey, "tag key cannot be empty")
|
||||
}
|
||||
if value == "" {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidValue, "tag value cannot be empty")
|
||||
}
|
||||
if !tagKeyRegex.MatchString(key) {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidKey, "tag key %q contains disallowed characters", key)
|
||||
}
|
||||
if !tagValueRegex.MatchString(value) {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidValue, "tag value %q contains disallowed characters", value)
|
||||
}
|
||||
if utf8.RuneCountInString(key) > MAX_LEN_TAG_KEY {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidKey, "tag key %q exceeds the %d-character limit", key, MAX_LEN_TAG_KEY)
|
||||
}
|
||||
if utf8.RuneCountInString(value) > MAX_LEN_TAG_VALUE {
|
||||
return "", "", errors.Newf(errors.TypeInvalidInput, ErrCodeTagInvalidValue, "tag value %q exceeds the %d-character limit", value, MAX_LEN_TAG_VALUE)
|
||||
}
|
||||
return key, value, nil
|
||||
}
|
||||
38
pkg/types/tagtypes/tag_relation.go
Normal file
38
pkg/types/tagtypes/tag_relation.go
Normal file
@@ -0,0 +1,38 @@
|
||||
package tagtypes
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
// TagRelation links one tag to one resource of the given kind.
type TagRelation struct {
	bun.BaseModel `bun:"table:tag_relation,alias:tag_relation"`

	types.Identifiable
	Kind       coretypes.Kind `json:"kind" required:"true" bun:"kind,type:text,notnull"`
	ResourceID valuer.UUID    `json:"resourceId" required:"true" bun:"resource_id,type:text,notnull"`
	TagID      valuer.UUID    `json:"tagId" required:"true" bun:"tag_id,type:text,notnull"`
	CreatedAt  time.Time      `json:"createdAt" bun:"created_at,notnull"`
}
|
||||
|
||||
func NewTagRelation(kind coretypes.Kind, resourceID valuer.UUID, tagID valuer.UUID) *TagRelation {
|
||||
return &TagRelation{
|
||||
Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
|
||||
Kind: kind,
|
||||
ResourceID: resourceID,
|
||||
TagID: tagID,
|
||||
CreatedAt: time.Now(),
|
||||
}
|
||||
}
|
||||
|
||||
func NewTagRelations(kind coretypes.Kind, resourceID valuer.UUID, tagIDs []valuer.UUID) []*TagRelation {
|
||||
relations := make([]*TagRelation, 0, len(tagIDs))
|
||||
for _, tagID := range tagIDs {
|
||||
relations = append(relations, NewTagRelation(kind, resourceID, tagID))
|
||||
}
|
||||
return relations
|
||||
}
|
||||
69
pkg/types/tagtypes/tag_test.go
Normal file
69
pkg/types/tagtypes/tag_test.go
Normal file
@@ -0,0 +1,69 @@
|
||||
package tagtypes
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestValidatePostableTag covers trimming, the asymmetric key/value character
// rules (values additionally allow . + = and leading digits), and the 32-rune
// length limits, as one table-driven test.
func TestValidatePostableTag(t *testing.T) {
	tests := []struct {
		name      string
		input     PostableTag
		wantKey   string // expected cleaned key when no error
		wantValue string // expected cleaned value when no error
		wantError bool
	}{
		{name: "simple pair", input: PostableTag{Key: "team", Value: "pulse"}, wantKey: "team", wantValue: "pulse"},
		{name: "preserves casing", input: PostableTag{Key: "Team", Value: "Pulse"}, wantKey: "Team", wantValue: "Pulse"},
		{name: "trims key", input: PostableTag{Key: " team ", Value: "pulse"}, wantKey: "team", wantValue: "pulse"},
		{name: "trims value", input: PostableTag{Key: "team", Value: " pulse "}, wantKey: "team", wantValue: "pulse"},

		{name: "empty key rejected", input: PostableTag{Key: "", Value: "pulse"}, wantError: true},
		{name: "empty value rejected", input: PostableTag{Key: "team", Value: ""}, wantError: true},
		{name: "whitespace-only key rejected", input: PostableTag{Key: "   ", Value: "pulse"}, wantError: true},
		{name: "whitespace-only value rejected", input: PostableTag{Key: "team", Value: "   "}, wantError: true},

		{name: "slash accepted", input: PostableTag{Key: "team/eng", Value: "pulse/events"}, wantKey: "team/eng", wantValue: "pulse/events"},
		{name: "colon accepted", input: PostableTag{Key: "team:eng", Value: "env:prod"}, wantKey: "team:eng", wantValue: "env:prod"},
		{name: "extra punctuation accepted in both", input: PostableTag{Key: "a_b-c@d#e$f{g}h", Value: "a_b-c@d#e$f{g}h"}, wantKey: "a_b-c@d#e$f{g}h", wantValue: "a_b-c@d#e$f{g}h"},

		// Key is strict; value allows the extra `. + =` plus leading digits.
		{name: "dot in key rejected", input: PostableTag{Key: "team.eng", Value: "pulse"}, wantError: true},
		{name: "dot in value accepted", input: PostableTag{Key: "team", Value: "pulse.events"}, wantKey: "team", wantValue: "pulse.events"},
		{name: "plus in key rejected", input: PostableTag{Key: "team+eng", Value: "pulse"}, wantError: true},
		{name: "plus in value accepted", input: PostableTag{Key: "team", Value: "a+b"}, wantKey: "team", wantValue: "a+b"},
		{name: "equals in key rejected", input: PostableTag{Key: "team=eng", Value: "pulse"}, wantError: true},
		{name: "equals in value accepted", input: PostableTag{Key: "team", Value: "a=b"}, wantKey: "team", wantValue: "a=b"},
		{name: "leading digit in key rejected", input: PostableTag{Key: "2024team", Value: "pulse"}, wantError: true},
		{name: "leading digit in value accepted", input: PostableTag{Key: "team", Value: "2024_team"}, wantKey: "team", wantValue: "2024_team"},

		{name: "unicode letter in key rejected", input: PostableTag{Key: "チーム", Value: "pulse"}, wantError: true},
		{name: "unicode letter in value rejected", input: PostableTag{Key: "team", Value: "東京"}, wantError: true},

		{name: "internal space in key rejected", input: PostableTag{Key: "team eng", Value: "pulse"}, wantError: true},
		{name: "internal space in value rejected", input: PostableTag{Key: "team", Value: "pulse two"}, wantError: true},

		{name: "disallowed char in key rejected", input: PostableTag{Key: "team!eng", Value: "pulse"}, wantError: true},
		{name: "disallowed char in value rejected", input: PostableTag{Key: "team", Value: "pulse!one"}, wantError: true},
		{name: "control char rejected", input: PostableTag{Key: "team\tone", Value: "pulse"}, wantError: true},

		{name: "key at the 32-char limit accepted", input: PostableTag{Key: "abcdefghijklmnopabcdefghijklmnop", Value: "pulse"}, wantKey: "abcdefghijklmnopabcdefghijklmnop", wantValue: "pulse"},
		{name: "value at the 32-char limit accepted", input: PostableTag{Key: "team", Value: "abcdefghijklmnopabcdefghijklmnop"}, wantKey: "team", wantValue: "abcdefghijklmnopabcdefghijklmnop"},
		{name: "key over the 32-char limit rejected", input: PostableTag{Key: "abcdefghijklmnopabcdefghijklmnopq", Value: "pulse"}, wantError: true},
		{name: "value over the 32-char limit rejected", input: PostableTag{Key: "team", Value: "abcdefghijklmnopabcdefghijklmnopq"}, wantError: true},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			gotKey, gotValue, err := ValidatePostableTag(tc.input)
			if tc.wantError {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tc.wantKey, gotKey)
			assert.Equal(t, tc.wantValue, gotValue)
		})
	}
}
|
||||
53
pkg/types/tagtypes/tagtypestest/store.go
Normal file
53
pkg/types/tagtypes/tagtypestest/store.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package tagtypestest
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/types/coretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/tagtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// MockStore is an in-memory implementation of the tagtypes store interface
// for use in tests. Most methods are inert no-ops; List returns the contents
// of Tags and increments ListCallCount so tests can assert on lookup
// behavior. Set Tags directly to preload fixtures.
type MockStore struct {
	Tags          []*tagtypes.Tag // fixtures returned (as a copy) by List
	ListCallCount int             // number of times List has been invoked
}
|
||||
|
||||
func NewStore() *MockStore {
|
||||
return &MockStore{}
|
||||
}
|
||||
|
||||
func (s *MockStore) List(_ context.Context, _ valuer.UUID, _ coretypes.Kind) ([]*tagtypes.Tag, error) {
|
||||
s.ListCallCount++
|
||||
out := make([]*tagtypes.Tag, len(s.Tags))
|
||||
copy(out, s.Tags)
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// CreateOrGet is a no-op that echoes the given tags back unchanged.
func (s *MockStore) CreateOrGet(_ context.Context, tags []*tagtypes.Tag) ([]*tagtypes.Tag, error) {
	return tags, nil
}
|
||||
|
||||
// CreateRelations is a no-op that always succeeds.
func (s *MockStore) CreateRelations(_ context.Context, _ []*tagtypes.TagRelation) error {
	return nil
}
|
||||
|
||||
// ListByResource is a no-op that always returns an empty (non-nil) tag list.
func (s *MockStore) ListByResource(_ context.Context, _ valuer.UUID, _ coretypes.Kind, _ valuer.UUID) ([]*tagtypes.Tag, error) {
	return []*tagtypes.Tag{}, nil
}
|
||||
|
||||
// ListByResources is a no-op that always returns an empty (non-nil) map.
func (s *MockStore) ListByResources(_ context.Context, _ valuer.UUID, _ coretypes.Kind, _ []valuer.UUID) (map[valuer.UUID][]*tagtypes.Tag, error) {
	return map[valuer.UUID][]*tagtypes.Tag{}, nil
}
|
||||
|
||||
// DeleteRelationsExcept is a no-op that always succeeds.
func (s *MockStore) DeleteRelationsExcept(_ context.Context, _ valuer.UUID, _ coretypes.Kind, _ valuer.UUID, _ []valuer.UUID) error {
	return nil
}
|
||||
|
||||
// RunInTx invokes cb directly with the given context; the mock has no real
// transaction semantics (no isolation, no rollback).
func (s *MockStore) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
	return cb(ctx)
}
|
||||
29
tests/fixtures/querier.py
vendored
29
tests/fixtures/querier.py
vendored
@@ -450,35 +450,6 @@ def build_scalar_query(
|
||||
return {"type": "builder_query", "spec": spec}
|
||||
|
||||
|
||||
def build_raw_query(
    name: str,
    signal: str,
    *,
    order: list[dict] | None = None,
    limit: int | None = None,
    filter_expression: str | None = None,
    step_interval: int = DEFAULT_STEP_INTERVAL,
    disabled: bool = False,
) -> dict:
    """Return a raw ``builder_query`` payload for the query-range API.

    The optional ``order``, ``limit`` and ``filter`` keys are attached to
    the spec only when the caller supplies a value for them.
    """
    # Collect the caller-supplied optional keys first, then splice them in
    # after the mandatory ones so key order matches a hand-built spec.
    optional: dict[str, Any] = {}
    if order:
        optional["order"] = order
    if limit is not None:
        optional["limit"] = limit
    if filter_expression:
        optional["filter"] = {"expression": filter_expression}

    spec: dict[str, Any] = {
        "name": name,
        "signal": signal,
        "stepInterval": step_interval,
        "disabled": disabled,
    }
    spec.update(optional)

    return {"type": "builder_query", "spec": spec}
|
||||
|
||||
|
||||
def build_group_by_field(
|
||||
name: str,
|
||||
field_data_type: str = "string",
|
||||
|
||||
@@ -11,7 +11,6 @@ from fixtures.logs import Logs
|
||||
from fixtures.querier import (
|
||||
build_logs_aggregation,
|
||||
build_order_by,
|
||||
build_raw_query,
|
||||
build_scalar_query,
|
||||
get_column_data_from_response,
|
||||
get_rows,
|
||||
@@ -28,33 +27,28 @@ def _run_query_case(signoz: types.SigNoz, token: str, now: datetime, case: dict[
|
||||
start_ms = case.get("startMs", int((now - timedelta(seconds=10)).timestamp() * 1000))
|
||||
end_ms = case.get("endMs", int(now.timestamp() * 1000))
|
||||
|
||||
if case["requestType"] == "raw":
|
||||
query = build_raw_query(
|
||||
name=case["name"],
|
||||
signal="logs",
|
||||
filter_expression=case.get("expression"),
|
||||
order=case.get("order") or [build_order_by("timestamp", "desc")],
|
||||
limit=case.get("limit", 100),
|
||||
step_interval=case.get("stepInterval") or 60,
|
||||
)
|
||||
aggregation = case.get("aggregation")
|
||||
if aggregation and not isinstance(aggregation, list):
|
||||
aggregations = [build_logs_aggregation(aggregation)]
|
||||
elif aggregation:
|
||||
aggregations = aggregation
|
||||
else:
|
||||
aggregation = case.get("aggregation")
|
||||
if aggregation and not isinstance(aggregation, list):
|
||||
aggregations = [build_logs_aggregation(aggregation)]
|
||||
elif aggregation:
|
||||
aggregations = aggregation
|
||||
else:
|
||||
aggregations = []
|
||||
query = build_scalar_query(
|
||||
name=case["name"],
|
||||
signal="logs",
|
||||
aggregations=aggregations,
|
||||
group_by=case.get("groupBy"),
|
||||
order=case.get("order"),
|
||||
limit=case.get("limit", 100),
|
||||
filter_expression=case.get("expression"),
|
||||
step_interval=case.get("stepInterval") or 60,
|
||||
)
|
||||
aggregations = []
|
||||
|
||||
order = case.get("order")
|
||||
if order is None and case["requestType"] == "raw":
|
||||
order = [build_order_by("timestamp", "desc")]
|
||||
|
||||
query = build_scalar_query(
|
||||
name=case["name"],
|
||||
signal="logs",
|
||||
aggregations=aggregations,
|
||||
group_by=case.get("groupBy"),
|
||||
order=order,
|
||||
limit=case.get("limit", 100),
|
||||
filter_expression=case.get("expression"),
|
||||
step_interval=case.get("stepInterval") or 60,
|
||||
)
|
||||
|
||||
response = make_query_request(
|
||||
signoz=signoz,
|
||||
@@ -642,9 +636,10 @@ def test_select_order_by(
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
|
||||
def _run(case: dict[str, Any]) -> None:
|
||||
query = build_raw_query(
|
||||
query = build_scalar_query(
|
||||
name=case["name"],
|
||||
signal="logs",
|
||||
aggregations=[build_logs_aggregation("count()")],
|
||||
order=case["order"],
|
||||
limit=100,
|
||||
step_interval=60,
|
||||
|
||||
@@ -1,285 +0,0 @@
|
||||
import json
|
||||
from collections.abc import Callable
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.logs import Logs
|
||||
from fixtures.querier import (
|
||||
build_group_by_field,
|
||||
build_logs_aggregation,
|
||||
build_order_by,
|
||||
build_raw_query,
|
||||
build_scalar_query,
|
||||
get_rows,
|
||||
get_scalar_table_data,
|
||||
make_query_request,
|
||||
)
|
||||
|
||||
|
||||
def _raw(
    signoz: types.SigNoz,
    token: str,
    start_ms: int,
    end_ms: int,
    name: str,
    *,
    expression: str | None = None,
    order: list[dict] | None = None,
    limit: int = 100,
) -> requests.Response:
    """Fire a raw logs query, assert an HTTP 200, and return the response.

    Defaults to ordering by ``timestamp`` descending when no order is given.
    """
    payload = build_raw_query(
        name=name,
        signal="logs",
        filter_expression=expression,
        order=order or [build_order_by("timestamp", "desc")],
        limit=limit,
        step_interval=60,
    )
    response = make_query_request(
        signoz, token, start_ms, end_ms, queries=[payload], request_type="raw"
    )
    assert response.status_code == 200, f"HTTP {response.status_code} for '{name}': {response.text}"
    return response
|
||||
|
||||
|
||||
def _scalar(
    signoz: types.SigNoz,
    token: str,
    start_ms: int,
    end_ms: int,
    name: str,
    aggregation: str,
    *,
    expression: str | None = None,
    group_by: list[dict] | None = None,
) -> requests.Response:
    """Fire a scalar logs aggregation query, assert an HTTP 200, return it."""
    payload = build_scalar_query(
        name=name,
        signal="logs",
        aggregations=[build_logs_aggregation(aggregation)],
        filter_expression=expression,
        group_by=group_by,
        step_interval=60,
    )
    response = make_query_request(
        signoz, token, start_ms, end_ms, queries=[payload], request_type="scalar"
    )
    assert response.status_code == 200, f"HTTP {response.status_code} for '{name}': {response.text}"
    return response
|
||||
|
||||
|
||||
def _body_users(response: requests.Response) -> set[str | None]:
    """Distinct ``user`` values parsed out of each returned row's JSON body."""
    users: set[str | None] = set()
    for row in get_rows(response):
        users.add(json.loads(row["data"]["body"]).get("user"))
    return users
|
||||
|
||||
|
||||
def _body_scores(response: requests.Response) -> list[int | None]:
    """``score`` values from each returned row's JSON body, in row order."""
    scores: list[int | None] = []
    for row in get_rows(response):
        scores.append(json.loads(row["data"]["body"]).get("score"))
    return scores
|
||||
|
||||
|
||||
def _services(response: requests.Response) -> list[str]:
    """``service.name`` resource attribute per row ("" when absent)."""
    names: list[str] = []
    for row in get_rows(response):
        names.append(row["data"]["resources_string"].get("service.name", ""))
    return names
|
||||
|
||||
|
||||
def _counts(response: requests.Response) -> dict[str, Any]:
    """Map each non-empty scalar-table row's first column (stringified) to its last."""
    table: dict[str, Any] = {}
    for row in get_scalar_table_data(response.json()):
        if row:
            table[str(row[0])] = row[-1]
    return table
|
||||
|
||||
|
||||
def _run_case(
    signoz: types.SigNoz,
    token: str,
    start_ms: int,
    end_ms: int,
    case: dict[str, Any],
) -> None:
    """Execute one table-driven case and assert its ``validate`` predicate.

    Dispatches on ``case["requestType"]``: "raw" goes through _raw, anything
    else through _scalar.
    """
    if case["requestType"] != "raw":
        response = _scalar(
            signoz,
            token,
            start_ms,
            end_ms,
            case["name"],
            case["aggregation"],
            expression=case.get("expression"),
            group_by=case.get("groupBy"),
        )
    else:
        response = _raw(
            signoz,
            token,
            start_ms,
            end_ms,
            case["name"],
            expression=case.get("expression"),
            order=case.get("order"),
        )
    assert case["validate"](response), f"Validation failed for '{case['name']}': {response.json()}"
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Filter · GroupBy · Aggregation — non-body fields across all three contexts
|
||||
#
|
||||
# Five cases, one dataset. Each case crosses a different combination of
|
||||
# resource attr / log attr / top-level field in WHERE, GROUP BY, and agg:
|
||||
#
|
||||
# case 1 filter resource + log attr + top-level in WHERE (raw)
|
||||
# case 2 group by resource × top-level multi-key (scalar)
|
||||
# case 3 aggregation count_distinct(log attr) grouped by top-level (scalar)
|
||||
# case 4 agg+filter count by resource, body-field WHERE guard (scalar)
|
||||
# case 5 agg+filter count_distinct(resource) by log attr, top-level filter (scalar)
|
||||
#
|
||||
# Data landscape (5 logs):
|
||||
# log1 — auth-svc, GET, INFO, score=80, user=alice
|
||||
# log2 — auth-svc, POST, ERROR, score=90, user=bob
|
||||
# log3 — auth-svc, GET, INFO, score=60, user=carol
|
||||
# log4 — api-gw, GET, WARN, score=70, user=diana
|
||||
# log5 — worker, DELETE, ERROR, score=100, user=eve
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def test_non_body_filter_groupby_aggregation(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[list[Logs]], None],
    export_json_types: Callable[[list[Logs]], None],
) -> None:
    """Cross non-body fields (resource attr / log attr / top-level) through
    WHERE, GROUP BY, and aggregations over one five-log dataset.

    Inserts the fixture logs, then runs five table-driven cases; each case's
    ``validate`` lambda checks the response shape/content returned by the
    query-range API.
    """
    now = datetime.now(tz=UTC)
    start_ms = int((now - timedelta(seconds=10)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # (service, http.method, severity, JSON body) per log — see the banner
    # comment above for the full data landscape.
    log_data = [
        ("auth-svc", "GET", "INFO", {"score": 80, "user": "alice"}),
        ("auth-svc", "POST", "ERROR", {"score": 90, "user": "bob"}),
        ("auth-svc", "GET", "INFO", {"score": 60, "user": "carol"}),
        ("api-gw", "GET", "WARN", {"score": 70, "user": "diana"}),
        ("worker", "DELETE", "ERROR", {"score": 100, "user": "eve"}),
    ]
    logs_list = [
        Logs(
            # Spread timestamps one second apart so all land inside the window.
            timestamp=now - timedelta(seconds=len(log_data) - i),
            resources={"service.name": svc},
            attributes={"http.method": method},
            body_v2=json.dumps(body),
            body_promoted="",
            severity_text=sev,
        )
        for i, (svc, method, sev, body) in enumerate(log_data)
    ]
    export_json_types(logs_list)
    insert_logs(logs_list)
    token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

    cases = [
        # 1. Filter — resource + log attr + top-level in WHERE (all three non-body contexts at once)
        {
            "name": "filter.cross_context",
            "requestType": "raw",
            "expression": 'service.name = "auth-svc" AND http.method = "GET" AND severity_text = "INFO"',
            "validate": lambda r: len(get_rows(r)) == 2 and _body_users(r) == {"alice", "carol"},
        },
        # 2. GroupBy — resource × top-level multi-key, no filter
        # Proves both contexts resolve correctly as simultaneous GROUP BY keys.
        {
            "name": "groupby.resource_x_toplevel",
            "requestType": "scalar",
            "expression": None,
            "groupBy": [build_group_by_field("service.name"), {"name": "severity_text"}],
            "aggregation": "count()",
            # auth-svc+INFO=2, auth-svc+ERROR=1, api-gw+WARN=1, worker+ERROR=1
            "validate": lambda r: (p := {(str(row[0]), str(row[1])): row[-1] for row in get_scalar_table_data(r.json()) if len(row) >= 3}) and p.get(("auth-svc", "INFO")) == 2 and p.get(("auth-svc", "ERROR")) == 1 and p.get(("api-gw", "WARN")) == 1 and p.get(("worker", "ERROR")) == 1,
        },
        # 3. Aggregation — count_distinct(log attr) grouped by top-level
        # ERROR logs use {POST, DELETE} → 2 distinct methods; INFO/WARN use only GET → 1.
        {
            "name": "agg.count_distinct_attr_by_toplevel",
            "requestType": "scalar",
            "expression": None,
            "groupBy": [{"name": "severity_text"}],
            "aggregation": "count_distinct(http.method)",
            "validate": lambda r: (rows := _counts(r)) and int(rows["INFO"]) == 1 and int(rows["ERROR"]) == 2 and int(rows["WARN"]) == 1,
        },
        # 4. Aggregation + body filter — count by resource WHERE body score >= 80
        # Body field gates the logs; non-body field drives the GROUP BY.
        {
            "name": "agg.count_by_resource_body_filter",
            "requestType": "scalar",
            "expression": "score >= 80",
            "groupBy": [build_group_by_field("service.name")],
            "aggregation": "count()",
            # score>=80: alice(80), bob(90), eve(100) → auth-svc: 2, worker: 1; api-gw excluded
            "validate": lambda r: (rows := _counts(r)) and int(rows["auth-svc"]) == 2 and int(rows["worker"]) == 1 and "api-gw" not in rows,
        },
        # 5. Aggregation + top-level filter — count_distinct(resource) grouped by log attr
        # Aggregates a resource attr, groups by a log attr, filtered by a top-level field.
        {
            "name": "agg.count_distinct_resource_by_attr_toplevel_filter",
            "requestType": "scalar",
            "expression": "severity_text IN ['INFO', 'WARN']",
            "groupBy": [{"name": "http.method"}],
            "aggregation": "count_distinct(service.name)",
            # INFO/WARN logs: GET(auth-svc×2, api-gw) → 2 distinct svcs; POST/DELETE excluded
            "validate": lambda r: (rows := _counts(r)) and int(rows["GET"]) == 2 and "POST" not in rows and "DELETE" not in rows,
        },
    ]

    for case in cases:
        # Raw cases omit groupBy; normalize so _run_case can case.get it safely.
        case.setdefault("groupBy", None)
        _run_case(signoz, token, start_ms, end_ms, case)
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# OrderBy — non-body fields as primary sort keys
|
||||
#
|
||||
# Four cases cover every non-body context as the primary ORDER BY key:
|
||||
# orderby.service_asc resource attr (service.name ASC)
|
||||
# orderby.timestamp_desc top-level (timestamp DESC)
|
||||
# orderby.severity_asc top-level (severity_text ASC)
|
||||
# orderby.multi_method_then_score log attr primary, body path secondary
|
||||
#
|
||||
# Data landscape:
|
||||
# log1 — svc-a, GET, INFO, score=80, ts=now-4s
|
||||
# log2 — svc-a, POST, INFO, score=90, ts=now-3s
|
||||
# log3 — svc-b, GET, WARN, score=60, ts=now-2s
|
||||
# log4 — svc-b, DELETE, WARN, score=70, ts=now-1s
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def test_non_body_orderby(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_logs: Callable[[list[Logs]], None],
    export_json_types: Callable[[list[Logs]], None],
) -> None:
    """Exercise non-body fields as primary ORDER BY keys in raw log queries.

    Inserts four logs one second apart, then runs four table-driven cases:
    resource-attr sort, timestamp sort, top-level severity sort, and a
    multi-key sort (log attr primary, body path secondary).
    """
    now = datetime.now(tz=UTC)
    start_ms = int((now - timedelta(seconds=10)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)

    # Timestamps ascend 1s apart so "timestamp desc" reverses insertion order.
    logs_list = [
        Logs(timestamp=now - timedelta(seconds=4), resources={"service.name": "svc-a"}, attributes={"http.method": "GET"}, body_v2=json.dumps({"score": 80}), body_promoted="", severity_text="INFO"),
        Logs(timestamp=now - timedelta(seconds=3), resources={"service.name": "svc-a"}, attributes={"http.method": "POST"}, body_v2=json.dumps({"score": 90}), body_promoted="", severity_text="INFO"),
        Logs(timestamp=now - timedelta(seconds=2), resources={"service.name": "svc-b"}, attributes={"http.method": "GET"}, body_v2=json.dumps({"score": 60}), body_promoted="", severity_text="WARN"),
        Logs(timestamp=now - timedelta(seconds=1), resources={"service.name": "svc-b"}, attributes={"http.method": "DELETE"}, body_v2=json.dumps({"score": 70}), body_promoted="", severity_text="WARN"),
    ]
    export_json_types(logs_list)
    insert_logs(logs_list)
    token = get_token(email=USER_ADMIN_EMAIL, password=USER_ADMIN_PASSWORD)

    cases = [
        # resource attr ASC: svc-a×2 before svc-b×2
        {
            "name": "orderby.service_asc",
            "requestType": "raw",
            "order": [build_order_by("service.name", "asc")],
            "validate": lambda r: len(get_rows(r)) == 4 and _services(r)[:2] == ["svc-a", "svc-a"] and _services(r)[2:] == ["svc-b", "svc-b"],
        },
        # top-level timestamp DESC: ts-1s(svc-b/70), ts-2s(svc-b/60), ts-3s(svc-a/90), ts-4s(svc-a/80)
        {
            "name": "orderby.timestamp_desc",
            "requestType": "raw",
            "order": [build_order_by("timestamp", "desc")],
            "validate": lambda r: len(get_rows(r)) == 4 and _body_scores(r) == [70, 60, 90, 80] and _services(r) == ["svc-b", "svc-b", "svc-a", "svc-a"],
        },
        # top-level severity_text ASC: INFO(svc-a×2) before WARN(svc-b×2)
        {
            "name": "orderby.severity_asc",
            "requestType": "raw",
            "order": [build_order_by("severity_text", "asc")],
            "validate": lambda r: len(get_rows(r)) == 4 and _services(r)[:2] == ["svc-a", "svc-a"] and _services(r)[2:] == ["svc-b", "svc-b"],
        },
        # multi-key: http.method ASC then score ASC — DELETE(70), GET(60,80), POST(90)
        {
            "name": "orderby.multi_method_then_score",
            "requestType": "raw",
            "order": [build_order_by("http.method", "asc"), build_order_by("score", "asc")],
            # DELETE < GET < POST alphabetically; within GET scores go 60→80
            "validate": lambda r: len(get_rows(r)) == 4 and _body_scores(r) == [70, 60, 80, 90],
        },
    ]

    for case in cases:
        # Raw cases carry no groupBy; normalize the key for _run_case.
        case.setdefault("groupBy", None)
        _run_case(signoz, token, start_ms, end_ms, case)
|
||||
Reference in New Issue
Block a user