Compare commits

..

6 Commits

Author SHA1 Message Date
primus-bot[bot]
bb3ccfae3b chore(release): bump to v0.109.2 (#10130)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2026-01-28 13:54:24 +05:30
Ashwin Bhatkal
08f261b0f0 chore: add node version in gor yaml files (#10128) 2026-01-28 07:18:11 +00:00
primus-bot[bot]
9b4fd46d2c chore(release): bump to v0.109.0 (#10127)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2026-01-28 12:13:32 +05:30
Tushar Vats
f348f47951 feat: improved keys adjustment logic (#9927)
This pull request introduces a new utility file, `collision.go`, in the `querybuilder` package, providing robust logic for handling duplicate and ambiguous field keys in query builder queries. The new functions make queries more resilient to naming collisions and ambiguities, especially when dealing with JSON and promoted fields. Additionally, several test cases have been updated to reflect improved handling and warning messages for ambiguous keys.
2026-01-28 11:25:14 +05:30
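The widening rule described in the commit body above is simple to state: when the same key name appears with conflicting field contexts or data types, the conflicting part is dropped so the key stays generic. A minimal sketch of that rule, using hypothetical TypeScript shapes in place of the Go types from `collision.go` (shown in full further down this page):

```typescript
// Hypothetical shapes; illustration only, not the shipped Go code.
interface FieldKey {
  name: string;
  fieldContext?: string; // undefined = unspecified
  fieldDataType?: string; // undefined = unspecified
}

function dedupeKeys(keys: FieldKey[]): FieldKey[] {
  const byName = new Map<string, FieldKey>();
  for (const key of keys) {
    const existing = byName.get(key.name);
    if (!existing) {
      byName.set(key.name, { ...key });
      continue;
    }
    // Conflicting context or data type: widen by dropping the conflicting part.
    if (existing.fieldContext !== key.fieldContext) existing.fieldContext = undefined;
    if (existing.fieldDataType !== key.fieldDataType) existing.fieldDataType = undefined;
  }
  return [...byName.values()];
}

// dedupeKeys([
//   { name: 'duration', fieldContext: 'attribute', fieldDataType: 'string' },
//   { name: 'duration', fieldContext: 'span', fieldDataType: 'string' },
// ])
// -> [{ name: 'duration', fieldContext: undefined, fieldDataType: 'string' }]
```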
Yunus M
9d700563c1 fix: handle custom time ranges in timezones correctly (#10094)
* feat: handle custom time ranges in timezones correctly

* fix: improve timezone UI

* fix: prettier issues

* fix: show time in user selected timezone, overflow issue (#10096)

* fix: show time in user selected timezone, overflow issue, metadata loading

* fix: add gap to popover row

* fix: use inline conditional render for startTimeMs

* fix: remove unused variable in CustomTimePicker

* fix: remove unused variable in CustomTimePicker, TraceMetadata

* fix: update timezone mock in SpanHoverCard test case
2026-01-28 10:30:40 +05:30
Vishal Sharma
54b80c3949 feat: add new onboarding partner logos and update documentation to enforce SVG format (#10120)
2026-01-27 17:33:22 +00:00
42 changed files with 2861 additions and 327 deletions

View File

@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.13
container_name: schema-migrator-sync
command:
- sync
@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.13
container_name: schema-migrator-async
command:
- async

View File

@@ -3,8 +3,8 @@ name: gor-signoz-community
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
- "v[0-9]+.[0-9]+.[0-9]+"
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
permissions:
contents: write
@@ -21,6 +21,10 @@ jobs:
shell: bash
run: |
echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- name: node-setup
uses: actions/setup-node@v5
with:
node-version: "22"
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact
@@ -89,7 +93,7 @@ jobs:
uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser-pro
version: '~> v2'
version: "~> v2"
args: release --config ${{ env.CONFIG_PATH }} --clean --split
workdir: .
env:
@@ -147,7 +151,7 @@ jobs:
if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
with:
distribution: goreleaser-pro
version: '~> v2'
version: "~> v2"
args: continue --merge
workdir: .
env:

View File

@@ -37,6 +37,10 @@ jobs:
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
echo 'DOCS_BASE_URL="https://signoz.io"' >> .env
- name: node-setup
uses: actions/setup-node@v5
with:
node-version: "22"
- name: build-frontend
run: make js-build
- name: upload-frontend-artifact

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.108.0
image: signoz/signoz:v0.109.2
command:
- --config=/root/config/prometheus.yml
ports:
@@ -209,7 +209,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.13
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +233,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.13
deploy:
restart_policy:
condition: on-failure

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.108.0
image: signoz/signoz:v0.109.2
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +150,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.12
image: signoz/signoz-otel-collector:v0.129.13
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +176,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.12
image: signoz/signoz-schema-migrator:v0.129.13
deploy:
restart_policy:
condition: on-failure

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.108.0}
image: signoz/signoz:${VERSION:-v0.109.2}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +213,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +239,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +250,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
container_name: schema-migrator-async
command:
- async

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.108.0}
image: signoz/signoz:${VERSION:-v0.109.2}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +144,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +166,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +178,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
container_name: schema-migrator-async
command:
- async

View File

@@ -24,7 +24,7 @@ The configuration file is a JSON array containing data source objects. Each obje
| `label` | `string` | Display name shown to users (e.g., `"AWS EC2"`) |
| `tags` | `string[]` | Array of category tags for grouping (e.g., `["AWS"]`, `["database"]`) |
| `module` | `string` | Destination module after onboarding completion |
| `imgUrl` | `string` | Path to the logo/icon (e.g., `"/Logos/ec2.svg"`) |
| `imgUrl` | `string` | Path to the logo/icon **(SVG required)** (e.g., `"/Logos/ec2.svg"`) |
### Optional Keys
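As a quick reference, the required keys from the table above can be summarized as a hypothetical TypeScript shape (illustrative only; the actual configuration file is plain JSON, as the diff further below shows):

```typescript
// Hypothetical shape for one onboarding data source entry; field meanings
// are taken from the documentation table above. imgUrl must be an SVG path.
interface OnboardingDataSource {
  label: string;  // display name shown to users, e.g. "AWS EC2"
  tags: string[]; // category tags for grouping, e.g. ["AWS"]
  module: string; // destination module after onboarding completion
  imgUrl: string; // path to the logo/icon; SVG required, e.g. "/Logos/ec2.svg"
}
```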

File diff suppressed because one or more lines are too long (image preview: 11 KiB)

View File

@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Groq</title><path d="M12.036 2c-3.853-.035-7 3-7.036 6.781-.035 3.782 3.055 6.872 6.908 6.907h2.42v-2.566h-2.292c-2.407.028-4.38-1.866-4.408-4.23-.029-2.362 1.901-4.298 4.308-4.326h.1c2.407 0 4.358 1.915 4.365 4.278v6.305c0 2.342-1.944 4.25-4.323 4.279a4.375 4.375 0 01-3.033-1.252l-1.851 1.818A7 7 0 0012.029 22h.092c3.803-.056 6.858-3.083 6.879-6.816v-6.5C18.907 4.963 15.817 2 12.036 2z"></path></svg>

(image preview: 568 B)

View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="200" height="200" viewBox="0 0 200 200" preserveAspectRatio="xMidYMid meet">
<image width="200" height="200" href="data:image/webp;base64,UklGRuINAABXRUJQVlA4WAoAAAAQAAAAxwAAxwAAQUxQSEMFAAABoLz+/6M4+7ogM2lSYJ6SJrj4L3h1+x5SBO+Snph+38I8vfiU9N6jsEdTPG16Ipue7Jree0QYnirID2f5HmxTfr/v7xoREwBc9nwVjaf0/bnzd599NiqVaaxUyqVnty/l9uupeFQDGqs9wzkDbTbO6P0qaQKDQ2NTJjrULGZHBgMkCS47Y6Lj2ZllGi3U2PeFKrq0WvghplAhvMtAlxvbIgSYuy5fQw7W8hvmii1yzERuskxYWEpyvIpcZfmkIqKQ/h45/DndKRptG0NOs21+kcxP/40cL6dDoujYWEHO/73VK4T+Igpwspd/czIoyIyfb8qaVyjMF2sUfnmWPEahPkl4eHUYhbubT91TKOCHvRxKmChkM84b3+8MBc3SPq7Mvo4Cv65ypPcjCr00wI1FDAXPFnFi1ETx6zzwnkASZr2uU68hES+pLvOfQzKeC7pKe4OEfKO5SJtAUk5orgm+QWK+8btEPYfkPKe6wnsJCXrN64YskvS4C3Qk6qjjFiFVzUUOG2BkQdbjKLWEhP0420G+60ja6z7npJG4vzsmzqjDEg7pMZG8ZrczHiKBp5zg2Y0kPuyxL4FEXmyb8pRKjxW7ViOZV9vkf0GnV3PsySChM7b0Iqn7bPBO0qrYYd1WJPZGy0J/U6sy36o0kvtXi/xlev2lWbMNCb7Nkk5GMRayIo0k1y1QPtPsvdJeEomebC9PtfG2woxq1Ug7GST7sTbmMrqZc1vbgIRf11qecvmWIjXK1cKtbEPS72pBMWhnqM1iSPxYsx+o912zAvUKTbQq9arBRsuQ/Msanabf6QYBRj8zUDeIEjhYNywDQ3VZGRirK8rAJACopgyYKkAfSmEPgC4HwwCn5SAHYMiB4dFQEr+KykI0LgvxlCykdFnQ98nC/pws5C7JwvnbsnD3mSw8K8nCp7IsGBVZqMgDyuL0tCxgRRYq8mDIQvmTLJSeycKzu7Jw+7wsXMrJQm6/LOzTZUFPyUIqLgvxqCxEv5IFzWPIgQGQk4PTAMNyoAP0yEEfgGrKgKkCwKQMFAEAxmQgWzckA8N1gzIwWBcw6ccCdXCafnlouIx+yxoFq9Srao2gQL0CNP2Oej80i1Ev1kw1aGcozWAX7bZBi+Ea5WqRViBPuTy0vI5yG1qba9KNzW0NjtEtA21GqlRj4XZgnGp5aDtJtWR7ynuafVbaA51mabAwxCjGOq2AbRTbBpZqf9Gr7LcGfqVXGiyeX6HW3yGrYCO1toLlHUVaTXqtgz5a9YKdGUplwNY5r+j0wm8PrKbTarBZeUylp4pdsJhKCbDdc5hGuz32AUxR6CE4stukj9njDEgw6rA4OPV36qTBsb7rtLnucw7M/kiZkgpO7mF0YQPg7EUmWRaB00epooPzj9MkCy70XqPIJa8bQD1Hj3MquNP/hhpvguBWbYIWExq4V3tDiTcauDl4jg7n/OBu9RIVrqngdm+WBie8wEGdAOYo8HEREx1bBLwcKIntYy/wU70usuuzgae+NBMV+90HnI2bYjITwN/ehyKa6gYu7xbPYeC0J/FELI+XeHgFoKx5IY5XaxTguj8jiswc4H7vpAiK/SBC79a/eVfZ2AGCDKXLPPs7PR8E6t/GeMW2aSDYzvRnHr3XQyBgJZlnfKmOJxUQdTjD+GEei4DQ527I13hQy6+bC+KPbDPcZuwKAxGV2A+Fqluqhe9jKpBSW5ZnzjPPLAsCRQODI9mi6RRzamxoMACUVfv1M4ZdRm64RwUaa9F4St+fu3T7WalcqeB0pWJ8fnb3fG6/nopHv/IAlwEAVlA4IHgIAACwOACdASrIAMgAPm0yk0YkIyGhL7cowIANiWlu4ME0Z2dc36Rf1TwF/nn82/JL256O/rLnEfsT+C/sHnB3g+7b+q9QL8a/on6Tb4Tj/+M9ALu//uu6A9B/rT/mfcA/Tv/W+g/8e8Ab6l/afYA/iX9A/6n+T9RP/H/vvnE/Mv7r/0P8N8CP8e/m3/O/s/an9Bz9JzCMO+Myw4dZdESvjaIP6nMyViAm4nbqrdpXzuF+/VTRlZPwI47fmfvDl5hbzvvZesvGUKjN7ws4T3LThWfoRQ4AoAA8e9lYL1nVnC85qq4mvj9Iph8YxivL/rrIH+nx3GNtE7c2oXhy5gEx47Z8sNva+odGL0UOFre7a/8pcCVRR+VAqOA+CSTUwrA/FB3RytCzvsm/UJ3htyMkuiFBPkIQkgeyZRBek+/i/YVS2uzhmupStTtUD66oGcS5+EVAH8fWxWkDLcyezIzzEhXFpHDKBmiU8U3CGZPeURs5O40NHmZ1yuL/LSebx3inBTn4fScXY5NtGQmTuvJRgK1ZkPKfur71V57K9aGxoHcFJWEWoT+wdJqn/+CEXgxOXAsN7CY9yIhMXtIKmxfysDoOvJcu/r+kfQ+KAF12SAAA/tznX6ah3T+0kRzuFzn2fQ8fFQ3Jwhf+PAnqgJuUaDbeHheuyuveOy2cK7s60D6dQJ/6PJXvNmeGbDhD3I1NjZn9oc4JUzlHaJP8whuvwQH/+01Q425kRC4hJihTqmiFP5nRhpc7iojBlxQWo4EVUGzU3wuFln27vn07cdcSf8WjpT+zeWz4PVcN8pkVGsiKq7cBNzA3mqWriMtC2CSTtTYTdjMvF7Ijau6AcNe7AIgk6dia7G1bv5y7LBzTXH6/6CeEBeH+MU/ERnSMlwi7VF5hFXzLzRHBV/v1swHdmHAOmY60klxbO1sY26jVQdRXsf4BTv4aJ1LVbXI3ZFDBhVXuY/q0r3zY1ve75gXWrzLOdUpmWVI+ohESBQUZ+rU+U6osFlcstDVHqCNBEOiT1jawA4oQoqrxGTonFFIGpeu2Xj8UTnLeFE2Md3o2FwYCri8tnQ5YN/XViP9pfXkt2qOy/yEtKZZNhoBmIHfHbQrYCz5Kh2bdgqFw5O1uy/Hou6pfwPWz3f2JocCmu2hqd32lMlF7X6xR+B7KcNvMTXv9GkELd8Xu/Kfpoax7QPHsq9RNLcdLDme5+5J8r5I31b/bsb2Dr3veZ5TYKOfV6S+zqm3Pm3su3t6C2kn7SOtPaXrXGBgTZapBKzj5rvfobSFdc/F15bMv/SRizlLib5FfiLOiZ6k/j6LGHT37iY4b1+TQFbOGGn+tY5WRq6SisawiHhcRzE9LURYQtib7q0jxkwQBuBCXPTBOJAkaAG7Vp5dU+qxrV88x9ASUpuVyMu1qzoKu7SKuhpoqbqdcdNWiBV+lVb41JHBEJ7oeHuTt/PyKeXXDdZ5ohET/Y75lxzxI6uzQ5r4IGRvLWul1kvJbP1nnNsAoQhO0tt0L60Pp7S5cTCavKf2FyYs/nxKXq6DKJtk8uRIycOOtWfyvZHYkjulFNNFR/IjND9uujdq66p3qRvf1Nm8SZOIwpni9IrNC9uOzRXnq9BhaDPbRNwLK2DHcYuH1nwzm4c
USUBu7gk5JC5UGE1CfP0TLXwWWr+6JUra/cyPfLM1oeynDbzAToAPUuWA2L9545vaFlmshP8Nj7r96uNDW6ngAyjF0RPM4pH5pGHzui7GI9KmamJujUcZHQLh3/w1hSn4IJdpYUoZMQHC3iKNFeOvi94ZueafCvFyASXifsLgpzzwNt9z5juYKX+CJES84eVuQ5+BncnQF6BZqo2ezHyrmL/ctjxvWpRnj33RIWMu1kccgXDdGBgevRfNIHxouiYJs6eE/Gt593jZcGq/L55a1l9OO/jzhuDz4lDbhFfmbpK/sEfDFeY3XHjDMj+TkeVBLAbJ5EVVKCYeWVurm5iroOfGnBuUSzeP2mKC2ENA3HaMy2jTzGV7FMun64t97WY1zOM8R5FG7QyAeZuNQos1KH15jO1Dymt2/DO6bHgFWloeRbpv+sovdYJSx2/gjgvYHEO2VwRFao5Bny2/MA31GzjN4eTyWvA16rhRu0NMu477gNZEaCRgHvcTVDchTfVckI4Cx4gd4YhUAgjKD5wvCFwX8upRTi2Rdy3FeXDhmw3xEntdU9rGdmncE7jFw+s+vAR2v+V2PjV7jAf9YvwKTdX8v/49RppBE21OCxhVheNMphMDyhtGRopafi54pihoCFgfyYzNzbyFH5voOmGEIIE+cSgHxkMFNbiR3/7BbwcP5SBwy6u/vmP02xcKsRdFj1UW7ZLxdk8dzs8oZEioxMnOR2rxrQ+N9z5nc6U7u40DDVEdwbJuyyQHU/LiE4wmO1N3bxC33X10I8gHmjukAh/Axq6s/eRqK1hwiHkR98coaA6CjFu3hAWv3mwnvK2Y+j/MzAuzOaxEiEMKOJp8JB+dsF7YyNwRM9+pFGSmF95KwrPdEM+/ZUBRDawKe5Kh0NkfZPSesQvMnn3ZgX+3P8ebe1SHo/4p00CyCfNiZ+efcxFg/mw1jqPsN7MOOv+lOglbWG3QEqlDs3OqCKIzANydcibObrwLNDGusfzk42/iksBPVgCgkil6O65u7r5O+q6NhFJRIaVVtGEYSUXNrHnsnBz30tuyBsc1bP9wF3KI7iNCPcDEV+4lT5OMJp5vKeg6p4BGfY76Aap/wY5Pdu/JHo3yI47AqoWq3Z5/P+O6mBnP3a6N/BdS62DLwp0WWwLAn/k1aiJr2oMje765btFUIiGn0MJ3o4nk4ANHxLRR9HrZndMvIAcAOBq5offJZ+HpBeAQ5WEqiPOd5kpWNg0wkdJCMpHwJ4AAAAA==" />
</svg>

(image preview: 4.8 KiB)

File diff suppressed because one or more lines are too long (image preview: 14 KiB)

View File

@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="256" height="256" viewBox="0 0 256 256" preserveAspectRatio="xMidYMid meet">
<image width="256" height="256" href="data:image/webp;base64,UklGRjoOAABXRUJQVlA4WAoAAAAQAAAA/wAA/wAAQUxQSLsGAAAB8IZtE+q22baNJFsOrKIpqC01lLw1jOWayszMTGFmZo6s0nWVmQzXFeb4KoUjwwVO1BguV3HBDIL5YZqZNda/bIqICYCw/8P+DxvYmulyt2AX/ce+2UkyIm5/v8iHWra4XZnWDt1ciF178NP+Au7/H2pbeHN7did2/dV3cfX4HDV22tuw52AoGHicp8c+1DnH3sqJoaH/OpavUWsnANyEoeKvPRmeQL1vAmthyIBT6ezlihVaMzB0rLCRPYKap2eFEHg12beqZblDiclkv6nmbg4l3qEyUPVmDCW/pOqnGwZDifepLtAt6AklFlBBnWqer0OJu8j2qPb1UyFEw7lkk1V7yqgKHf4G5PGNilUZMClkaEygg+WKTQKI/DlUmACMRrFaP0cCQL/S0OADCwdcekap0n7QekBRKOC0AW+KR6WiAdC2scHf1VU+COwXvK+Pf4MBHUxaW9qFBX58tTtITMmqVKV0bRJ01nHDXV3z7aPOBbkJaXdpeYMDwv4/C6GR+uJYLV/OPF+MdeCjY8W/ekusnJQHxtK/mGp0JiPXh5oGdtxlkeBY5UVz5j8aISFmYSny+nIzOtJnM+p7IJnNNr0RzXtsMN9LNShwc592RnhR49oMph7/RFM3PcBk/whleke0MbAadW66jiVyG5o8cBeL5TOUWj0QAIwS1Pq3OI7laPraJI7xKLfEAFiOen/IMNBvPtzG0L9BEC6H6AbFgol0X6GGY+g2ouSG6NdQ80VkF/pUeJfM/qcofC1PtYNkD6CKZWTXouy8MtVaIqgW64AxVG8KK0Pd46neV2Iw1VJh2juoPtJtjbR63XpQOZVIppohrP6gauVA/YYOTRFUDwo7uEa1T8kG6bAbqHsJWzNKtTvJoEiFF8hgj6xRkK/Yf6x0L2pwpjvdnaLyAUYE9LoF6COOKPAMMG4XFBgBANPU2gCcl1ab7gvg7OuVMw1ab1LqcxsLpNabbEd3Fhh6RsomaHuCT6HgQhswDykxlcsOzJccF+GbAO1fnq3O7pHA32P+X6b5MRX47ZOq+LIvhw4nTMx2e7Qsypt5Ocjs+dDffjzlkV68dd4wkBl1tyv/pIfcnT0xAcL+P9twwsRst4f8ZL7rnighQ+duLvaI//FvjxhCLpuZW+TR0p09MaFjl2cje9Vku4AbfkSz/rWop4DhO1Hb7Ms7MMGHEo9fwmXfiGY+NYzLuiCI+vomtLMJhf4+nCdqG5q7IZXH9gnqvKmNaSjW24/lUzR7bQrLWtR6GgCMCMjBnRxPovmPRzDciGoHRgDko+S76Lr/XwF8ic5aqBfmwygUfYDuOdTwv3S3o+aj1sjCvmTbVcBhZB+ptuagsIepbI06jCX7VbWD9cLmUF2EOrqouqPq9Sh8LdVgJT6n6qOb+JVUVyjxAVW0cmXCxlFdqMQyKmuzamV5wq6ngtM6PEQFP6uW95qs6iiyt1TwxZLNU+216AZRm4B8pArfAHmCX7GGaFguqdFBB/9QIDCIDv6m2HIAo0TQRGBMrDXfCmCMrVSrxACAgdVivrRwwB0Bs+2I5IBrGpSqHgitR3iFfGoH3vuazLWlB/CmV6vkHQFt99ksoeY1C3APPmKipuk24E48oNDmPtDBjFwfU9nCWBBoe/iASbxrHCDQctf2gCq+3AzopJH64lj6B1MsIDX2llfHin9soBWknpfx8lgtX0w1IOz/sxA6briL/LbhhhzrxZl3aXltX0GxV90l/gZHp5LWliKrf++zdhGj3/8DNT251CEiZnYBmrJ0bVJHjA1+5D91M1+fb1HdlsVRbLZJdWha/wajnQFFKHO+hWmMFzU+GMd07lY0ddGANvqVotQVPCMbUOeiGJYe/0KTl/YDgMifUe6jHLEVqPU2C8ff0fQ/RwJMQsF/RDN8gHo/zZCJCk4Co0oSLqG7LKjY6Qi6nzSoMp5C0ZU2spWo+a1kQ1DFp76WhaPJilVzkc3U4WuPsNepzkHVD5J9r4MnKGw5VYpulWSHdQii8LVUg3X7i+y4DtgsbAFVsm6nyX7Wodkt7EmqSL9qu8k+1sGdJSyJCvJVW0D2kg5ZGbKOAfk41QaT9Q6okG4tFPUU3fm1iu0B+s81KLTCTZKORdDBJL2CoxgSGhW4CQCcchquAEbbHrUWAOcL5nMCANhzpLTcCqzRJ5T6yMICS82WY28FdqeMymuB+fzNKi22APM4n6mcdmj35kK+lndjgN3yfIU6B68B/kF7zFN4M3TUmulytzB4t0/pCyLtD3z8X78ajUc2XAMyhy8/WCevxe3KtELY/2H/hw0OAFZQOCBYBwAAcDIAnQEqAAEAAT5tMpJFpCOhmApkyEAGxLE3cLO/I/8A/AD9APeogBDf+wH8n/gH4AfoB/D8D/bUGJv3b8rvGLBD2z8kv6L+13LBGO/UZ+8+1n5jf2T+j+wDzCP0i/03UP8wn6r/83/Ye+Z/jv7B7I/13/ID5AP6p/eP/Z7WHqRft57AH8g/2XpTfsZ/3fkf/a/9xvgd/XX/4ewB6AHVf9AP4B+AH6V0/6X/37XCnfIaNJCv3UeIbyMN5GG7FGGXSbcdew8aH+Bc8jPujAWD5wlI0vGluc15OXPagLZ8UxFfBAlngGwyAgncED6mebGS9plnBjVRDpz0QjHI7NbLJCxCvFKIDQxTIXTAX5W3acocmvQ/OXAfnjdGNjAFiZXJq+/TbBzGEHdg+muQ+hFFPR2JnvUSNpSYiGR57j5nlFet8+EyOPVmbGlR6r47DqM+38DTtDHOHz36jaDYexg9qHpxk3+59nQC1SOzy4m4u34GLkKXdKCpMehdu53FcSHUZwi37E2m0zNsuu+Q0aSFfuo8Q3kYbyMN5GG6AAD8hoAAAAAAAtSatOsx3/xQJLZJVNwvp3oF/sWdRuskPj8s391kh8d5sIy+9WFVihdPhMB/7oNptL8qeszrOKuQiABZ2qtQOEHxZqWJnio26h/5Vo4krKGOO0sEz6Hv5rWo4fY5j+Zkip4N2flcerdMsQa4aobjtm8EeilMPd1RKe3T2L0a/Cv9h3t065fg6SjwXyBvg2hYN5RVMz/+NTMiir38FhGGkGCU+n7zH7a6z+ZsKOKACwNxEF24gJYE5Z8YDT/LDnQYzNPs7fnw0DWqqrx34u1n8kIsG90iig+vnPhkaVDzEnlMOItgwAov/p/WcPmzTS/ESKniY3/MPn06vQ7wic6l2jPoEyPrpPntLEOo7YMH2DlbEeYrf53exjRiZcoP/Ct0vnxkiHQpZk7RAgR6cllYgknBYToYbOnOoJ31iIPn+tjP9Wi1AD4rjN8FcbwzXiiXhq7Gpi/P39FnE7kEHxM3vJ6IfeN8kET6YhP9+bS4Ndq9z76kggowPI6pCeXTbktqMjiZBKP512NVTKWySMPWK2cDgYHLavMlfOsRt+ejI3anQJ8WOB5Sj4jg+PmmRO
0YbrVrKJlMyW9DQhQxUYSb68hxwsa0pT95muv26wX/UbDfNNZqiY+FY3E3IrOLBfx/PvkZag+Z1tU15DmS4oWxI1MZPiItkOO5brQrLJ4gEX/aqDZVYH5qQqEYAdzEawMyn+D8EIsnVFuZBNbvILVTzHSwk5p+TPibb4Bij8La/IJ3+b8zP3KyRwEs7cd4bm+Kk65mOeRHpY//wUWtOhkyo+qCpkP9B+BZwjtgg93EV8KAePfDCEvQd6szMwcXg364KeljltsktN3CEwdKtH5sTvqV4jy9/ySVzVng2aUX/rOq5NjhT5vpK5nuKGWPdhY6RWG/JJfzsRE9JFNVJDLuZWWPyOhtK52DWPq8KkvizSdW1HXse/jEbnWBMMY4QwuO7eYDSR462HFk4zWyEZKugT+Uop5KD4KF+4xpfPgWfPcD4doS/+fEI2HjOrl8ctTT0EKp255SHoe3jdEi6QVMzZoAJS4j6vntijLltNjzgznBcSIrdKAgCdZjX9+VDrtqECJvxN3Z0S2oEoVZf2tji5V6DXamlubRVYpvsgB0DK/ACFIRFc4ggMNRpcbv9wYeN/ch3k6B5RkfzyDTkXb+RkS8ZLdbq1+tq/I8eK9g1/wuAZNhQkc5QmjUYgl3QAo86JmxC4zO0au6U/fSE11r16YB3WkeheQcJUKCbRTGS8OOs30DN7fpcxuuyEU3yM8HEqoyjd1rdQAi9K6LdRvQyLh3A9JmvA1EAjN8i2uLOeZi81BhiwAsZle61AOM78/l7yxuHy40ECHE9YostmdueER4HTWcMMGg0X8eJcujrbN1S4qyGxITPLghrKViISKTyPZ5qSo6EwnWhwA+XrAtKvtNVyS5f2iWAFmfHlQxfB+NXNwhxPwQ2akkMg3wgSifNE4lDRelowqblVu+ByzHBWsJkemp5mw2u/1Wl+RmpkyV7U5cIjxTkKyprg12fSsb7L3rIIHxd4ZgDArHDgESQGwDGC+iMlFrZKAfhcSUxuvPko5fkWdmQ/+oI2RMP4/d0h4/foef6PFHier1x+nRlT+0ZjBnE6Z32aVtbLbEPc+Z/EgAGsRF1RpteYADfJzufleSz65a2AZGpwwvDlrHbL9WdGx4Ww776VnxhYV27x6KH0biDB/mTxsIwv4zQQiKkDjcibMmzgp2PvZ3foNNj+SEDx11+lqzPpayPGMBRlQgr9yVhV93j2rzdy+mYvCoHNBVmA6kubmaB2ilYOI/AMpq6CFrzpkL8UJoedOwyLxtwJIWPjP+a++KkMbgYxBuo7dViu8XMjZ9jD1lMMABEIMT5fvOnzyyoKUFfnseMxeoFZjuySXC9vxk95ivyfGaZgXSPDdaiQAGYrj9aBUAAAAAAAA=" />
</svg>

(image preview: 4.9 KiB)

View File

@@ -131,6 +131,45 @@
border-top: 1px solid var(--bg-ink-200);
padding: 8px 14px;
.timezone-container {
display: flex;
align-items: center;
justify-content: space-between;
&__left {
display: flex;
align-items: center;
justify-content: center;
gap: 6px;
.timezone__name {
font-size: 12px;
line-height: 16px;
color: var(--bg-robin-400);
font-weight: 500;
}
.timezone__separator {
font-size: 12px;
line-height: 16px;
color: var(--bg-robin-300);
font-weight: 500;
}
.timezone__offset {
font-size: 11px;
line-height: 14px;
color: var(--bg-robin-400);
font-weight: 500;
}
}
&__right {
display: flex;
align-items: center;
justify-content: center;
gap: 6px;
}
&,
.timezone {
font-family: Inter;
@@ -138,6 +177,7 @@
line-height: 16px;
letter-spacing: -0.06px;
}
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
@@ -156,18 +196,21 @@
}
}
}
.timezone-badge {
display: flex;
align-items: center;
justify-content: center;
padding: 0 4px;
border-radius: 2px;
background: rgba(171, 189, 255, 0.04);
color: var(--bg-vanilla-100);
font-size: 12px;
color: var(--bg-vanilla-400);
background-color: var(--bg-ink-200);
font-size: 9px;
font-weight: 400;
line-height: 16px;
letter-spacing: -0.06px;
line-height: 12px;
letter-spacing: -0.045px;
margin-right: 4px;
width: 72px;
cursor: pointer;
}
@@ -183,6 +226,7 @@
font-size: 11px;
color: var(--bg-vanilla-400);
background-color: var(--bg-ink-200);
cursor: pointer;
&.is-live {
background-color: transparent;
@@ -238,11 +282,10 @@
.date-time-popover__footer {
border-color: var(--bg-vanilla-400);
}
.timezone-container {
color: var(--bg-ink-400);
&__clock-icon {
stroke: var(--bg-ink-400);
}
.timezone {
color: var(--bg-ink-100);
background: rgb(179 179 179 / 15%);

View File

@@ -4,7 +4,6 @@
import './CustomTimePicker.styles.scss';
import { Input, InputRef, Popover, Tooltip } from 'antd';
import logEvent from 'api/common/logEvent';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
@@ -22,9 +21,7 @@ import {
ChangeEvent,
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from 'react';
@@ -113,22 +110,8 @@ function CustomTimePicker({
const [activeView, setActiveView] = useState<ViewType>(DEFAULT_VIEW);
const { timezone, browserTimezone } = useTimezone();
const { timezone } = useTimezone();
const activeTimezoneOffset = timezone.offset;
const isTimezoneOverridden = useMemo(
() => timezone.offset !== browserTimezone.offset,
[timezone, browserTimezone],
);
const handleViewChange = useCallback(
(newView: 'timezone' | 'datetime'): void => {
if (activeView !== newView) {
setActiveView(newView);
}
setOpen(true);
},
[activeView, setOpen],
);
const [isOpenedFromFooter, setIsOpenedFromFooter] = useState(false);
@@ -371,6 +354,7 @@ function CustomTimePicker({
startTime,
endTime,
DATE_TIME_FORMATS.UK_DATETIME_SECONDS,
timezone.value,
);
if (!isValidTimeRange) {
@@ -422,8 +406,8 @@ function CustomTimePicker({
</div>
);
const handleOpen = (e: React.SyntheticEvent): void => {
e.stopPropagation();
const handleOpen = (e?: React.SyntheticEvent): void => {
e?.stopPropagation?.();
if (showLiveLogs) {
setOpen(true);
@@ -436,12 +420,12 @@ function CustomTimePicker({
// reset the input status and error message as we reset the time to previous correct value
resetErrorStatus();
const startTime = dayjs(minTime / 1000_000).format(
DATE_TIME_FORMATS.UK_DATETIME_SECONDS,
);
const endTime = dayjs(maxTime / 1000_000).format(
DATE_TIME_FORMATS.UK_DATETIME_SECONDS,
);
const startTime = dayjs(minTime / 1000_000)
.tz(timezone.value)
.format(DATE_TIME_FORMATS.UK_DATETIME_SECONDS);
const endTime = dayjs(maxTime / 1000_000)
.tz(timezone.value)
.format(DATE_TIME_FORMATS.UK_DATETIME_SECONDS);
setInputValue(`${startTime} - ${endTime}`);
};
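The pattern in this hunk — convert the nanosecond epoch to milliseconds, then render through the dayjs timezone plugin — is the heart of the fix. A self-contained sketch (the format string is illustrative; the app uses DATE_TIME_FORMATS.UK_DATETIME_SECONDS):

```typescript
import dayjs from 'dayjs';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(timezonePlugin);

// Render a nanosecond epoch in the user-selected IANA zone instead of the
// browser's local zone.
function formatInTimezone(epochNs: number, tzValue: string): string {
  return dayjs(epochNs / 1_000_000)
    .tz(tzValue)
    .format('DD/MM/YYYY HH:mm:ss'); // illustrative format
}

// e.g. formatInTimezone(Date.now() * 1_000_000, 'Asia/Kolkata')
```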
@@ -468,18 +452,6 @@ function CustomTimePicker({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [location.pathname]);
const handleTimezoneHintClick = (e: React.MouseEvent): void => {
e.stopPropagation();
handleViewChange('timezone');
setIsOpenedFromFooter(false);
logEvent(
'DateTimePicker: Timezone picker opened from time range input badge',
{
page: location.pathname,
},
);
};
const handleInputBlur = (): void => {
resetErrorStatus();
};
@@ -498,20 +470,28 @@ function CustomTimePicker({
return '';
};
const focusInput = (): void => {
// Use setTimeout to wait for React to update the DOM and make input editable
setTimeout(() => {
const inputElement = inputRef.current?.input;
if (inputElement) {
inputElement.focus();
inputElement.select();
}
}, 100);
};
// Focus and select input text when popover opens
useEffect(() => {
if (open && inputRef.current) {
// Use setTimeout to wait for React to update the DOM and make input editable
setTimeout(() => {
const inputElement = inputRef.current?.input;
if (inputElement) {
inputElement.focus();
inputElement.select();
}
}, 0);
focusInput();
}
}, [open]);
const handleTimezoneChange = (): void => {
focusInput();
};
return (
<div className="custom-time-picker">
<Tooltip title={getTooltipTitle()} placement="top">
@@ -532,6 +512,7 @@ function CustomTimePicker({
customDateTimeVisible={defaultTo(customDateTimeVisible, false)}
onCustomDateHandler={defaultTo(onCustomDateHandler, noop)}
onSelectHandler={handleSelect}
onTimezoneChange={handleTimezoneChange}
onGoLive={defaultTo(onGoLive, noop)}
onExitLiveLogs={defaultTo(onExitLiveLogs, noop)}
options={items}
@@ -583,8 +564,8 @@ function CustomTimePicker({
prefix={getInputPrefix()}
suffix={
<div className="time-input-suffix">
{!!isTimezoneOverridden && activeTimezoneOffset && (
<div className="timezone-badge" onClick={handleTimezoneHintClick}>
{activeTimezoneOffset && (
<div className="timezone-badge">
<span>{activeTimezoneOffset}</span>
</div>
)}

View File

@@ -31,6 +31,7 @@ import { TimeRangeValidationResult } from 'utils/timeUtils';
import CalendarContainer from './CalendarContainer';
import { CustomTimePickerInputStatus } from './CustomTimePicker';
import TimezonePicker from './TimezonePicker';
import { Timezone } from './timezoneUtils';
const TO_MILLISECONDS_FACTOR = 1000_000;
@@ -52,6 +53,7 @@ interface CustomTimePickerPopoverContentProps {
lexicalContext?: LexicalContext,
) => void;
onSelectHandler: (label: string, value: string) => void;
onTimezoneChange: (timezone: Timezone) => void;
onGoLive: () => void;
selectedTime: string;
activeView: 'datetime' | 'timezone';
@@ -101,6 +103,7 @@ function CustomTimePickerPopoverContent({
setCustomDTPickerVisible,
onCustomDateHandler,
onSelectHandler,
onTimezoneChange,
onGoLive,
selectedTime,
activeView,
@@ -208,6 +211,7 @@ function CustomTimePickerPopoverContent({
setActiveView={setActiveView}
setIsOpen={setIsOpen}
isOpenedFromFooter={isOpenedFromFooter}
onTimezoneSelect={onTimezoneChange}
/>
</div>
);
@@ -352,26 +356,30 @@ function CustomTimePickerPopoverContent({
<div className="date-time-popover__footer">
<div className="timezone-container">
<Clock
color={Color.BG_VANILLA_400}
className="timezone-container__clock-icon"
height={12}
width={12}
/>
<span className="timezone__icon">Current timezone</span>
<div></div>
<button
type="button"
className="timezone"
onClick={handleTimezoneHintClick}
>
<span>{activeTimezoneOffset}</span>
<PenLine
color={Color.BG_VANILLA_100}
className="timezone__icon"
size={10}
<div className="timezone-container__left">
<Clock
color={Color.BG_ROBIN_400}
className="timezone-container__clock-icon"
height={12}
width={12}
/>
</button>
<span className="timezone__name">{timezone.name}</span>
<span className="timezone__separator"></span>
<span className="timezone__offset">{activeTimezoneOffset}</span>
</div>
<div className="timezone-container__right">
<Button
type="text"
size="small"
className="periscope-btn text timezone-change-button"
onClick={handleTimezoneHintClick}
icon={<PenLine size={10} />}
>
Change Timezone
</Button>
</div>
</div>
</div>
</>

View File

@@ -121,12 +121,14 @@ interface TimezonePickerProps {
setActiveView: Dispatch<SetStateAction<'datetime' | 'timezone'>>;
setIsOpen: Dispatch<SetStateAction<boolean>>;
isOpenedFromFooter: boolean;
onTimezoneSelect: (timezone: Timezone) => void;
}
function TimezonePicker({
setActiveView,
setIsOpen,
isOpenedFromFooter,
onTimezoneSelect,
}: TimezonePickerProps): JSX.Element {
const [searchTerm, setSearchTerm] = useState('');
const { timezone, updateTimezone } = useTimezone();
@@ -153,11 +155,11 @@ function TimezonePicker({
}, [isOpenedFromFooter, setActiveView, setIsOpen]);
const handleTimezoneSelect = useCallback(
(timezone: Timezone) => {
(timezone: Timezone): void => {
setSelectedTimezone(timezone.name);
updateTimezone(timezone);
onTimezoneSelect(timezone);
handleCloseTimezonePicker();
setIsOpen(false);
logEvent('DateTimePicker: New Timezone Selected', {
timezone: {
name: timezone.name,
@@ -165,7 +167,7 @@ function TimezonePicker({
},
});
},
[handleCloseTimezonePicker, setIsOpen, updateTimezone],
[handleCloseTimezonePicker, updateTimezone, onTimezoneSelect],
);
// Register keyboard shortcuts
@@ -194,7 +196,7 @@ function TimezonePicker({
<div className="timezone-picker__list">
{getFilteredTimezones(searchTerm).map((timezone) => (
<TimezoneItem
key={timezone.value}
key={`${timezone.value}-${timezone.name}`}
timezone={timezone}
isSelected={timezone.name === selectedTimezone}
onClick={(): void => handleTimezoneSelect(timezone)}

View File

@@ -1,6 +1,4 @@
.span-hover-card {
width: 206px;
.ant-popover-inner {
background: linear-gradient(
139deg,
@@ -60,8 +58,8 @@
display: flex;
justify-content: space-between;
align-items: center;
max-width: 174px;
margin-top: 8px;
gap: 16px;
}
&__label {

View File

@@ -4,6 +4,7 @@ import { Popover, Typography } from 'antd';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { convertTimeToRelevantUnit } from 'container/TraceDetail/utils';
import dayjs from 'dayjs';
import { useTimezone } from 'providers/Timezone';
import { ReactNode } from 'react';
import { Span } from 'types/api/trace/getTraceV2';
import { toFixed } from 'utils/toFixed';
@@ -29,6 +30,8 @@ function SpanHoverCard({
duration,
);
const { timezone } = useTimezone();
// Calculate relative start time from trace start
const relativeStartTime = span.timestamp - traceMetadata.startTime;
const {
@@ -37,9 +40,9 @@ function SpanHoverCard({
} = convertTimeToRelevantUnit(relativeStartTime);
// Format absolute start time
const startTimeFormatted = dayjs(span.timestamp).format(
DATE_TIME_FORMATS.SPAN_POPOVER_DATE,
);
const startTimeFormatted = dayjs(span.timestamp)
.tz(timezone.value)
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS);
const getContent = (): JSX.Element => (
<div className="span-hover-card">
@@ -87,7 +90,7 @@ function SpanHoverCard({
</Typography.Text>
</div>
}
mouseEnterDelay={0.5}
mouseEnterDelay={0.2}
content={getContent()}
trigger="hover"
rootClassName="span-hover-card"

View File

@@ -2,21 +2,54 @@ import { act, fireEvent, render, screen } from '@testing-library/react';
import { Span } from 'types/api/trace/getTraceV2';
import SpanHoverCard from '../SpanHoverCard';
import { TimezoneContextType } from 'providers/Timezone';
// Mock dayjs completely for testing
// Mock timezone provider so SpanHoverCard can use useTimezone without a real context
jest.mock('providers/Timezone', () => ({
__esModule: true,
useTimezone: (): TimezoneContextType => ({
timezone: {
name: 'Coordinated Universal Time — UTC, GMT',
value: 'UTC',
offset: 'UTC',
searchIndex: 'UTC',
},
browserTimezone: {
name: 'Coordinated Universal Time — UTC, GMT',
value: 'UTC',
offset: 'UTC',
searchIndex: 'UTC',
},
updateTimezone: jest.fn(),
formatTimezoneAdjustedTimestamp: jest.fn(() => 'mock-date'),
isAdaptationEnabled: true,
setIsAdaptationEnabled: jest.fn(),
}),
}));
// Mock dayjs for testing, including timezone helpers used in timezoneUtils
jest.mock('dayjs', () => {
const mockDayjs = jest.fn(() => ({
format: jest.fn((formatString: string) => {
if (formatString === 'D/M/YY - HH:mm:ss') {
return '15/3/24 - 14:23:45';
}
return 'mock-date';
}),
}));
const mockDayjsInstance: any = {};
mockDayjsInstance.format = jest.fn((formatString: string) =>
// Match the DD_MMM_YYYY_HH_MM_SS format: 'DD MMM YYYY, HH:mm:ss'
formatString === 'DD MMM YYYY, HH:mm:ss'
? '15 Mar 2024, 14:23:45'
: 'mock-date',
);
// Support chaining: dayjs().tz(timezone).format(...) and dayjs().tz(timezone).utcOffset()
mockDayjsInstance.tz = jest.fn(() => mockDayjsInstance);
mockDayjsInstance.utcOffset = jest.fn(() => 0);
const mockDayjs = jest.fn(() => mockDayjsInstance);
Object.assign(mockDayjs, {
extend: jest.fn(),
// Support dayjs.tz.guess()
tz: { guess: jest.fn(() => 'UTC') },
});
return mockDayjs;
});
@@ -84,7 +117,7 @@ describe('SpanHoverCard', () => {
expect(screen.getByText('Hover me')).toBeInTheDocument();
});
it('shows popover after 0.5 second delay on hover', async () => {
it('shows popover after 0.2 second delay on hover', async () => {
render(
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
<div data-testid={HOVER_ELEMENT_ID}>Hover for details</div>
@@ -101,7 +134,7 @@ describe('SpanHoverCard', () => {
// Advance time by 0.2 seconds
act(() => {
jest.advanceTimersByTime(500);
jest.advanceTimersByTime(200);
});
// Now popover should appear
@@ -117,10 +150,10 @@ describe('SpanHoverCard', () => {
const hoverElement = screen.getByTestId(HOVER_ELEMENT_ID);
// Quick hover and unhover
// Quick hover and unhover (less than the 0.2s delay)
fireEvent.mouseEnter(hoverElement);
act(() => {
jest.advanceTimersByTime(200); // Only 0.2 seconds
jest.advanceTimersByTime(100); // Only 0.1 seconds
});
fireEvent.mouseLeave(hoverElement);
@@ -163,7 +196,7 @@ describe('SpanHoverCard', () => {
expect(screen.getByText('Start time:')).toBeInTheDocument();
});
it('displays new date format with seconds', async () => {
it('displays date in DD MMM YYYY, HH:mm:ss format with seconds', async () => {
render(
<SpanHoverCard span={mockSpan} traceMetadata={mockTraceMetadata}>
<div data-testid={HOVER_ELEMENT_ID}>Date format test</div>
@@ -178,8 +211,8 @@ describe('SpanHoverCard', () => {
jest.advanceTimersByTime(500);
});
// Verify the new date format is displayed
expect(screen.getByText('15/3/24 - 14:23:45')).toBeInTheDocument();
// Verify the DD MMM YYYY, HH:mm:ss format is displayed
expect(screen.getByText('15 Mar 2024, 14:23:45')).toBeInTheDocument();
});
it('displays relative time information', async () => {

View File

@@ -26,7 +26,6 @@ import { ApiMonitoringHardcodedAttributeKeys } from '../../constants';
import { DEFAULT_PARAMS, useApiMonitoringParams } from '../../queryParams';
import { columnsConfig, formatDataForTable } from '../../utils';
import DomainDetails from './DomainDetails/DomainDetails';
import DOCLINKS from 'utils/docLinks';
function DomainList(): JSX.Element {
const [params, setParams] = useApiMonitoringParams();
@@ -146,17 +145,7 @@ function DomainList(): JSX.Element {
/>
<Typography.Text className="no-filtered-domains-message">
No External API calls detected. To automatically detect them, ensure
Client spans are being sent with required attributes.
<br />
Read more about <span> </span>
<a
href={DOCLINKS.EXTERNAL_API_MONITORING}
target="_blank"
rel="noreferrer"
>
configuring External API monitoring.
</a>
This query had no results. Edit your query and try again!
</Typography.Text>
</div>
</div>

View File

@@ -295,7 +295,8 @@
"otel",
"otel collector",
"otlp",
"signoz"
"signoz",
"open telemetry"
],
"imgUrl": "/Logos/opentelemetry.svg",
"link": "/docs/migration/migrate-from-opentelemetry-to-signoz/"
@@ -591,6 +592,7 @@
"js",
"nestjs",
"nextjs",
"next.js",
"nodejs",
"nuxtjs",
"reactjs",
@@ -2212,7 +2214,11 @@
"observability",
"otel hostmetrics receiver",
"system metrics monitoring",
"vm performance metrics"
"vm performance metrics",
"ubuntu",
"windows",
"macos",
"linux"
],
"imgUrl": "/Logos/hostmetrics.svg",
"link": "/docs/userguide/hostmetrics/"
@@ -3580,6 +3586,140 @@
],
"link": "/docs/llm/opentelemetry-openai-monitoring/"
},
{
"dataSource": "codex-monitoring",
"label": "Codex",
"imgUrl": "/Logos/openai.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"codex",
"codex monitoring",
"codex observability",
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel codex",
"traces",
"tracing"
],
"link": "/docs/codex-monitoring/"
},
{
"dataSource": "groq-observability",
"label": "Groq",
"imgUrl": "/Logos/groq.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"groq",
"groq monitoring",
"groq observability",
"groq api monitoring",
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel groq",
"traces",
"tracing"
],
"link": "/docs/groq-observability/"
},
{
"dataSource": "google-adk-observability",
"label": "Google ADK",
"imgUrl": "/Logos/google-adk.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"adk",
"google adk",
"google adk monitoring",
"google adk observability",
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel google adk",
"traces",
"tracing"
],
"link": "/docs/google-adk-observability/"
},
{
"dataSource": "openlit",
"label": "OpenLit",
"imgUrl": "/Logos/openlit.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"llm",
"llm monitoring",
"monitoring",
"observability",
"openlit",
"openlit monitoring",
"openlit observability",
"otel openlit",
"traces",
"tracing"
],
"link": "/docs/openlit/"
},
{
"dataSource": "langtrace",
"label": "Langtrace",
"imgUrl": "/Logos/langtrace.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"langtrace",
"langtrace monitoring",
"langtrace observability",
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel langtrace",
"traces",
"tracing"
],
"link": "/docs/langtrace/"
},
{
"dataSource": "traceloop",
"label": "Traceloop",
"imgUrl": "/Logos/traceloop.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel traceloop",
"traces",
"tracing",
"traceloop",
"traceloop monitoring",
"traceloop observability"
],
"link": "/docs/traceloop/"
},
{
"dataSource": "anthropic-api",
"label": "Anthropic API",
@@ -4446,7 +4586,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/web-vitals-with-metrics/"
},
@@ -4480,7 +4621,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/web-vitals-with-traces/"
},
@@ -4521,7 +4663,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/document-load/"
},
@@ -5430,7 +5573,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/sending-logs-with-opentelemetry/"
},
@@ -5460,7 +5604,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/sending-traces-with-opentelemetry/"
},
@@ -5490,7 +5635,8 @@
"nuxtjs",
"svelte",
"sveltekit",
"nextjs"
"nextjs",
"next.js"
],
"link": "/docs/frontend-monitoring/sending-metrics-with-opentelemetry/"
},
@@ -5590,5 +5736,173 @@
}
]
}
},
{
"dataSource": "python-metrics",
"label": "Python Metrics",
"imgUrl": "/Logos/python.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
"relatedSearchKeywords": [
"metrics",
"opentelemetry python",
"otel python",
"python",
"python metrics",
"python monitoring",
"python observability",
"python opentelemetry",
"runtime metrics"
],
"id": "python-metrics",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-python/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-python/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-python/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-python/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-python/"
}
]
}
},
{
"dataSource": "dotnet-metrics",
"label": ".NET Metrics",
"imgUrl": "/Logos/dotnet.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
"relatedSearchKeywords": [
".net",
".net metrics",
".net monitoring",
".net observability",
"dotnet",
"dotnet metrics",
"dotnet monitoring",
"dotnet observability",
"metrics",
"opentelemetry dotnet",
"otel dotnet",
"runtime metrics"
],
"id": "dotnet-metrics",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-dotnet/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-dotnet/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-dotnet/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-dotnet/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-dotnet/"
}
]
}
},
{
"dataSource": "nodejs-metrics",
"label": "Node.js Metrics",
"imgUrl": "/Logos/nodejs.svg",
"tags": [
"apm/traces",
"metrics"
],
"module": "metrics",
"relatedSearchKeywords": [
"metrics",
"node.js",
"node.js metrics",
"node.js monitoring",
"node.js observability",
"nodejs",
"nodejs metrics",
"nodejs monitoring",
"nodejs observability",
"opentelemetry nodejs",
"otel nodejs",
"runtime metrics"
],
"id": "nodejs-metrics",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-nodejs/",
"question": {
"desc": "What is your Environment?",
"type": "select",
"entityID": "environment",
"options": [
{
"key": "vm",
"label": "VM",
"imgUrl": "/Logos/vm.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-nodejs/"
},
{
"key": "k8s",
"label": "Kubernetes",
"imgUrl": "/Logos/kubernetes.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-nodejs/"
},
{
"key": "windows",
"label": "Windows",
"imgUrl": "/Logos/windows.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-nodejs/"
},
{
"key": "docker",
"label": "Docker",
"imgUrl": "/Logos/docker.svg",
"link": "/docs/metrics-management/send-metrics/applications/opentelemetry-nodejs/"
}
]
}
}
]
]

View File

@@ -42,7 +42,7 @@
.trace-id {
color: #fff;
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
@@ -59,7 +59,7 @@
background: var(--bg-slate-400);
color: var(--Vanilla-400, #c0c1c3);
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */
@@ -83,7 +83,7 @@
.text {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
@@ -110,7 +110,7 @@
.text {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
@@ -127,7 +127,7 @@
.text {
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
@@ -156,7 +156,7 @@
color: var(--bg-vanilla-400);
text-align: center;
font-family: Inter;
font-size: 14px;
font-size: 13px;
font-style: normal;
font-weight: 400;
line-height: 18px; /* 128.571% */

View File

@@ -1,8 +1,10 @@
import './TraceMetadata.styles.scss';
import { Button, Tooltip, Typography } from 'antd';
import { Button, Skeleton, Tooltip, Typography } from 'antd';
import { getYAxisFormattedValue } from 'components/Graph/yAxisConfig';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import ROUTES from 'constants/routes';
import dayjs from 'dayjs';
import history from 'lib/history';
import {
ArrowLeft,
@@ -11,7 +13,8 @@ import {
DraftingCompass,
Timer,
} from 'lucide-react';
import { formatEpochTimestamp } from 'utils/timeUtils';
import { useTimezone } from 'providers/Timezone';
import { useMemo } from 'react';
export interface ITraceMetadataProps {
traceID: string;
@@ -22,6 +25,7 @@ export interface ITraceMetadataProps {
totalSpans: number;
totalErrorSpans: number;
notFound: boolean;
isDataLoading: boolean;
}
function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
@@ -34,8 +38,19 @@ function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
totalErrorSpans,
totalSpans,
notFound,
isDataLoading,
} = props;
const { timezone } = useTimezone();
const startTimeInMs = useMemo(
() =>
dayjs(startTime * 1e3)
.tz(timezone.value)
.format(DATE_TIME_FORMATS.DD_MMM_YYYY_HH_MM_SS),
[startTime, timezone.value],
);
const handlePreviousBtnClick = (): void => {
if (window.history.length > 1) {
history.goBack();
@@ -57,7 +72,18 @@ function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
</div>
<Typography.Text className="trace-id-value">{traceID}</Typography.Text>
</div>
{!notFound && (
{isDataLoading && (
<div className="second-row">
<div className="service-entry-info">
<BetweenHorizonalStart size={14} />
<Skeleton.Input active className="skeleton-input" size="small" />
<Skeleton.Input active className="skeleton-input" size="small" />
<Skeleton.Input active className="skeleton-input" size="small" />
</div>
</div>
)}
{!isDataLoading && !notFound && (
<div className="second-row">
<div className="service-entry-info">
<BetweenHorizonalStart size={14} />
@@ -79,8 +105,9 @@ function TraceMetadata(props: ITraceMetadataProps): JSX.Element {
<Tooltip title="Start timestamp">
<CalendarClock size={14} />
</Tooltip>
<Typography.Text className="text">
{formatEpochTimestamp(startTime * 1000)}
{startTimeInMs || 'N/A'}
</Typography.Text>
</div>
</div>

View File

@@ -135,6 +135,7 @@ function TraceDetailsV2(): JSX.Element {
<ResizablePanel minSize={20} maxSize={80} className="trace-left-content">
<TraceMetadata
traceID={traceId}
isDataLoading={isFetchingTraceData}
duration={
(traceData?.payload?.endTimestampMillis || 0) -
(traceData?.payload?.startTimestampMillis || 0)

View File

@@ -198,6 +198,14 @@
border-color: var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
color: var(--bg-ink-200);
&.text {
color: var(--bg-ink-200) !important;
&:hover {
color: var(--bg-ink-300) !important;
}
}
}
.periscope-input-with-label {

View File

@@ -18,7 +18,7 @@ import React, {
useState,
} from 'react';
interface TimezoneContextType {
export interface TimezoneContextType {
timezone: Timezone;
browserTimezone: Timezone;
updateTimezone: (timezone: Timezone) => void;

View File

@@ -6,8 +6,6 @@ const DOCLINKS = {
'https://signoz.io/docs/product-features/trace-explorer/?utm_source=product&utm_medium=traces-explorer-trace-tab#traces-view',
METRICS_EXPLORER_EMPTY_STATE:
'https://signoz.io/docs/userguide/send-metrics-cloud/',
EXTERNAL_API_MONITORING:
'https://signoz.io/docs/external-api-monitoring/overview/',
};
export default DOCLINKS;

View File

@@ -2,10 +2,13 @@ import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import dayjs from 'dayjs';
import customParseFormat from 'dayjs/plugin/customParseFormat';
import duration from 'dayjs/plugin/duration';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';
dayjs.extend(utc);
dayjs.extend(customParseFormat);
dayjs.extend(duration);
dayjs.extend(timezone);
export function toUTCEpoch(time: number): number {
const x = new Date();
@@ -213,10 +216,11 @@ export const validateTimeRange = (
startTime: string,
endTime: string,
format: string,
timezone: string,
): TimeRangeValidationResult => {
const start = dayjs(startTime, format, true);
const end = dayjs(endTime, format, true);
const now = dayjs();
const start = dayjs.tz(startTime, format, timezone);
const end = dayjs.tz(endTime, format, timezone);
const now = dayjs().tz(timezone);
const startTimeMs = start.valueOf();
const endTimeMs = end.valueOf();
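The behavioral change here: start and end strings are now parsed as wall-clock times in the selected zone, so the resulting epoch values no longer depend on the browser's locale. A self-contained sketch of the parsing pattern (zone and format are illustrative):

```typescript
import dayjs from 'dayjs';
import customParseFormat from 'dayjs/plugin/customParseFormat';
import timezonePlugin from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';

dayjs.extend(utc);
dayjs.extend(customParseFormat);
dayjs.extend(timezonePlugin);

// Parse wall-clock strings as moments in a specific IANA zone, then compare
// epoch milliseconds.
const fmt = 'DD/MM/YYYY HH:mm:ss';
const start = dayjs.tz('28/01/2026 10:00:00', fmt, 'Asia/Kolkata');
const end = dayjs.tz('28/01/2026 11:00:00', fmt, 'Asia/Kolkata');
console.log(end.valueOf() - start.valueOf()); // 3600000, regardless of browser zone
```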

View File

@@ -0,0 +1,308 @@
package querybuilder
import (
"fmt"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// AdjustDuplicateKeys adjusts duplicate keys in the query by removing specific context and data type
// if the same key appears with different contexts or data types across SelectFields, GroupBy, and OrderBy.
// This ensures that each key is unique and generic enough to cover all its usages in the query.
func AdjustDuplicateKeys[T any](query *qbtypes.QueryBuilderQuery[T]) []string {
// Create a map to track unique keys across SelectFields, GroupBy, and OrderBy
globalUniqueKeysMap := map[string]telemetrytypes.TelemetryFieldKey{}
// for recording modifications
actions := []string{}
// SelectFields
for _, key := range query.SelectFields {
deduplicateKeys(key, globalUniqueKeysMap, &actions)
}
// GroupBy
for _, key := range query.GroupBy {
deduplicateKeys(key.TelemetryFieldKey, globalUniqueKeysMap, &actions)
}
// OrderBy
for _, key := range query.Order {
deduplicateKeys(key.Key.TelemetryFieldKey, globalUniqueKeysMap, &actions)
}
// Reconstruct SelectFields slice
newSelectFields := make([]telemetrytypes.TelemetryFieldKey, 0, len(query.SelectFields))
seen := map[string]bool{}
for _, key := range query.SelectFields {
if !seen[key.Name] {
newSelectFields = append(newSelectFields, globalUniqueKeysMap[key.Name])
seen[key.Name] = true
} else {
actions = append(actions, fmt.Sprintf("Skipped duplicate SelectField key %s", key))
}
}
query.SelectFields = newSelectFields
// Reconstruct GroupBy slice
newGroupBy := make([]qbtypes.GroupByKey, 0, len(query.GroupBy))
seen = map[string]bool{}
for _, key := range query.GroupBy {
if !seen[key.Name] {
newGroupBy = append(newGroupBy, qbtypes.GroupByKey{TelemetryFieldKey: globalUniqueKeysMap[key.Name]})
seen[key.Name] = true
} else {
actions = append(actions, fmt.Sprintf("Skipped duplicate GroupBy key %s", key))
}
}
query.GroupBy = newGroupBy
// Reconstruct OrderBy slice
// NOTE: One edge case here: if there are two OrderBy entries on the same key with different
// directions, we keep only the first one encountered. This is acceptable, as such queries
// don't make much sense.
newOrderBy := make([]qbtypes.OrderBy, 0, len(query.Order))
seen = map[string]bool{}
for _, key := range query.Order {
if !seen[key.Key.Name] {
newOrderBy = append(newOrderBy, qbtypes.OrderBy{Key: qbtypes.OrderByKey{TelemetryFieldKey: globalUniqueKeysMap[key.Key.Name]}, Direction: key.Direction})
seen[key.Key.Name] = true
} else {
actions = append(actions, fmt.Sprintf("Skipped duplicate OrderBy key %s", key.Key))
}
}
query.Order = newOrderBy
return actions
}
func deduplicateKeys(key telemetrytypes.TelemetryFieldKey, keysMap map[string]telemetrytypes.TelemetryFieldKey, actions *[]string) {
if existingKey, ok := keysMap[key.Name]; !ok {
keysMap[key.Name] = key
} else {
if existingKey.FieldContext != key.FieldContext && existingKey.FieldContext != telemetrytypes.FieldContextUnspecified {
// remove field context in the map to make it generic
*actions = append(*actions, fmt.Sprintf("Removed field context from %s for duplicate key %s", existingKey, key))
existingKey.FieldContext = telemetrytypes.FieldContextUnspecified
}
if existingKey.FieldDataType != key.FieldDataType && existingKey.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
// remove field data type in the map to make it generic
*actions = append(*actions, fmt.Sprintf("Removed field data type from %s for duplicate key %s", existingKey, key))
existingKey.FieldDataType = telemetrytypes.FieldDataTypeUnspecified
}
// Update the map with the modified key
keysMap[key.Name] = existingKey
}
}
func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey, intrinsicOrCalculatedField *telemetrytypes.TelemetryFieldKey) []string {
// for recording modifications
actions := []string{}
if intrinsicOrCalculatedField != nil {
/*
Check if it also matches any of the metadata keys.
For example, let's consider that trace_id exists in attributes and is also an intrinsic field.
Now if a user is using trace_id, we don't know if they mean the intrinsic field or attribute.trace_id,
so we cannot make a call here (we'll leave this up to the query builder to decide).
However, if a user is using attribute.trace_id, we can safely assume they mean the attribute field
and not the intrinsic field.
Similarly, if a user is using trace_id with a field context or data type that doesn't match
the intrinsic field, and there is no matching key in the metadata with the same name,
we can safely assume they mean the intrinsic field and override the context and data type.
*/
// Check if there is any matching key in the metadata with the same name and it is not the same intrinsic/calculated field
match := false
for _, mapKey := range keys[key.Name] {
// Either field context is unspecified or matches
// and
// Either field data type is unspecified or matches
if (key.FieldContext == telemetrytypes.FieldContextUnspecified || mapKey.FieldContext == key.FieldContext) &&
(key.FieldDataType == telemetrytypes.FieldDataTypeUnspecified || mapKey.FieldDataType == key.FieldDataType) &&
!mapKey.Equal(intrinsicOrCalculatedField) {
match = true
break
}
}
// NOTE: if a user is highly opinionated and uses attribute.duration_nano:string,
// it will be defaulted to the intrinsic field duration_nano, as the actual attribute might be attribute.duration_nano:number.
// If we don't have a match, the key doesn't exist as an attribute or resource attribute,
// so use the intrinsic/calculated field.
if !match {
// This is the case where user is using an intrinsic/calculated field
// with a context or data type that may or may not match the intrinsic/calculated field
// and there is no matching key in the metadata with the same name
// So we can safely override the context and data type
actions = append(actions, fmt.Sprintf("Overriding key: %s to %s", key, intrinsicOrCalculatedField))
key.FieldContext = intrinsicOrCalculatedField.FieldContext
key.FieldDataType = intrinsicOrCalculatedField.FieldDataType
key.JSONDataType = intrinsicOrCalculatedField.JSONDataType
key.Indexes = intrinsicOrCalculatedField.Indexes
key.Materialized = intrinsicOrCalculatedField.Materialized
return actions
}
}
// This means the key provided by the user cannot be resolved to a single field because of ambiguity,
// so we need to look into the metadata keys to find the best match.
// Collect all the keys for the given field with matching context and data type.
matchingKeys := []*telemetrytypes.TelemetryFieldKey{}
for _, metadataKey := range keys[key.Name] {
// Only consider keys that match the context and data type (if specified)
if (key.FieldContext == telemetrytypes.FieldContextUnspecified || key.FieldContext == metadataKey.FieldContext) &&
(key.FieldDataType == telemetrytypes.FieldDataTypeUnspecified || key.FieldDataType == metadataKey.FieldDataType) {
matchingKeys = append(matchingKeys, metadataKey)
}
}
// Also consider if context is actually part of the key name
contextPrefixedMatchingKeys := []*telemetrytypes.TelemetryFieldKey{}
if key.FieldContext != telemetrytypes.FieldContextUnspecified {
for _, metadataKey := range keys[key.FieldContext.StringValue()+"."+key.Name] {
// Since we prefixed the context in the name, we only need to match data type
if key.FieldDataType == telemetrytypes.FieldDataTypeUnspecified || key.FieldDataType == metadataKey.FieldDataType {
contextPrefixedMatchingKeys = append(contextPrefixedMatchingKeys, metadataKey)
}
}
}
if len(matchingKeys)+len(contextPrefixedMatchingKeys) == 0 {
// We do not have any matching keys; most likely the user made a mistake, so let the downstream query builder handle it.
// Set Materialized to false explicitly to avoid the QB looking for a materialized column.
key.Materialized = false
} else if len(matchingKeys)+len(contextPrefixedMatchingKeys) == 1 {
// only one matching key, use it
var matchingKey *telemetrytypes.TelemetryFieldKey
if len(matchingKeys) == 1 {
matchingKey = matchingKeys[0]
} else {
matchingKey = contextPrefixedMatchingKeys[0]
}
if !key.Equal(matchingKey) {
actions = append(actions, fmt.Sprintf("Adjusting key %s to %s", key, matchingKey))
}
key.Name = matchingKey.Name
key.FieldContext = matchingKey.FieldContext
key.FieldDataType = matchingKey.FieldDataType
key.JSONDataType = matchingKey.JSONDataType
key.Indexes = matchingKey.Indexes
key.Materialized = matchingKey.Materialized
return actions
} else {
// multiple matching keys, set materialized only if all the keys are materialized
// TODO: This could all be redundant; if it is not, it should be.
// Downstream query builder should handle multiple matching keys with their own metadata
// and not rely on this function to do so.
materialized := true
indexes := []telemetrytypes.JSONDataTypeIndex{}
fieldContextsSeen := map[telemetrytypes.FieldContext]bool{}
dataTypesSeen := map[telemetrytypes.FieldDataType]bool{}
jsonTypesSeen := map[string]*telemetrytypes.JSONDataType{}
for _, matchingKey := range matchingKeys {
materialized = materialized && matchingKey.Materialized
fieldContextsSeen[matchingKey.FieldContext] = true
dataTypesSeen[matchingKey.FieldDataType] = true
if matchingKey.JSONDataType != nil {
jsonTypesSeen[matchingKey.JSONDataType.StringValue()] = matchingKey.JSONDataType
}
indexes = append(indexes, matchingKey.Indexes...)
}
for _, matchingKey := range contextPrefixedMatchingKeys {
materialized = materialized && matchingKey.Materialized
fieldContextsSeen[matchingKey.FieldContext] = true
dataTypesSeen[matchingKey.FieldDataType] = true
if matchingKey.JSONDataType != nil {
jsonTypesSeen[matchingKey.JSONDataType.StringValue()] = matchingKey.JSONDataType
}
indexes = append(indexes, matchingKey.Indexes...)
}
key.Materialized = materialized
if len(indexes) > 0 {
key.Indexes = indexes
}
if len(fieldContextsSeen) == 1 && key.FieldContext == telemetrytypes.FieldContextUnspecified {
// all matching keys have same field context, use it
for context := range fieldContextsSeen {
actions = append(actions, fmt.Sprintf("Adjusting key %s to have field context %s", key, context.StringValue()))
key.FieldContext = context
break
}
}
if len(dataTypesSeen) == 1 && key.FieldDataType == telemetrytypes.FieldDataTypeUnspecified {
// all matching keys have same data type, use it
for dt := range dataTypesSeen {
actions = append(actions, fmt.Sprintf("Adjusting key %s to have data type %s", key, dt.StringValue()))
key.FieldDataType = dt
break
}
}
if len(jsonTypesSeen) == 1 && key.JSONDataType == nil {
// all matching keys have same JSON data type, use it
for _, jt := range jsonTypesSeen {
actions = append(actions, fmt.Sprintf("Adjusting key %s to have JSON data type %s", key, jt.StringValue()))
key.JSONDataType = jt
break
}
}
}
return actions
}
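// Illustrative sketch (not in the original file; it assumes only the AdjustKey
// signature above and the telemetrytypes constants used in this package): a
// single unambiguous metadata match copies its context, data type, and
// materialization onto the user-provided key.
//
//	key := telemetrytypes.TelemetryFieldKey{Name: "custom_field"}
//	keys := map[string][]*telemetrytypes.TelemetryFieldKey{
//		"custom_field": {{
//			Name:          "custom_field",
//			FieldContext:  telemetrytypes.FieldContextAttribute,
//			FieldDataType: telemetrytypes.FieldDataTypeString,
//			Materialized:  true,
//		}},
//	}
//	actions := AdjustKey(&key, keys, nil)
//	// actions: ["Adjusting key name=custom_field to name=custom_field,context=attribute,datatype=string,materialized=true"]
//	// key now has context=attribute, datatype=string, Materialized=true.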
func AdjustKeysForAliasExpressions[T any](query *qbtypes.QueryBuilderQuery[T], requestType qbtypes.RequestType) []string {
/*
For example, if a user uses `body.count` as an aggregation alias and
references it in orderBy, upstream code will convert it to just `count` with fieldContext set to Body,
but we need to adjust it back to `body.count` with fieldContext unspecified.
*/
actions := []string{}
if requestType != qbtypes.RequestTypeRaw && requestType != qbtypes.RequestTypeRawStream {
aliasExpressions := map[string]bool{}
for _, agg := range query.Aggregations {
switch v := any(agg).(type) {
case qbtypes.LogAggregation:
if v.Alias != "" {
aliasExpressions[v.Alias] = true
}
case qbtypes.TraceAggregation:
if v.Alias != "" {
aliasExpressions[v.Alias] = true
}
default:
continue
}
}
if len(aliasExpressions) > 0 {
for idx := range query.Order {
contextPrefixedKeyName := fmt.Sprintf("%s.%s", query.Order[idx].Key.FieldContext.StringValue(), query.Order[idx].Key.Name)
if aliasExpressions[contextPrefixedKeyName] {
actions = append(actions, fmt.Sprintf("Adjusting OrderBy key %s to %s", query.Order[idx].Key, contextPrefixedKeyName))
query.Order[idx].Key.FieldContext = telemetrytypes.FieldContextUnspecified
query.Order[idx].Key.Name = contextPrefixedKeyName
}
}
}
}
return actions
}
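// Minimal sketch of the alias adjustment (assumes the qbtypes shapes used in
// this package; RequestTypeScalar is an assumed non-raw request type name, and
// FieldContextBody mirrors the "fieldContext set to Body" case described above):
//
//	q := qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
//		Aggregations: []qbtypes.LogAggregation{{Alias: "body.count"}},
//		Order: []qbtypes.OrderBy{{Key: qbtypes.OrderByKey{
//			TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
//				Name:         "count",
//				FieldContext: telemetrytypes.FieldContextBody,
//			},
//		}}},
//	}
//	actions := AdjustKeysForAliasExpressions(&q, qbtypes.RequestTypeScalar)
//	// q.Order[0].Key.Name == "body.count" and its FieldContext is reset to
//	// FieldContextUnspecified, so the orderBy matches the aggregation alias.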

View File

@@ -0,0 +1,610 @@
package querybuilder
import (
"testing"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/stretchr/testify/assert"
)
func TestAdjustDuplicateKeys(t *testing.T) {
tests := []struct {
name string
query qbtypes.QueryBuilderQuery[any]
expectedQuery qbtypes.QueryBuilderQuery[any]
expectedActions []string
description string
}{
{
name: "no duplicates - should remain unchanged",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "field1", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "field2", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field3", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}},
},
Order: []qbtypes.OrderBy{
{Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field4", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber}}},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "field1", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "field2", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field3", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}},
},
Order: []qbtypes.OrderBy{
{Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field4", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber}}},
},
},
expectedActions: []string{},
description: "No duplicate keys - fields should remain unchanged",
},
{
name: "duplicate in SelectFields with different context",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "duration", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeNumber},
{Name: "duration", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "duration", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field context from name=duration,context=attribute,datatype=number for duplicate key name=duration,context=resource,datatype=number",
"Skipped duplicate SelectField key name=duration,context=resource,datatype=number",
},
description: "Duplicate key with different context should be merged with unspecified context",
},
{
name: "duplicate in SelectFields with different data type",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "value", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "value", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "value", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeUnspecified},
},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field data type from name=value,context=attribute,datatype=string for duplicate key name=value,context=attribute,datatype=number",
"Skipped duplicate SelectField key name=value,context=attribute,datatype=number",
},
description: "Duplicate key with different data type should be merged with unspecified data type",
},
{
name: "duplicate in SelectFields with different context and data type",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "field", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "field", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "field", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified},
},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field context from name=field,context=attribute,datatype=string for duplicate key name=field,context=resource,datatype=number",
"Removed field data type from name=field,datatype=string for duplicate key name=field,context=resource,datatype=number",
"Skipped duplicate SelectField key name=field,context=resource,datatype=number",
},
description: "Duplicate key with different context and data type should be merged with both unspecified",
},
{
name: "duplicate across SelectFields and GroupBy",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "service", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString}},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "service", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeString},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeString}},
},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field context from name=service,context=attribute,datatype=string for duplicate key name=service,context=resource,datatype=string",
},
description: "Duplicate across SelectFields and GroupBy with different context should be merged",
},
{
name: "duplicate across SelectFields, GroupBy, and OrderBy",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber}},
},
Order: []qbtypes.OrderBy{
{Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}}},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "timestamp", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified}},
},
Order: []qbtypes.OrderBy{
{Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "timestamp", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified}}},
},
},
expectedActions: []string{
"Removed field context from name=timestamp,context=attribute,datatype=number for duplicate key name=timestamp,context=resource,datatype=number",
"Removed field data type from name=timestamp,datatype=number for duplicate key name=timestamp,context=attribute,datatype=string",
},
description: "Duplicate across all three sections with different contexts and data types should be fully merged",
},
{
name: "multiple duplicates in OrderBy - keeps first occurrence",
query: qbtypes.QueryBuilderQuery[any]{
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}},
Direction: qbtypes.OrderDirectionAsc,
},
{
Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString}},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "field", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeString}},
Direction: qbtypes.OrderDirectionAsc,
},
},
},
expectedActions: []string{
"Removed field context from name=field,context=attribute,datatype=string for duplicate key name=field,context=resource,datatype=string",
"Skipped duplicate OrderBy key name=field,context=resource,datatype=string",
},
description: "Multiple OrderBy on same key keeps first occurrence and merges contexts",
},
{
name: "three duplicate entries in SelectFields",
query: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "status", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "status", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeString},
{Name: "status", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeNumber},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "status", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified},
},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field context from name=status,context=attribute,datatype=string for duplicate key name=status,context=resource,datatype=string",
"Removed field data type from name=status,datatype=string for duplicate key name=status,context=attribute,datatype=number",
"Skipped duplicate SelectField key name=status,context=resource,datatype=string",
"Skipped duplicate SelectField key name=status,context=attribute,datatype=number",
},
description: "Three duplicate entries with various differences should be fully merged",
},
{
name: "duplicate entries in GroupBy",
query: qbtypes.QueryBuilderQuery[any]{
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "status", FieldContext: telemetrytypes.FieldContextAttribute, FieldDataType: telemetrytypes.FieldDataTypeString}},
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "status", FieldContext: telemetrytypes.FieldContextResource, FieldDataType: telemetrytypes.FieldDataTypeNumber}},
},
},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "status", FieldContext: telemetrytypes.FieldContextUnspecified, FieldDataType: telemetrytypes.FieldDataTypeUnspecified}},
},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{
"Removed field context from name=status,context=attribute,datatype=string for duplicate key name=status,context=resource,datatype=number",
"Removed field data type from name=status,datatype=string for duplicate key name=status,context=resource,datatype=number",
"Skipped duplicate GroupBy key name=status,context=resource,datatype=number",
},
description: "Duplicate entries in GroupBy with different context should be merged",
},
{
name: "empty query",
query: qbtypes.QueryBuilderQuery[any]{},
expectedQuery: qbtypes.QueryBuilderQuery[any]{
SelectFields: []telemetrytypes.TelemetryFieldKey{},
GroupBy: []qbtypes.GroupByKey{},
Order: []qbtypes.OrderBy{},
},
expectedActions: []string{},
description: "Empty query should result in empty slices",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Make a copy to avoid modifying the original
query := tt.query
actions := AdjustDuplicateKeys(&query)
assert.Equal(t, tt.expectedQuery.SelectFields, query.SelectFields, "SelectFields mismatch: %s", tt.description)
assert.Equal(t, tt.expectedQuery.GroupBy, query.GroupBy, "GroupBy mismatch: %s", tt.description)
assert.Equal(t, tt.expectedQuery.Order, query.Order, "Order mismatch: %s", tt.description)
assert.Equal(t, tt.expectedActions, actions, "Actions mismatch: %s", tt.description)
})
}
}
func TestAdjustKey(t *testing.T) {
tests := []struct {
name string
key telemetrytypes.TelemetryFieldKey
keys map[string][]*telemetrytypes.TelemetryFieldKey
intrinsicOrCalculatedField *telemetrytypes.TelemetryFieldKey
expectedKey telemetrytypes.TelemetryFieldKey
expectedActions []string
description string
}{
{
name: "intrinsic field with no matching attribute/resource key",
key: telemetrytypes.TelemetryFieldKey{
Name: "trace_id", // User provided key "trace_id" with no context or data type
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"trace_id": { // This is the intrinsic field itself in the keys map
&telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
intrinsicOrCalculatedField: &telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextLog, // Use intrinsic field context
FieldDataType: telemetrytypes.FieldDataTypeString, // Use intrinsic field data type
},
expectedActions: []string{
"Overriding key: name=trace_id to name=trace_id,context=log,datatype=string",
},
description: "Intrinsic field with no attribute/resource key match should use intrinsic field properties",
},
{
name: "intrinsic field with matching attribute/resource key",
key: telemetrytypes.TelemetryFieldKey{
Name: "trace_id", // User provided key "trace_id" with no context or data type
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"trace_id": {
{
Name: "trace_id", // This is an attribute key matching the intrinsic field name
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "trace_id", // This is the intrinsic field itself in the keys map
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
},
intrinsicOrCalculatedField: &telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextUnspecified, // This is left unspecified due to ambiguity
FieldDataType: telemetrytypes.FieldDataTypeString, // This is set to string as both have same type
Materialized: false,
},
expectedActions: []string{"Adjusting key name=trace_id to have data type string"},
description: "Intrinsic field with attribute key match should set data type to string since both the intrinsic field and the attribute key have the same type (ambiguous case)",
},
{
name: "non-intrinsic field with single matching attribute key",
key: telemetrytypes.TelemetryFieldKey{
Name: "custom_field",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"custom_field": {
{
Name: "custom_field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "custom_field",
FieldContext: telemetrytypes.FieldContextAttribute, // Use attribute field context
FieldDataType: telemetrytypes.FieldDataTypeString, // Use attribute field data type
Materialized: true,
},
expectedActions: []string{
"Adjusting key name=custom_field to name=custom_field,context=attribute,datatype=string,materialized=true",
},
description: "Single matching attribute key should use its properties",
},
{
name: "non-intrinsic field with attribute prefix as matching key",
key: telemetrytypes.TelemetryFieldKey{
Name: "custom_field",
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"log.custom_field": {
{
Name: "log.custom_field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "log.custom_field",
FieldContext: telemetrytypes.FieldContextAttribute, // Use attribute field context
FieldDataType: telemetrytypes.FieldDataTypeString, // Use attribute field data type
Materialized: true,
},
expectedActions: []string{
"Adjusting key name=custom_field,context=log to name=log.custom_field,context=attribute,datatype=string,materialized=true",
},
description: "Context-prefixed matching attribute key should use its properties",
},
{
name: "non-intrinsic field with no matching attribute keys",
key: telemetrytypes.TelemetryFieldKey{
Name: "unknown_field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown_field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
expectedActions: []string{},
description: "No matching attribute keys should set materialized to false",
},
{
name: "multiple matching keys with different contexts",
key: telemetrytypes.TelemetryFieldKey{
Name: "field", // User provided key "field" with no context and string data type
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"field": {
{
Name: "field", // Attribute context
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "field", // Resource context
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextUnspecified, // Too ambiguous to set context
FieldDataType: telemetrytypes.FieldDataTypeString, // Both have same data type
Materialized: true,
},
expectedActions: []string{},
description: "Multiple matching keys with different contexts should keep context unspecified but data type specified",
},
{
name: "multiple matching keys with different data types",
key: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"field": {
{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: true,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
Materialized: true,
},
expectedActions: []string{},
description: "Multiple matching keys with different data types should keep data type unspecified but context specified",
},
{
name: "specific context filters matching keys",
key: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"field": {
{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "field",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
expectedActions: []string{
"Adjusting key name=field,context=attribute to name=field,context=attribute,datatype=string,materialized=true",
},
description: "Specific context should filter to matching keys only",
},
{
name: "specific data type filters matching keys",
key: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"field": {
{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: false,
},
},
},
intrinsicOrCalculatedField: nil,
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
expectedActions: []string{
"Adjusting key name=field,datatype=string to name=field,context=attribute,datatype=string,materialized=true",
},
description: "Specific data type should filter to matching keys only",
},
{
name: "intrinsic field with explicit different context matches metadata",
key: telemetrytypes.TelemetryFieldKey{
Name: "duration",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keys: map[string][]*telemetrytypes.TelemetryFieldKey{
"duration": {
{
Name: "duration", // This is an attribute key matching the intrinsic field name
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: false,
},
{
Name: "duration", // This is the intrinsic field itself in the keys map
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: true,
},
},
},
intrinsicOrCalculatedField: &telemetrytypes.TelemetryFieldKey{
Name: "duration",
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: true,
},
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "duration",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: false,
},
expectedActions: []string{"Adjusting key name=duration,context=attribute to name=duration,context=attribute,datatype=number"},
description: "User explicitly specified attribute.duration, should prefer metadata over intrinsic",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
key := tt.key
actions := AdjustKey(&key, tt.keys, tt.intrinsicOrCalculatedField)
assert.Equal(t, tt.expectedKey.Name, key.Name, "Name mismatch: %s", tt.description)
assert.Equal(t, tt.expectedKey.FieldContext, key.FieldContext, "FieldContext mismatch: %s", tt.description)
assert.Equal(t, tt.expectedKey.FieldDataType, key.FieldDataType, "FieldDataType mismatch: %s", tt.description)
assert.Equal(t, tt.expectedKey.Materialized, key.Materialized, "Materialized mismatch: %s", tt.description)
assert.Equal(t, tt.expectedActions, actions, "Actions mismatch: %s", tt.description)
})
}
}
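// To run just these collision tests (assuming the package lives at
// pkg/querybuilder, per the PR description; only testify's assert is needed
// beyond the standard library):
//
//	go test ./pkg/querybuilder -run 'TestAdjustDuplicateKeys|TestAdjustKey'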

View File

@@ -45,7 +45,7 @@ func TestStmtBuilderTimeSeriesBodyGroupByJSON(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.user.age",
Name: "user.age",
},
},
},
@@ -70,7 +70,7 @@ func TestStmtBuilderTimeSeriesBodyGroupByJSON(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.education[].awards[].type",
Name: "education[].awards[].type",
},
},
},
@@ -126,12 +126,12 @@ func TestStmtBuilderTimeSeriesBodyGroupByPromoted(t *testing.T) {
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.user.age",
Name: "user.age",
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.user.name",
Name: "user.name",
},
},
},
@@ -240,7 +240,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
@@ -269,7 +269,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
@@ -284,7 +284,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
@@ -313,7 +313,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64 name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string]."},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
expectedErr: nil,
},
@@ -328,7 +328,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64 name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string]."},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
expectedErr: nil,
},
@@ -357,7 +357,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((LOWER(toString(dynamicElement(body_json.`user.age`, 'Int64'))) LIKE LOWER(?)) OR (LOWER(dynamicElement(body_json.`user.age`, 'String')) LIKE LOWER(?))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%25%", "%25%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `user.age` is ambiguous, found 2 different combinations of field context / data type: [name=user.age,context=body,datatype=int64 name=user.age,context=body,datatype=string]."},
Warnings: []string{"Key `user.age` is ambiguous, found 2 different combinations of field context / data type: [name=user.age,context=body,datatype=int64,jsondatatype=Int64 name=user.age,context=body,datatype=string,jsondatatype=String]."},
},
expectedErr: nil,
},
@@ -475,7 +475,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
@@ -504,7 +504,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
@@ -519,7 +519,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64 name=education[].parameters,context=body,datatype=[]dynamic]."},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
expectedErr: nil,
},
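The warning strings asserted above are built by concatenating each conflicting variant's String() rendering (see the TelemetryFieldKey.String() additions later in this diff). A self-contained sketch of that assembly, using simplified stand-in types rather than the real telemetrytypes structs:

package main

import "fmt"

// fieldKey is a simplified stand-in for telemetrytypes.TelemetryFieldKey.
type fieldKey struct {
    Name, Context, DataType string
    Materialized            bool
    JSONDataType            string
}

// ambiguityWarning reports all context/datatype combinations found for a key
// name, mirroring the warning format asserted in the tests above.
func ambiguityWarning(name string, variants []fieldKey) (string, bool) {
    if len(variants) < 2 {
        return "", false
    }
    msg := fmt.Sprintf("Key `%s` is ambiguous, found %d different combinations of field context / data type: [", name, len(variants))
    for i, v := range variants {
        if i > 0 {
            msg += " "
        }
        msg += fmt.Sprintf("name=%s,context=%s,datatype=%s", v.Name, v.Context, v.DataType)
        if v.Materialized {
            msg += ",materialized=true"
        }
        if v.JSONDataType != "" {
            msg += ",jsondatatype=" + v.JSONDataType
        }
    }
    return msg + "].", true
}

func main() {
    variants := []fieldKey{
        {Name: "education[].parameters", Context: "body", DataType: "[]float64", Materialized: true, JSONDataType: "Array(Nullable(Float64))"},
        {Name: "education[].parameters", Context: "body", DataType: "[]dynamic", Materialized: true, JSONDataType: "Array(Dynamic)"},
    }
    if warning, ok := ambiguityWarning("education[].parameters", variants); ok {
        fmt.Println(warning)
    }
}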

View File

@@ -71,7 +71,7 @@ func (b *logQueryStatementBuilder) Build(
return nil, err
}
b.adjustKeys(ctx, keys, query)
query = b.adjustKeys(ctx, keys, query, requestType)
// Create SQL builder
q := sqlbuilder.NewSelectBuilder()
@@ -104,8 +104,22 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []
for idx := range query.GroupBy {
groupBy := query.GroupBy[idx]
selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name)
keySelectors = append(keySelectors, selectors...)
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: groupBy.Name,
Signal: telemetrytypes.SignalLogs,
FieldContext: groupBy.FieldContext,
FieldDataType: groupBy.FieldDataType,
})
}
for idx := range query.SelectFields {
selectField := query.SelectFields[idx]
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: selectField.Name,
Signal: telemetrytypes.SignalLogs,
FieldContext: selectField.FieldContext,
FieldDataType: selectField.FieldDataType,
})
}
for idx := range query.Order {
@@ -125,88 +139,48 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []
return keySelectors
}
func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) {
// for group by / order by, if there is a key
// that exactly matches the name of an intrinsic field but has
// a field context or data type that doesn't match the field context or data type of the
// intrinsic field,
// and there is no additional key present in the data with the incoming key match,
// then override the given context with the
// intrinsic field context and data type
// Why does that happen? Because we have a lot of dashboards created by users and shared over the web
// that have an incorrect context or data type populated, so we fix it
// note: this override happens only when there is no match; if there is a match,
// we can't make a decision on behalf of users, so we let it pass through unmodified
// example: {"key": "severity_text","type": "tag","dataType": "string"}
// This is sent as "tag" when it's not; this was earlier managed with
// `isColumn`, which we don't have in v5 (because it's not a user concern whether it's a mat col or not)
// Such requests as-is look for attributes; the following code exists to handle them
checkMatch := func(k *telemetrytypes.TelemetryFieldKey) {
var overallMatch bool
findMatch := func(staticKeys map[string]telemetrytypes.TelemetryFieldKey) bool {
// for a given key `k`, iterate over the metadata keys `keys`
// and see if there is any exact match
match := false
for _, mapKey := range keys[k.Name] {
if mapKey.FieldContext == k.FieldContext && mapKey.FieldDataType == k.FieldDataType {
match = true
}
}
// if we don't have an exact match, then it doesn't exist in attributes or resource attributes;
// use the intrinsic/calculated field
if !match {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = staticKeys[k.Name].FieldContext
k.FieldDataType = staticKeys[k.Name].FieldDataType
}
return match
}
if _, ok := IntrinsicFields[k.Name]; ok {
overallMatch = overallMatch || findMatch(IntrinsicFields)
}
if strings.HasPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
k.Name = strings.TrimPrefix(k.Name, telemetrytypes.BodyJSONStringSearchPrefix)
fieldKeys, found := keys[k.Name]
if found && len(fieldKeys) > 0 {
k.FieldContext = fieldKeys[0].FieldContext
k.FieldDataType = fieldKeys[0].FieldDataType
// only attach the JSON data type if there is exactly one key for the field;
// in case there are multiple keys, it's handled by the fallback expr logic
if len(fieldKeys) == 1 {
k.JSONDataType = fieldKeys[0].JSONDataType
k.Materialized = fieldKeys[0].Materialized
}
k.Indexes = fieldKeys[0].Indexes
overallMatch = true // because we found a match
} else {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = telemetrytypes.FieldContextBody
k.FieldDataType = telemetrytypes.FieldDataTypeString
k.JSONDataType = &telemetrytypes.String
}
}
if !overallMatch {
// check if all the keys for the given field have been materialized; if so,
// set the key to materialized
materilized := true
for _, key := range keys[k.Name] {
materilized = materilized && key.Materialized
}
k.Materialized = materilized
}
}
for idx := range query.GroupBy {
checkMatch(&query.GroupBy[idx].TelemetryFieldKey)
}
for idx := range query.Order {
checkMatch(&query.Order[idx].Key.TelemetryFieldKey)
}
func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], requestType qbtypes.RequestType) qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] {
/*
Adjust keys for alias expressions in aggregations
*/
actions := querybuilder.AdjustKeysForAliasExpressions(&query, requestType)
/*
Check if the user is using multiple contexts or data types for the same field name
The idea is to use a super set of keys that can satisfy all the usages
For example, let's consider model_id exists in both attributes and resources
and the user is trying to use `attribute.model_id` and `model_id`.
In this case, we'll remove the context from `attribute.model_id`
and make it just `model_id` and remove the duplicate entry.
The same goes for data types.
Consider the user is using http.status_code:number and http.status_code
In this case, we'll remove the data type from http.status_code:number
and make it just http.status_code and remove the duplicate entry.
*/
actions = append(actions, querybuilder.AdjustDuplicateKeys(&query)...)
/*
Now adjust each key to have the correct context and data type
Here we try to make intelligent guesses which work for all users (not just the majority)
The reason for doing this is to not create unexpected behavior for users
*/
for idx := range query.SelectFields {
actions = append(actions, b.adjustKey(&query.SelectFields[idx], keys)...)
}
for idx := range query.GroupBy {
actions = append(actions, b.adjustKey(&query.GroupBy[idx].TelemetryFieldKey, keys)...)
}
for idx := range query.Order {
actions = append(actions, b.adjustKey(&query.Order[idx].Key.TelemetryFieldKey, keys)...)
}
for _, action := range actions {
// TODO: change to debug level once we are confident about the behavior
b.logger.InfoContext(ctx, "key adjustment action", "action", action)
}
keys["id"] = []*telemetrytypes.TelemetryFieldKey{
@@ -217,6 +191,21 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
return query
}
func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
// First check if it matches with any intrinsic fields
var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
if _, ok := IntrinsicFields[key.Name]; ok {
intrinsicOrCalculatedField = IntrinsicFields[key.Name]
return querybuilder.AdjustKey(key, keys, &intrinsicOrCalculatedField)
}
return querybuilder.AdjustKey(key, keys, nil)
}
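querybuilder.AdjustKey itself is not part of this hunk; judging from the TestAdjustKey cases added in this PR, its resolution order is roughly: filter metadata matches by any user-pinned context, adopt a single surviving match outright, fall back to the intrinsic definition when nothing survives, and mark the key materialized only when every surviving variant is materialized. A condensed, behavior-level sketch (not the actual implementation):

func adjustKeySketch(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey, intrinsic *telemetrytypes.TelemetryFieldKey) {
    // keep only the metadata variants compatible with a user-pinned context
    var matches []*telemetrytypes.TelemetryFieldKey
    for _, m := range keys[key.Name] {
        if key.FieldContext != telemetrytypes.FieldContextUnspecified && m.FieldContext != key.FieldContext {
            continue
        }
        matches = append(matches, m)
    }
    switch len(matches) {
    case 0:
        // nothing usable in metadata: fall back to the intrinsic definition, if any
        if intrinsic != nil {
            key.FieldContext = intrinsic.FieldContext
            key.FieldDataType = intrinsic.FieldDataType
        }
    case 1:
        // exactly one candidate: adopt its context, data type, and materialization
        *key = *matches[0]
    default:
        // several candidates: leave the context open so the statement can fall
        // back across columns; materialize only if every variant is materialized
        allMaterialized := true
        for _, m := range matches {
            allMaterialized = allMaterialized && m.Materialized
        }
        key.Materialized = allMaterialized
    }
}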
// buildListQuery builds a query for list panel type

View File

@@ -633,3 +633,224 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
})
}
}
func TestAdjustKey(t *testing.T) {
cases := []struct {
name string
inputKey telemetrytypes.TelemetryFieldKey
keysMap map[string][]*telemetrytypes.TelemetryFieldKey
expectedKey telemetrytypes.TelemetryFieldKey
}{
{
name: "intrinsic field with no other key match - use intrinsic",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "severity_text",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: IntrinsicFields["severity_text"],
},
{
name: "intrinsic field with other key match - no override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "body",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: map[string][]*telemetrytypes.TelemetryFieldKey{
"body": {
{
Name: "body",
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "body",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "body",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
{
name: "json field with no context specified",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "severity_number",
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "severity_number",
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
},
{
name: "single matching key in metadata",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["service.name"][0],
},
{
name: "single matching key with incorrect context specified - no override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
{
name: "single matching key with no context specified - override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["service.name"][0],
},
{
name: "multiple matching keys - all materialized",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "multi.mat.key",
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
{
name: "multiple matching keys - mixed materialization",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
{
name: "multiple matching keys with context specified",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["mixed.materialization.key"][0],
},
{
name: "no matching keys - unknown field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
Materialized: false,
},
},
{
name: "no matching keys with context filter",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
Materialized: false,
},
},
{
name: "materialized field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "mat.key",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["mat.key"][0],
},
{
name: "non-materialized field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["user.id"][0],
},
}
fm := NewFieldMapper()
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMapCollision()
cb := NewConditionBuilder(fm, mockMetadataStore)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
// Create a copy of the input key to avoid modifying the original
key := c.inputKey
// Call adjustKey
statementBuilder.adjustKey(&key, c.keysMap)
// Verify the key was adjusted as expected
require.Equal(t, c.expectedKey.Name, key.Name, "key name should match")
require.Equal(t, c.expectedKey.FieldContext, key.FieldContext, "field context should match")
require.Equal(t, c.expectedKey.FieldDataType, key.FieldDataType, "field data type should match")
require.Equal(t, c.expectedKey.Materialized, key.Materialized, "materialized should match")
require.Equal(t, c.expectedKey.JSONDataType, key.JSONDataType, "json data type should match")
require.Equal(t, c.expectedKey.Indexes, key.Indexes, "indexes should match")
})
}
}
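These cases index straight into IntrinsicFields, which — per the findMatch signature above — is a map[string]telemetrytypes.TelemetryFieldKey. For orientation, an illustrative entry of that shape; the context value here is an assumption, and the real map is defined elsewhere in the package:

// Illustrative only: the registry of intrinsic log fields the tests index into.
var intrinsicFieldsSketch = map[string]telemetrytypes.TelemetryFieldKey{
    "severity_text": {
        Name:          "severity_text",
        FieldContext:  telemetrytypes.FieldContextLog, // assumed canonical context
        FieldDataType: telemetrytypes.FieldDataTypeString,
    },
}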

View File

@@ -322,6 +322,18 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"severity_number": {
{
Name: "severity_number",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "severity_number",
FieldContext: telemetrytypes.FieldContextBody,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
},
"created_at": {
{
Name: "created_at",
@@ -890,6 +902,42 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"mixed.materialization.key": {
{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
"multi.mat.key": {
{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
"mat.key": {
{
Name: "mat.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
}
for _, keys := range keysMap {
@@ -942,6 +990,14 @@ func buildCompleteFieldKeyMapCollision() map[string][]*telemetrytypes.TelemetryF
FieldDataType: telemetrytypes.FieldDataTypeString,
},
},
"materialized.key.name": {
{
Name: "materialized.key.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
}
for _, keys := range keysMap {

View File

@@ -74,7 +74,7 @@ func (b *traceQueryStatementBuilder) Build(
return nil, err
}
b.adjustKeys(ctx, keys, query)
query = b.adjustKeys(ctx, keys, query, requestType)
// Check if filter contains trace_id(s) and optimize time range if needed
if query.Filter != nil && query.Filter.Expression != "" && b.telemetryStore != nil {
@@ -126,23 +126,29 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation])
for idx := range query.GroupBy {
groupBy := query.GroupBy[idx]
selectors := querybuilder.QueryStringToKeysSelectors(groupBy.TelemetryFieldKey.Name)
keySelectors = append(keySelectors, selectors...)
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: groupBy.Name,
Signal: telemetrytypes.SignalTraces,
FieldContext: groupBy.FieldContext,
FieldDataType: groupBy.FieldDataType,
})
}
for idx := range query.SelectFields {
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: query.SelectFields[idx].Name,
Signal: telemetrytypes.SignalTraces,
FieldContext: query.SelectFields[idx].FieldContext,
Name: query.SelectFields[idx].Name,
Signal: telemetrytypes.SignalTraces,
FieldContext: query.SelectFields[idx].FieldContext,
FieldDataType: query.SelectFields[idx].FieldDataType,
})
}
for idx := range query.Order {
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: query.Order[idx].Key.Name,
Signal: telemetrytypes.SignalTraces,
FieldContext: query.Order[idx].Key.FieldContext,
Name: query.Order[idx].Key.Name,
Signal: telemetrytypes.SignalTraces,
FieldContext: query.Order[idx].Key.FieldContext,
FieldDataType: query.Order[idx].Key.FieldDataType,
})
}
@@ -154,77 +160,47 @@ func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation])
return keySelectors
}
func (b *traceQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) {
// for group by / order by / selected fields, if there is a key
// that exactly matches the name of an intrinsic / calculated field but has
// a field context or data type that doesn't match the field context or data type of the
// intrinsic field,
// and there is no additional key present in the data with the incoming key match,
// then override the given context with the
// intrinsic / calculated field context and data type
// Why does that happen? Because we have a lot of assets created by users and shared over the web
// that have an incorrect context or data type populated, so we fix it
// note: this override happens only when there is no match; if there is a match,
// we can't make a decision on behalf of users, so we let it pass through unmodified
// example: {"key": "httpRoute","type": "tag","dataType": "string"}
// This is sent as "tag" when it's not; this was earlier managed with
// `isColumn`, which we don't have in v5 (because it's not a user concern whether it's a mat col or not)
// Such requests as-is look for attributes; the following code exists to handle them
checkMatch := func(k *telemetrytypes.TelemetryFieldKey) {
var overallMatch bool
findMatch := func(staticKeys map[string]telemetrytypes.TelemetryFieldKey) bool {
// for a given key `k`, iterate over the metadata keys `keys`
// and see if there is any exact match
match := false
for _, mapKey := range keys[k.Name] {
if mapKey.FieldContext == k.FieldContext && mapKey.FieldDataType == k.FieldDataType {
match = true
}
}
// if we don't have an exact match, then it doesn't exist in attributes or resource attributes;
// use the intrinsic/calculated field
if !match {
b.logger.InfoContext(ctx, "overriding the field context and data type", "key", k.Name)
k.FieldContext = staticKeys[k.Name].FieldContext
k.FieldDataType = staticKeys[k.Name].FieldDataType
}
return match
}
if _, ok := IntrinsicFields[k.Name]; ok {
overallMatch = overallMatch || findMatch(IntrinsicFields)
}
if _, ok := CalculatedFields[k.Name]; ok {
overallMatch = overallMatch || findMatch(CalculatedFields)
}
if _, ok := IntrinsicFieldsDeprecated[k.Name]; ok {
overallMatch = overallMatch || findMatch(IntrinsicFieldsDeprecated)
}
if _, ok := CalculatedFieldsDeprecated[k.Name]; ok {
overallMatch = overallMatch || findMatch(CalculatedFieldsDeprecated)
}
if !overallMatch {
// check if all the keys for the given field have been materialized; if so,
// set the key to materialized
materilized := true
for _, key := range keys[k.Name] {
materilized = materilized && key.Materialized
}
k.Materialized = materilized
}
}
for idx := range query.GroupBy {
checkMatch(&query.GroupBy[idx].TelemetryFieldKey)
}
for idx := range query.Order {
checkMatch(&query.Order[idx].Key.TelemetryFieldKey)
}
for idx := range query.SelectFields {
checkMatch(&query.SelectFields[idx])
}
func (b *traceQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation], requestType qbtypes.RequestType) qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation] {
// Adjust keys for alias expressions in aggregations
actions := querybuilder.AdjustKeysForAliasExpressions(&query, requestType)
/*
Check if the user is using multiple contexts or data types for the same field name
The idea is to use a super set of keys that can satisfy all the usages
For example, let's consider model_id exists in both attributes and resources
and the user is trying to use `attribute.model_id` and `model_id`.
In this case, we'll remove the context from `attribute.model_id`
and make it just `model_id` and remove the duplicate entry.
The same goes for data types.
Consider the user is using http.status_code:number and http.status_code
In this case, we'll remove the data type from http.status_code:number
and make it just http.status_code and remove the duplicate entry.
*/
actions = append(actions, querybuilder.AdjustDuplicateKeys(&query)...)
/*
Now adjust each key to have the correct context and data type
Here we try to make intelligent guesses which work for all users (not just the majority)
The reason for doing this is to not create unexpected behavior for users
*/
for idx := range query.SelectFields {
actions = append(actions, b.adjustKey(&query.SelectFields[idx], keys)...)
}
for idx := range query.GroupBy {
actions = append(actions, b.adjustKey(&query.GroupBy[idx].TelemetryFieldKey, keys)...)
}
for idx := range query.Order {
actions = append(actions, b.adjustKey(&query.Order[idx].Key.TelemetryFieldKey, keys)...)
}
for _, action := range actions {
// TODO: change to debug level once we are confident about the behavior
b.logger.InfoContext(ctx, "key adjustment action", "action", action)
}
// add deprecated fields only during statement building
@@ -246,6 +222,42 @@ func (b *traceQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[st
keys[fieldKeyName] = append(keys[fieldKeyName], &fieldKey)
}
}
return query
}
func (b *traceQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
// for recording actions taken
actions := []string{}
/*
Check if this key is an intrinsic or calculated field
For example: trace_id (intrinsic), response_status_code (calculated).
*/
var isIntrinsicOrCalculatedField bool
var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
if _, ok := IntrinsicFields[key.Name]; ok {
isIntrinsicOrCalculatedField = true
intrinsicOrCalculatedField = IntrinsicFields[key.Name]
} else if _, ok := CalculatedFields[key.Name]; ok {
isIntrinsicOrCalculatedField = true
intrinsicOrCalculatedField = CalculatedFields[key.Name]
} else if _, ok := IntrinsicFieldsDeprecated[key.Name]; ok {
isIntrinsicOrCalculatedField = true
intrinsicOrCalculatedField = IntrinsicFieldsDeprecated[key.Name]
} else if _, ok := CalculatedFieldsDeprecated[key.Name]; ok {
isIntrinsicOrCalculatedField = true
intrinsicOrCalculatedField = CalculatedFieldsDeprecated[key.Name]
}
if isIntrinsicOrCalculatedField {
actions = append(actions, querybuilder.AdjustKey(key, keys, &intrinsicOrCalculatedField)...)
} else {
actions = append(actions, querybuilder.AdjustKey(key, keys, nil)...)
}
return actions
}
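As an aside, the if/else-if chain above could be collapsed into a loop over the four static maps; a behavior-preserving alternative (first hit wins, in the same order as the chain):

func intrinsicOrCalculated(name string) (telemetrytypes.TelemetryFieldKey, bool) {
    for _, static := range []map[string]telemetrytypes.TelemetryFieldKey{
        IntrinsicFields, CalculatedFields, IntrinsicFieldsDeprecated, CalculatedFieldsDeprecated,
    } {
        if field, ok := static[name]; ok {
            return field, true
        }
    }
    return telemetrytypes.TelemetryFieldKey{}, false
}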
// buildListQuery builds a query for list panel type

View File

@@ -5,6 +5,7 @@ import (
"testing"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
@@ -201,6 +202,7 @@ func TestStatementBuilder(t *testing.T) {
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "sum(metric.max_count)",
Alias: "metric.max_count",
},
},
Filter: &qbtypes.Filter{
@@ -214,9 +216,20 @@ func TestStatementBuilder(t *testing.T) {
},
},
},
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "max_count",
FieldContext: telemetrytypes.FieldContextMetric,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 desc LIMIT ?) SELECT toStartOfInterval(timestamp, INTERVAL 30 SECOND) AS ts, toString(multiIf(multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL) IS NOT NULL, multiIf(resource.`service.name` IS NOT NULL, resource.`service.name`::String, mapContains(resources_string, 'service.name'), resources_string['service.name'], NULL), NULL)) AS `service.name`, sum(multiIf(mapContains(attributes_number, 'metric.max_count') = ?, toFloat64(attributes_number['metric.max_count']), NULL)) AS __result_0 FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY ts desc",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10, true, "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448)},
},
expectedErr: nil,
@@ -519,6 +532,134 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
expectedErr: nil,
},
{
name: "List query with legacy fields with mixed materialization field",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Filter: &qbtypes.Filter{
Expression: "service.name = 'redis-manual'",
},
SelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "serviceName",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "durationNano",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "httpMethod",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
// "mixed.materialization.key" exists in both attribute and resource,
// attribute being materialized and resource being non-materialized.
Name: "mixed.materialization.key",
},
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT name AS `name`, resource_string_service$$name AS `serviceName`, duration_nano AS `durationNano`, http_method AS `httpMethod`, multiIf(toString(`attribute_string_mixed$$materialization$$key`) != '', toString(`attribute_string_mixed$$materialization$$key`), toString(multiIf(resource.`mixed.materialization.key` IS NOT NULL, resource.`mixed.materialization.key`::String, mapContains(resources_string, 'mixed.materialization.key'), resources_string['mixed.materialization.key'], NULL)) != '', toString(multiIf(resource.`mixed.materialization.key` IS NOT NULL, resource.`mixed.materialization.key`::String, mapContains(resources_string, 'mixed.materialization.key'), resources_string['mixed.materialization.key'], NULL)), NULL) AS `mixed.materialization.key`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "List query with legacy fields with mixed materialization field with context provided",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Filter: &qbtypes.Filter{
Expression: "service.name = 'redis-manual'",
},
SelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "serviceName",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "durationNano",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "httpMethod",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
// "mixed.materialization.key" exists in both attribute and resource,
// attribute being materialized and resource being non-materialized.
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
},
},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT name AS `name`, resource_string_service$$name AS `serviceName`, duration_nano AS `durationNano`, http_method AS `httpMethod`, `attribute_string_mixed$$materialization$$key` AS `mixed.materialization.key`, timestamp AS `timestamp`, span_id AS `span_id`, trace_id AS `trace_id` FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{"redis-manual", "%service.name%", "%service.name\":\"redis-manual%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
},
expectedErr: nil,
},
{
name: "List query with legacy fields with field that doesn't exist",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Filter: &qbtypes.Filter{
Expression: "service.name = 'redis-manual'",
},
SelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "serviceName",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "durationNano",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
{
Name: "httpMethod",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: "non-existent.key",
},
},
Limit: 10,
},
expected: qbtypes.Statement{},
expectedErr: errors.NewInvalidInputf(errors.CodeInvalidInput, "field not found"),
},
}
fm := NewFieldMapper()
@@ -660,7 +801,7 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
expectedErr: nil,
},
{
name: "list query with deprecated filter field",
name: "list query with deprecated filter field",
requestType: qbtypes.RequestTypeTrace,
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Signal: telemetrytypes.SignalTraces,
@@ -712,3 +853,546 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
})
}
}
func TestAdjustKey(t *testing.T) {
cases := []struct {
name string
inputKey telemetrytypes.TelemetryFieldKey
keysMap map[string][]*telemetrytypes.TelemetryFieldKey
expectedKey telemetrytypes.TelemetryFieldKey
}{
{
name: "intrinsic field with no metadata match - override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: IntrinsicFields["trace_id"],
},
{
name: "intrinsic field with metadata match with incorrect context - override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextBody, // incorrect context
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextSpan, // should be corrected
FieldDataType: telemetrytypes.FieldDataTypeNumber, // modified
},
},
{
name: "intrinsic field with metadata match with correct context - no override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextSpan, // correct context
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextSpan, // already correct, unchanged
FieldDataType: telemetrytypes.FieldDataTypeInt64, // not modified
},
},
{
name: "single matching key in metadata - override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["service.name"][0],
},
{
name: "single matching key with context specified - override",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: *buildCompleteFieldKeyMap()["cart.items_count"][0],
},
{
name: "multiple matching keys - all materialized",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: map[string][]*telemetrytypes.TelemetryFieldKey{
"multi.mat.key": {
{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "multi.mat.key",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
Materialized: true,
},
},
},
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "multi.mat.key",
Materialized: true,
},
},
{
name: "multiple matching keys - mixed materialization",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
{
name: "multiple matching keys with context specified",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
},
{
name: "no matching keys - unknown field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "unknown.field",
Materialized: false,
},
},
{
name: "no matching keys with context filter",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
Materialized: false,
},
},
{
name: "materialized field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
Materialized: true,
},
},
{
name: "non-materialized field",
inputKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
keysMap: buildCompleteFieldKeyMap(),
expectedKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
// Create a copy of the input key to avoid modifying the original
key := c.inputKey
// Call adjustKey
statementBuilder.adjustKey(&key, c.keysMap)
// Verify the key was adjusted as expected
require.Equal(t, c.expectedKey.Name, key.Name, "key name should match")
require.Equal(t, c.expectedKey.FieldContext, key.FieldContext, "field context should match")
require.Equal(t, c.expectedKey.FieldDataType, key.FieldDataType, "field data type should match")
require.Equal(t, c.expectedKey.Materialized, key.Materialized, "materialized should match")
})
}
}
func TestAdjustKeys(t *testing.T) {
cases := []struct {
name string
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]
keysMap map[string][]*telemetrytypes.TelemetryFieldKey
expectedSelectFields []telemetrytypes.TelemetryFieldKey
expectedGroupBy []qbtypes.GroupByKey
expectedOrder []qbtypes.OrderBy
expectDeprecatedFieldsAdd bool
}{
{
name: "adjust select fields",
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
keysMap: buildCompleteFieldKeyMap(),
expectedSelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
Materialized: true,
},
},
expectDeprecatedFieldsAdd: true,
},
{
name: "adjust group by fields",
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
},
keysMap: buildCompleteFieldKeyMap(),
expectedGroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
},
expectDeprecatedFieldsAdd: true,
},
{
name: "adjust order by fields",
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
Direction: qbtypes.OrderDirectionAsc,
},
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
keysMap: buildCompleteFieldKeyMap(),
expectedOrder: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
Direction: qbtypes.OrderDirectionAsc,
},
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "cart.items_count",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeFloat64,
Materialized: true,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
expectDeprecatedFieldsAdd: true,
},
{
name: "adjust all field types together",
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
SelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextUnspecified,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
keysMap: buildCompleteFieldKeyMap(),
expectedSelectFields: []telemetrytypes.TelemetryFieldKey{
{
Name: "trace_id",
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
expectedGroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
},
expectedOrder: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "user.id",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
expectDeprecatedFieldsAdd: true,
},
{
name: "adjust keys for alias expressions in aggregations - order by",
query: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
Aggregations: []qbtypes.TraceAggregation{
{
Expression: "sum(span.duration)",
Alias: "span.duration",
},
},
Order: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "duration",
FieldContext: telemetrytypes.FieldContextSpan,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
},
keysMap: buildCompleteFieldKeyMap(),
// After alias adjustment, name becomes "span.duration" with FieldContextUnspecified
// "span.duration" is not in keysMap, so context stays unspecified
expectedOrder: []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "span.duration",
FieldContext: telemetrytypes.FieldContextUnspecified,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
},
expectDeprecatedFieldsAdd: true,
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
// Copy the keys map (and its slices) so additions made by adjustKeys don't
// leak into the shared fixture; the pointed-to keys themselves are still shared
keysMapCopy := make(map[string][]*telemetrytypes.TelemetryFieldKey)
for k, v := range c.keysMap {
keysMapCopy[k] = make([]*telemetrytypes.TelemetryFieldKey, len(v))
copy(keysMapCopy[k], v)
}
// Call adjustKeys
c.query = statementBuilder.adjustKeys(context.Background(), keysMapCopy, c.query, qbtypes.RequestTypeScalar)
// Verify select fields were adjusted
if c.expectedSelectFields != nil {
require.Len(t, c.query.SelectFields, len(c.expectedSelectFields))
for i, expected := range c.expectedSelectFields {
require.Equal(t, expected.Name, c.query.SelectFields[i].Name, "select field %d name should match", i)
require.Equal(t, expected.FieldContext, c.query.SelectFields[i].FieldContext, "select field %d context should match", i)
require.Equal(t, expected.FieldDataType, c.query.SelectFields[i].FieldDataType, "select field %d data type should match", i)
require.Equal(t, expected.Materialized, c.query.SelectFields[i].Materialized, "select field %d materialized should match", i)
}
}
// Verify group by fields were adjusted
if c.expectedGroupBy != nil {
require.Len(t, c.query.GroupBy, len(c.expectedGroupBy))
for i, expected := range c.expectedGroupBy {
require.Equal(t, expected.TelemetryFieldKey.Name, c.query.GroupBy[i].TelemetryFieldKey.Name, "group by field %d name should match", i)
require.Equal(t, expected.TelemetryFieldKey.FieldContext, c.query.GroupBy[i].TelemetryFieldKey.FieldContext, "group by field %d context should match", i)
require.Equal(t, expected.TelemetryFieldKey.FieldDataType, c.query.GroupBy[i].TelemetryFieldKey.FieldDataType, "group by field %d data type should match", i)
require.Equal(t, expected.TelemetryFieldKey.Materialized, c.query.GroupBy[i].TelemetryFieldKey.Materialized, "group by field %d materialized should match", i)
}
}
// Verify order by fields were adjusted
if c.expectedOrder != nil {
require.Len(t, c.query.Order, len(c.expectedOrder))
for i, expected := range c.expectedOrder {
require.Equal(t, expected.Key.TelemetryFieldKey.Name, c.query.Order[i].Key.TelemetryFieldKey.Name, "order field %d name should match", i)
require.Equal(t, expected.Key.TelemetryFieldKey.FieldContext, c.query.Order[i].Key.TelemetryFieldKey.FieldContext, "order field %d context should match", i)
require.Equal(t, expected.Key.TelemetryFieldKey.FieldDataType, c.query.Order[i].Key.TelemetryFieldKey.FieldDataType, "order field %d data type should match", i)
require.Equal(t, expected.Key.TelemetryFieldKey.Materialized, c.query.Order[i].Key.TelemetryFieldKey.Materialized, "order field %d materialized should match", i)
require.Equal(t, expected.Direction, c.query.Order[i].Direction, "order field %d direction should match", i)
}
}
// Verify deprecated fields were added to the keys map
if c.expectDeprecatedFieldsAdd {
// Check that at least some deprecated fields were added
foundDeprecatedField := false
for fieldName := range IntrinsicFieldsDeprecated {
if _, ok := keysMapCopy[fieldName]; ok {
foundDeprecatedField = true
break
}
}
if !foundDeprecatedField {
for fieldName := range CalculatedFieldsDeprecated {
if _, ok := keysMapCopy[fieldName]; ok {
foundDeprecatedField = true
break
}
}
}
require.True(t, foundDeprecatedField, "at least one deprecated field should be added to keys map")
}
})
}
}
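The last case pins down the alias behavior: an order-by key arriving as FieldContext=span, Name=duration is recognized as the rendered form of the aggregation alias span.duration and is rewritten to reference the alias with the context cleared. A minimal sketch of that matching step, assuming AdjustKeysForAliasExpressions compares the context-prefixed form against the declared aliases:

// rewriteOrderByAliases rewrites order-by keys whose context-prefixed form
// ("<context>.<name>") matches an aggregation alias. Simplified sketch only.
func rewriteOrderByAliases(aliases map[string]bool, order []qbtypes.OrderBy) {
    for i := range order {
        key := &order[i].Key.TelemetryFieldKey
        prefixed := fmt.Sprintf("%s.%s", key.FieldContext.StringValue(), key.Name)
        if aliases[prefixed] {
            key.Name = prefixed
            key.FieldContext = telemetrytypes.FieldContextUnspecified
        }
    }
}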

View File

@@ -62,6 +62,11 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
FieldContext: telemetrytypes.FieldContextSpan,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
{
Name: "duration_nano",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeInt64,
},
},
"http.method": {
{
@@ -85,6 +90,20 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
Materialized: true,
},
},
"mixed.materialization.key": {
{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextAttribute,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: true,
},
{
Name: "mixed.materialization.key",
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeString,
Materialized: false,
},
},
"isentrypoint": {
{
Name: "isentrypoint",

View File

@@ -370,7 +370,12 @@ func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
for i, order := range q.Order {
orderKey := order.Key.Name
if !validOrderKeys[orderKey] {
// Also check the context-prefixed key name for alias matching
// This handles cases where user specifies alias like "span.count_" and
// order by comes as FieldContext=span, Name=count_
contextPrefixedKey := fmt.Sprintf("%s.%s", order.Key.FieldContext.StringValue(), order.Key.Name)
if !validOrderKeys[orderKey] && !validOrderKeys[contextPrefixedKey] {
orderId := fmt.Sprintf("order by clause #%d", i+1)
if q.Name != "" {
orderId = fmt.Sprintf("order by clause #%d in query '%s'", i+1, q.Name)
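Concretely, for an aggregation aliased span.count_ and an order-by key sent as FieldContext=span, Name=count_, the fallback reassembles the prefixed form and finds it among the valid keys; a small runnable illustration of just that lookup:

package main

import "fmt"

func main() {
    // the alias and the raw expression are both valid order-by targets
    validOrderKeys := map[string]bool{"count()": true, "span.count_": true}

    orderKey := "count_" // what arrives when "span.count_" is split into context + name
    contextPrefixedKey := fmt.Sprintf("%s.%s", "span", orderKey)

    fmt.Println(validOrderKeys[orderKey] || validOrderKeys[contextPrefixedKey]) // true, via the prefixed form
}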

View File

@@ -332,3 +332,184 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
})
}
}
func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
tests := []struct {
name string
query QueryBuilderQuery[TraceAggregation]
wantErr bool
errMsg string
}{
{
name: "order by with context-prefixed alias should pass",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
Alias: "span.count_",
},
},
Order: []OrderBy{
{
Direction: OrderDirectionDesc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "count_",
FieldContext: telemetrytypes.FieldContextSpan,
},
},
},
},
},
wantErr: false,
},
{
name: "order by with alias directly should pass",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
Alias: "my_count",
},
},
Order: []OrderBy{
{
Direction: OrderDirectionDesc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "my_count",
},
},
},
},
},
wantErr: false,
},
{
name: "order by with expression should pass",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
},
},
Order: []OrderBy{
{
Direction: OrderDirectionDesc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "count()",
},
},
},
},
},
wantErr: false,
},
{
name: "order by with invalid key should fail",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
Alias: "my_count",
},
},
Order: []OrderBy{
{
Direction: OrderDirectionDesc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "invalid_key",
},
},
},
},
},
wantErr: true,
errMsg: "invalid order by key",
},
{
name: "order by with group by key should pass",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
},
},
GroupBy: []GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
Order: []OrderBy{
{
Direction: OrderDirectionAsc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
},
wantErr: false,
},
{
name: "order by with resource context-prefixed alias should pass",
query: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{
Expression: "count()",
Alias: "resource.count_",
},
},
Order: []OrderBy{
{
Direction: OrderDirectionDesc,
Key: OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "count_",
FieldContext: telemetrytypes.FieldContextResource,
},
},
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.query.Validate(RequestTypeTimeSeries)
if tt.wantErr {
if err == nil {
t.Errorf("validateOrderByForAggregation() expected error but got none")
return
}
if tt.errMsg != "" && !contains(err.Error(), tt.errMsg) {
t.Errorf("validateOrderByForAggregation() error = %v, want to contain %v", err.Error(), tt.errMsg)
}
} else {
if err != nil {
t.Errorf("validateOrderByForAggregation() unexpected error = %v", err)
}
}
})
}
}
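The assertions above lean on a contains helper defined elsewhere in the package (it is not part of this diff); a minimal equivalent would be:

// minimal equivalent of the helper assumed above; requires "strings" in the imports
func contains(s, substr string) bool {
    return strings.Contains(s, substr)
}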

View File

@@ -48,6 +48,22 @@ func (f TelemetryFieldKey) String() string {
if f.FieldDataType != FieldDataTypeUnspecified {
sb.WriteString(fmt.Sprintf(",datatype=%s", f.FieldDataType.StringValue()))
}
if f.Materialized {
sb.WriteString(",materialized=true")
}
if f.JSONDataType != nil {
sb.WriteString(fmt.Sprintf(",jsondatatype=%s", f.JSONDataType.StringValue()))
}
if len(f.Indexes) > 0 {
sb.WriteString(",indexes=[")
for i, index := range f.Indexes {
if i > 0 {
sb.WriteString("; ")
}
sb.WriteString(fmt.Sprintf("{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression))
}
sb.WriteString("]")
}
return sb.String()
}
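Together with the existing name/context/datatype fields, String() now renders exactly the fragments seen in the collision warnings earlier in this diff. A usage sketch — FieldDataTypeArrayFloat64 is an illustrative constant name, and JSONDataType is omitted because constructing it needs the package's internal values:

key := telemetrytypes.TelemetryFieldKey{
    Name:          "education[].parameters",
    FieldContext:  telemetrytypes.FieldContextBody,
    FieldDataType: telemetrytypes.FieldDataTypeArrayFloat64, // illustrative constant name
    Materialized:  true,
}
fmt.Println(key.String())
// name=education[].parameters,context=body,datatype=[]float64,materialized=true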
@@ -55,6 +71,12 @@ func (f TelemetryFieldKey) Text() string {
return TelemetryFieldKeyToText(&f)
}
func (f *TelemetryFieldKey) Equal(key *TelemetryFieldKey) bool {
return f.Name == key.Name &&
f.FieldContext == key.FieldContext &&
f.FieldDataType == key.FieldDataType
}
// Normalize parses and normalizes a TelemetryFieldKey by extracting
// the field context and data type from the field name if they are not already specified.
// This function modifies the key in place.

View File

@@ -270,7 +270,6 @@ def test_traces_list(
)
assert_identical_query_response(response, response_with_inline_context)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
@@ -518,9 +517,7 @@ def test_traces_list(
# Case 6a: count by span.count_
pytest.param({"name": "span.count_"}, "count_", HTTPStatus.OK),
# Case 6b: count by span.count_ with alias span.count_
pytest.param(
{"name": "span.count_"}, "span.count_", HTTPStatus.BAD_REQUEST
), # THIS SHOULD BE OK BUT FAILS DUE TO LIMITATION IN CURRENT IMPLEMENTATION
pytest.param({"name": "span.count_"}, "span.count_", HTTPStatus.OK),
# Case 7a: count by span.count_ and context specified in the key [BAD REQUEST]
pytest.param(
{"name": "span.count_", "fieldContext": "span"},