chore: recognize variable in expression (#8328)

This commit is contained in:
Srikanth Chekuri
2025-06-23 14:00:50 +05:30
committed by GitHub
parent f0994e52c0
commit 6d009c6607
27 changed files with 1311 additions and 322 deletions

View File

@@ -59,7 +59,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Querier),
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier),
})
if err != nil {

View File

@@ -79,11 +79,13 @@ comparison
// Membership test. Matches either a parenthesized list `IN ( ... )`,
// a bracketed list `IN [ ... ]`, or a single bare value `IN value`
// (the last alternative supports dashboard variables on the RHS).
inClause
: IN LPAREN valueList RPAREN
| IN LBRACK valueList RBRACK
| IN value
;
// Negated membership test. Mirrors inClause: `NOT IN ( ... )`,
// `NOT IN [ ... ]`, or a single bare value `NOT IN value`.
notInClause
: NOT IN LPAREN valueList RPAREN
| NOT IN LBRACK valueList RBRACK
| NOT IN value
;
// List of values for in(...) or in[...]
@@ -206,7 +208,7 @@ QUOTED_TEXT
)
;
fragment SEGMENT : [a-zA-Z] [a-zA-Z0-9_:\-]* ;
fragment SEGMENT : [a-zA-Z$] [a-zA-Z0-9$_:\-]* ;
fragment EMPTY_BRACKS : '[' ']' ;
fragment OLD_JSON_BRACKS: '[' '*' ']';

View File

@@ -70,7 +70,7 @@ func parseFieldKeyRequest(r *http.Request) (*telemetrytypes.FieldKeySelector, er
}
}
name := r.URL.Query().Get("name")
name := r.URL.Query().Get("searchText")
req = telemetrytypes.FieldKeySelector{
StartUnixMilli: startUnixMilli,
@@ -92,8 +92,10 @@ func parseFieldValueRequest(r *http.Request) (*telemetrytypes.FieldValueSelector
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse field key request")
}
name := r.URL.Query().Get("name")
keySelector.Name = name
existingQuery := r.URL.Query().Get("existingQuery")
value := r.URL.Query().Get("value")
value := r.URL.Query().Get("searchText")
// Parse limit for fieldValue request, fallback to default 50 if parsing fails.
limit, err := strconv.Atoi(r.URL.Query().Get("limit"))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -110,53 +110,53 @@ func filterquerylexerLexerInit() {
67, 99, 99, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 72, 72,
104, 104, 2, 0, 89, 89, 121, 121, 2, 0, 85, 85, 117, 117, 2, 0, 70, 70,
102, 102, 2, 0, 43, 43, 45, 45, 2, 0, 34, 34, 92, 92, 2, 0, 39, 39, 92,
92, 2, 0, 65, 90, 97, 122, 5, 0, 45, 45, 48, 58, 65, 90, 95, 95, 97, 122,
3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10, 13, 13, 32, 34,
39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0, 0, 0, 0, 3, 1,
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1,
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19,
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0,
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0,
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0,
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0,
0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 59, 1,
0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0, 0, 75,
1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81, 1, 0, 0, 0, 7,
83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13, 92, 1, 0, 0,
0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0, 0, 0, 21, 103,
1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27, 113, 1, 0, 0,
0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1, 0, 0, 0, 35, 154,
1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0, 41, 179, 1, 0, 0,
0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190, 1, 0, 0, 0, 49, 193,
1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0, 0, 55, 220, 1, 0, 0,
0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294, 1, 0, 0, 0, 63, 296,
1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0, 0, 69, 310, 1, 0, 0,
0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330, 1, 0, 0, 0, 77, 78,
5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0, 80, 4, 1, 0, 0, 0,
81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93, 0, 0, 84, 8, 1,
0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91, 5, 61, 0, 0,
88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0, 0, 90, 88, 1,
0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94, 5, 61, 0, 0,
94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0, 0, 97, 16, 1,
0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101, 5, 60, 0, 0,
101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62, 0, 0, 104,
22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0, 107, 24,
1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111, 7, 2,
0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4, 0, 0,
114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0, 117,
116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120,
1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123, 7, 1,
0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0, 0, 0,
126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0, 129,
130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 7,
4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7, 0,
0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0, 138,
139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141, 142,
7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145, 7, 3,
0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3, 0, 0,
148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0, 151,
152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155, 7,
3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11,
92, 3, 0, 36, 36, 65, 90, 97, 122, 6, 0, 36, 36, 45, 45, 48, 58, 65, 90,
95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 8, 0, 9, 10,
13, 13, 32, 34, 39, 41, 44, 44, 60, 62, 91, 91, 93, 93, 358, 0, 1, 1, 0,
0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0,
0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1,
0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25,
1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0,
33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0,
0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0,
0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0,
0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1,
0, 0, 0, 0, 75, 1, 0, 0, 0, 1, 77, 1, 0, 0, 0, 3, 79, 1, 0, 0, 0, 5, 81,
1, 0, 0, 0, 7, 83, 1, 0, 0, 0, 9, 85, 1, 0, 0, 0, 11, 90, 1, 0, 0, 0, 13,
92, 1, 0, 0, 0, 15, 95, 1, 0, 0, 0, 17, 98, 1, 0, 0, 0, 19, 100, 1, 0,
0, 0, 21, 103, 1, 0, 0, 0, 23, 105, 1, 0, 0, 0, 25, 108, 1, 0, 0, 0, 27,
113, 1, 0, 0, 0, 29, 126, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 146, 1,
0, 0, 0, 35, 154, 1, 0, 0, 0, 37, 162, 1, 0, 0, 0, 39, 169, 1, 0, 0, 0,
41, 179, 1, 0, 0, 0, 43, 182, 1, 0, 0, 0, 45, 186, 1, 0, 0, 0, 47, 190,
1, 0, 0, 0, 49, 193, 1, 0, 0, 0, 51, 197, 1, 0, 0, 0, 53, 204, 1, 0, 0,
0, 55, 220, 1, 0, 0, 0, 57, 222, 1, 0, 0, 0, 59, 272, 1, 0, 0, 0, 61, 294,
1, 0, 0, 0, 63, 296, 1, 0, 0, 0, 65, 303, 1, 0, 0, 0, 67, 306, 1, 0, 0,
0, 69, 310, 1, 0, 0, 0, 71, 321, 1, 0, 0, 0, 73, 327, 1, 0, 0, 0, 75, 330,
1, 0, 0, 0, 77, 78, 5, 40, 0, 0, 78, 2, 1, 0, 0, 0, 79, 80, 5, 41, 0, 0,
80, 4, 1, 0, 0, 0, 81, 82, 5, 91, 0, 0, 82, 6, 1, 0, 0, 0, 83, 84, 5, 93,
0, 0, 84, 8, 1, 0, 0, 0, 85, 86, 5, 44, 0, 0, 86, 10, 1, 0, 0, 0, 87, 91,
5, 61, 0, 0, 88, 89, 5, 61, 0, 0, 89, 91, 5, 61, 0, 0, 90, 87, 1, 0, 0,
0, 90, 88, 1, 0, 0, 0, 91, 12, 1, 0, 0, 0, 92, 93, 5, 33, 0, 0, 93, 94,
5, 61, 0, 0, 94, 14, 1, 0, 0, 0, 95, 96, 5, 60, 0, 0, 96, 97, 5, 62, 0,
0, 97, 16, 1, 0, 0, 0, 98, 99, 5, 60, 0, 0, 99, 18, 1, 0, 0, 0, 100, 101,
5, 60, 0, 0, 101, 102, 5, 61, 0, 0, 102, 20, 1, 0, 0, 0, 103, 104, 5, 62,
0, 0, 104, 22, 1, 0, 0, 0, 105, 106, 5, 62, 0, 0, 106, 107, 5, 61, 0, 0,
107, 24, 1, 0, 0, 0, 108, 109, 7, 0, 0, 0, 109, 110, 7, 1, 0, 0, 110, 111,
7, 2, 0, 0, 111, 112, 7, 3, 0, 0, 112, 26, 1, 0, 0, 0, 113, 114, 7, 4,
0, 0, 114, 115, 7, 5, 0, 0, 115, 117, 7, 6, 0, 0, 116, 118, 7, 7, 0, 0,
117, 116, 1, 0, 0, 0, 118, 119, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119,
120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 7, 0, 0, 0, 122, 123,
7, 1, 0, 0, 123, 124, 7, 2, 0, 0, 124, 125, 7, 3, 0, 0, 125, 28, 1, 0,
0, 0, 126, 127, 7, 1, 0, 0, 127, 128, 7, 0, 0, 0, 128, 129, 7, 1, 0, 0,
129, 130, 7, 2, 0, 0, 130, 131, 7, 3, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133,
7, 4, 0, 0, 133, 134, 7, 5, 0, 0, 134, 136, 7, 6, 0, 0, 135, 137, 7, 7,
0, 0, 136, 135, 1, 0, 0, 0, 137, 138, 1, 0, 0, 0, 138, 136, 1, 0, 0, 0,
138, 139, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 7, 1, 0, 0, 141,
142, 7, 0, 0, 0, 142, 143, 7, 1, 0, 0, 143, 144, 7, 2, 0, 0, 144, 145,
7, 3, 0, 0, 145, 32, 1, 0, 0, 0, 146, 147, 7, 8, 0, 0, 147, 148, 7, 3,
0, 0, 148, 149, 7, 6, 0, 0, 149, 150, 7, 9, 0, 0, 150, 151, 7, 3, 0, 0,
151, 152, 7, 3, 0, 0, 152, 153, 7, 4, 0, 0, 153, 34, 1, 0, 0, 0, 154, 155,
7, 3, 0, 0, 155, 156, 7, 10, 0, 0, 156, 157, 7, 1, 0, 0, 157, 158, 7, 11,
0, 0, 158, 160, 7, 6, 0, 0, 159, 161, 7, 11, 0, 0, 160, 159, 1, 0, 0, 0,
160, 161, 1, 0, 0, 0, 161, 36, 1, 0, 0, 0, 162, 163, 7, 12, 0, 0, 163,
164, 7, 3, 0, 0, 164, 165, 7, 13, 0, 0, 165, 166, 7, 3, 0, 0, 166, 167,

View File

@@ -51,7 +51,7 @@ func filterqueryParserInit() {
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 33, 213, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 1, 33, 218, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
2, 16, 7, 16, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 5, 2, 43,
@@ -65,83 +65,86 @@ func filterqueryParserInit() {
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1,
6, 3, 6, 149, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1,
7, 1, 7, 3, 7, 161, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 175, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 180,
8, 9, 10, 9, 12, 9, 183, 9, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11,
1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 195, 8, 12, 10, 12, 12, 12, 198, 9,
12, 1, 13, 1, 13, 1, 13, 3, 13, 203, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14,
1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0, 17, 0, 2, 4, 6, 8, 10, 12, 14,
16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 6, 1, 0, 7, 8, 2, 0, 13, 13, 15,
15, 2, 0, 14, 14, 16, 16, 2, 0, 30, 30, 33, 33, 1, 0, 25, 27, 1, 0, 28,
31, 227, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0, 0, 4, 39, 1, 0, 0, 0, 6, 47,
1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 148, 1, 0, 0, 0,
14, 160, 1, 0, 0, 0, 16, 174, 1, 0, 0, 0, 18, 176, 1, 0, 0, 0, 20, 184,
1, 0, 0, 0, 22, 186, 1, 0, 0, 0, 24, 191, 1, 0, 0, 0, 26, 202, 1, 0, 0,
0, 28, 204, 1, 0, 0, 0, 30, 208, 1, 0, 0, 0, 32, 210, 1, 0, 0, 0, 34, 35,
3, 2, 1, 0, 35, 36, 5, 0, 0, 1, 36, 1, 1, 0, 0, 0, 37, 38, 3, 4, 2, 0,
38, 3, 1, 0, 0, 0, 39, 44, 3, 6, 3, 0, 40, 41, 5, 24, 0, 0, 41, 43, 3,
6, 3, 0, 42, 40, 1, 0, 0, 0, 43, 46, 1, 0, 0, 0, 44, 42, 1, 0, 0, 0, 44,
45, 1, 0, 0, 0, 45, 5, 1, 0, 0, 0, 46, 44, 1, 0, 0, 0, 47, 53, 3, 8, 4,
0, 48, 49, 5, 23, 0, 0, 49, 52, 3, 8, 4, 0, 50, 52, 3, 8, 4, 0, 51, 48,
1, 0, 0, 0, 51, 50, 1, 0, 0, 0, 52, 55, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0,
53, 54, 1, 0, 0, 0, 54, 7, 1, 0, 0, 0, 55, 53, 1, 0, 0, 0, 56, 58, 5, 22,
0, 0, 57, 56, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60,
3, 10, 5, 0, 60, 9, 1, 0, 0, 0, 61, 62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0,
63, 64, 5, 2, 0, 0, 64, 71, 1, 0, 0, 0, 65, 71, 3, 12, 6, 0, 66, 71, 3,
22, 11, 0, 67, 71, 3, 20, 10, 0, 68, 71, 3, 32, 16, 0, 69, 71, 3, 30, 15,
0, 70, 61, 1, 0, 0, 0, 70, 65, 1, 0, 0, 0, 70, 66, 1, 0, 0, 0, 70, 67,
1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70, 69, 1, 0, 0, 0, 71, 11, 1, 0, 0, 0,
72, 73, 3, 32, 16, 0, 73, 74, 5, 6, 0, 0, 74, 75, 3, 30, 15, 0, 75, 149,
1, 0, 0, 0, 76, 77, 3, 32, 16, 0, 77, 78, 7, 0, 0, 0, 78, 79, 3, 30, 15,
0, 79, 149, 1, 0, 0, 0, 80, 81, 3, 32, 16, 0, 81, 82, 5, 9, 0, 0, 82, 83,
3, 30, 15, 0, 83, 149, 1, 0, 0, 0, 84, 85, 3, 32, 16, 0, 85, 86, 5, 10,
0, 0, 86, 87, 3, 30, 15, 0, 87, 149, 1, 0, 0, 0, 88, 89, 3, 32, 16, 0,
89, 90, 5, 11, 0, 0, 90, 91, 3, 30, 15, 0, 91, 149, 1, 0, 0, 0, 92, 93,
3, 32, 16, 0, 93, 94, 5, 12, 0, 0, 94, 95, 3, 30, 15, 0, 95, 149, 1, 0,
0, 0, 96, 97, 3, 32, 16, 0, 97, 98, 7, 1, 0, 0, 98, 99, 3, 30, 15, 0, 99,
149, 1, 0, 0, 0, 100, 101, 3, 32, 16, 0, 101, 102, 7, 2, 0, 0, 102, 103,
3, 30, 15, 0, 103, 149, 1, 0, 0, 0, 104, 105, 3, 32, 16, 0, 105, 106, 5,
17, 0, 0, 106, 107, 3, 30, 15, 0, 107, 108, 5, 23, 0, 0, 108, 109, 3, 30,
15, 0, 109, 149, 1, 0, 0, 0, 110, 111, 3, 32, 16, 0, 111, 112, 5, 22, 0,
0, 112, 113, 5, 17, 0, 0, 113, 114, 3, 30, 15, 0, 114, 115, 5, 23, 0, 0,
115, 116, 3, 30, 15, 0, 116, 149, 1, 0, 0, 0, 117, 118, 3, 32, 16, 0, 118,
119, 3, 14, 7, 0, 119, 149, 1, 0, 0, 0, 120, 121, 3, 32, 16, 0, 121, 122,
3, 16, 8, 0, 122, 149, 1, 0, 0, 0, 123, 124, 3, 32, 16, 0, 124, 125, 5,
18, 0, 0, 125, 149, 1, 0, 0, 0, 126, 127, 3, 32, 16, 0, 127, 128, 5, 22,
0, 0, 128, 129, 5, 18, 0, 0, 129, 149, 1, 0, 0, 0, 130, 131, 3, 32, 16,
0, 131, 132, 5, 19, 0, 0, 132, 133, 3, 30, 15, 0, 133, 149, 1, 0, 0, 0,
134, 135, 3, 32, 16, 0, 135, 136, 5, 22, 0, 0, 136, 137, 5, 19, 0, 0, 137,
138, 3, 30, 15, 0, 138, 149, 1, 0, 0, 0, 139, 140, 3, 32, 16, 0, 140, 141,
5, 20, 0, 0, 141, 142, 3, 30, 15, 0, 142, 149, 1, 0, 0, 0, 143, 144, 3,
32, 16, 0, 144, 145, 5, 22, 0, 0, 145, 146, 5, 20, 0, 0, 146, 147, 3, 30,
15, 0, 147, 149, 1, 0, 0, 0, 148, 72, 1, 0, 0, 0, 148, 76, 1, 0, 0, 0,
148, 80, 1, 0, 0, 0, 148, 84, 1, 0, 0, 0, 148, 88, 1, 0, 0, 0, 148, 92,
1, 0, 0, 0, 148, 96, 1, 0, 0, 0, 148, 100, 1, 0, 0, 0, 148, 104, 1, 0,
0, 0, 148, 110, 1, 0, 0, 0, 148, 117, 1, 0, 0, 0, 148, 120, 1, 0, 0, 0,
148, 123, 1, 0, 0, 0, 148, 126, 1, 0, 0, 0, 148, 130, 1, 0, 0, 0, 148,
134, 1, 0, 0, 0, 148, 139, 1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 149, 13, 1,
0, 0, 0, 150, 151, 5, 21, 0, 0, 151, 152, 5, 1, 0, 0, 152, 153, 3, 18,
9, 0, 153, 154, 5, 2, 0, 0, 154, 161, 1, 0, 0, 0, 155, 156, 5, 21, 0, 0,
156, 157, 5, 3, 0, 0, 157, 158, 3, 18, 9, 0, 158, 159, 5, 4, 0, 0, 159,
161, 1, 0, 0, 0, 160, 150, 1, 0, 0, 0, 160, 155, 1, 0, 0, 0, 161, 15, 1,
0, 0, 0, 162, 163, 5, 22, 0, 0, 163, 164, 5, 21, 0, 0, 164, 165, 5, 1,
0, 0, 165, 166, 3, 18, 9, 0, 166, 167, 5, 2, 0, 0, 167, 175, 1, 0, 0, 0,
168, 169, 5, 22, 0, 0, 169, 170, 5, 21, 0, 0, 170, 171, 5, 3, 0, 0, 171,
172, 3, 18, 9, 0, 172, 173, 5, 4, 0, 0, 173, 175, 1, 0, 0, 0, 174, 162,
1, 0, 0, 0, 174, 168, 1, 0, 0, 0, 175, 17, 1, 0, 0, 0, 176, 181, 3, 30,
15, 0, 177, 178, 5, 5, 0, 0, 178, 180, 3, 30, 15, 0, 179, 177, 1, 0, 0,
0, 180, 183, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182,
19, 1, 0, 0, 0, 183, 181, 1, 0, 0, 0, 184, 185, 7, 3, 0, 0, 185, 21, 1,
0, 0, 0, 186, 187, 7, 4, 0, 0, 187, 188, 5, 1, 0, 0, 188, 189, 3, 24, 12,
0, 189, 190, 5, 2, 0, 0, 190, 23, 1, 0, 0, 0, 191, 196, 3, 26, 13, 0, 192,
193, 5, 5, 0, 0, 193, 195, 3, 26, 13, 0, 194, 192, 1, 0, 0, 0, 195, 198,
1, 0, 0, 0, 196, 194, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 25, 1, 0,
0, 0, 198, 196, 1, 0, 0, 0, 199, 203, 3, 32, 16, 0, 200, 203, 3, 30, 15,
0, 201, 203, 3, 28, 14, 0, 202, 199, 1, 0, 0, 0, 202, 200, 1, 0, 0, 0,
202, 201, 1, 0, 0, 0, 203, 27, 1, 0, 0, 0, 204, 205, 5, 3, 0, 0, 205, 206,
3, 18, 9, 0, 206, 207, 5, 4, 0, 0, 207, 29, 1, 0, 0, 0, 208, 209, 7, 5,
0, 0, 209, 31, 1, 0, 0, 0, 210, 211, 5, 31, 0, 0, 211, 33, 1, 0, 0, 0,
11, 44, 51, 53, 57, 70, 148, 160, 174, 181, 196, 202,
7, 1, 7, 1, 7, 1, 7, 3, 7, 163, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 180, 8,
8, 1, 9, 1, 9, 1, 9, 5, 9, 185, 8, 9, 10, 9, 12, 9, 188, 9, 9, 1, 10, 1,
10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 200,
8, 12, 10, 12, 12, 12, 203, 9, 12, 1, 13, 1, 13, 1, 13, 3, 13, 208, 8,
13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 0, 0,
17, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 0, 6,
1, 0, 7, 8, 2, 0, 13, 13, 15, 15, 2, 0, 14, 14, 16, 16, 2, 0, 30, 30, 33,
33, 1, 0, 25, 27, 1, 0, 28, 31, 234, 0, 34, 1, 0, 0, 0, 2, 37, 1, 0, 0,
0, 4, 39, 1, 0, 0, 0, 6, 47, 1, 0, 0, 0, 8, 57, 1, 0, 0, 0, 10, 70, 1,
0, 0, 0, 12, 148, 1, 0, 0, 0, 14, 162, 1, 0, 0, 0, 16, 179, 1, 0, 0, 0,
18, 181, 1, 0, 0, 0, 20, 189, 1, 0, 0, 0, 22, 191, 1, 0, 0, 0, 24, 196,
1, 0, 0, 0, 26, 207, 1, 0, 0, 0, 28, 209, 1, 0, 0, 0, 30, 213, 1, 0, 0,
0, 32, 215, 1, 0, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 0, 0, 1, 36, 1,
1, 0, 0, 0, 37, 38, 3, 4, 2, 0, 38, 3, 1, 0, 0, 0, 39, 44, 3, 6, 3, 0,
40, 41, 5, 24, 0, 0, 41, 43, 3, 6, 3, 0, 42, 40, 1, 0, 0, 0, 43, 46, 1,
0, 0, 0, 44, 42, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 5, 1, 0, 0, 0, 46,
44, 1, 0, 0, 0, 47, 53, 3, 8, 4, 0, 48, 49, 5, 23, 0, 0, 49, 52, 3, 8,
4, 0, 50, 52, 3, 8, 4, 0, 51, 48, 1, 0, 0, 0, 51, 50, 1, 0, 0, 0, 52, 55,
1, 0, 0, 0, 53, 51, 1, 0, 0, 0, 53, 54, 1, 0, 0, 0, 54, 7, 1, 0, 0, 0,
55, 53, 1, 0, 0, 0, 56, 58, 5, 22, 0, 0, 57, 56, 1, 0, 0, 0, 57, 58, 1,
0, 0, 0, 58, 59, 1, 0, 0, 0, 59, 60, 3, 10, 5, 0, 60, 9, 1, 0, 0, 0, 61,
62, 5, 1, 0, 0, 62, 63, 3, 4, 2, 0, 63, 64, 5, 2, 0, 0, 64, 71, 1, 0, 0,
0, 65, 71, 3, 12, 6, 0, 66, 71, 3, 22, 11, 0, 67, 71, 3, 20, 10, 0, 68,
71, 3, 32, 16, 0, 69, 71, 3, 30, 15, 0, 70, 61, 1, 0, 0, 0, 70, 65, 1,
0, 0, 0, 70, 66, 1, 0, 0, 0, 70, 67, 1, 0, 0, 0, 70, 68, 1, 0, 0, 0, 70,
69, 1, 0, 0, 0, 71, 11, 1, 0, 0, 0, 72, 73, 3, 32, 16, 0, 73, 74, 5, 6,
0, 0, 74, 75, 3, 30, 15, 0, 75, 149, 1, 0, 0, 0, 76, 77, 3, 32, 16, 0,
77, 78, 7, 0, 0, 0, 78, 79, 3, 30, 15, 0, 79, 149, 1, 0, 0, 0, 80, 81,
3, 32, 16, 0, 81, 82, 5, 9, 0, 0, 82, 83, 3, 30, 15, 0, 83, 149, 1, 0,
0, 0, 84, 85, 3, 32, 16, 0, 85, 86, 5, 10, 0, 0, 86, 87, 3, 30, 15, 0,
87, 149, 1, 0, 0, 0, 88, 89, 3, 32, 16, 0, 89, 90, 5, 11, 0, 0, 90, 91,
3, 30, 15, 0, 91, 149, 1, 0, 0, 0, 92, 93, 3, 32, 16, 0, 93, 94, 5, 12,
0, 0, 94, 95, 3, 30, 15, 0, 95, 149, 1, 0, 0, 0, 96, 97, 3, 32, 16, 0,
97, 98, 7, 1, 0, 0, 98, 99, 3, 30, 15, 0, 99, 149, 1, 0, 0, 0, 100, 101,
3, 32, 16, 0, 101, 102, 7, 2, 0, 0, 102, 103, 3, 30, 15, 0, 103, 149, 1,
0, 0, 0, 104, 105, 3, 32, 16, 0, 105, 106, 5, 17, 0, 0, 106, 107, 3, 30,
15, 0, 107, 108, 5, 23, 0, 0, 108, 109, 3, 30, 15, 0, 109, 149, 1, 0, 0,
0, 110, 111, 3, 32, 16, 0, 111, 112, 5, 22, 0, 0, 112, 113, 5, 17, 0, 0,
113, 114, 3, 30, 15, 0, 114, 115, 5, 23, 0, 0, 115, 116, 3, 30, 15, 0,
116, 149, 1, 0, 0, 0, 117, 118, 3, 32, 16, 0, 118, 119, 3, 14, 7, 0, 119,
149, 1, 0, 0, 0, 120, 121, 3, 32, 16, 0, 121, 122, 3, 16, 8, 0, 122, 149,
1, 0, 0, 0, 123, 124, 3, 32, 16, 0, 124, 125, 5, 18, 0, 0, 125, 149, 1,
0, 0, 0, 126, 127, 3, 32, 16, 0, 127, 128, 5, 22, 0, 0, 128, 129, 5, 18,
0, 0, 129, 149, 1, 0, 0, 0, 130, 131, 3, 32, 16, 0, 131, 132, 5, 19, 0,
0, 132, 133, 3, 30, 15, 0, 133, 149, 1, 0, 0, 0, 134, 135, 3, 32, 16, 0,
135, 136, 5, 22, 0, 0, 136, 137, 5, 19, 0, 0, 137, 138, 3, 30, 15, 0, 138,
149, 1, 0, 0, 0, 139, 140, 3, 32, 16, 0, 140, 141, 5, 20, 0, 0, 141, 142,
3, 30, 15, 0, 142, 149, 1, 0, 0, 0, 143, 144, 3, 32, 16, 0, 144, 145, 5,
22, 0, 0, 145, 146, 5, 20, 0, 0, 146, 147, 3, 30, 15, 0, 147, 149, 1, 0,
0, 0, 148, 72, 1, 0, 0, 0, 148, 76, 1, 0, 0, 0, 148, 80, 1, 0, 0, 0, 148,
84, 1, 0, 0, 0, 148, 88, 1, 0, 0, 0, 148, 92, 1, 0, 0, 0, 148, 96, 1, 0,
0, 0, 148, 100, 1, 0, 0, 0, 148, 104, 1, 0, 0, 0, 148, 110, 1, 0, 0, 0,
148, 117, 1, 0, 0, 0, 148, 120, 1, 0, 0, 0, 148, 123, 1, 0, 0, 0, 148,
126, 1, 0, 0, 0, 148, 130, 1, 0, 0, 0, 148, 134, 1, 0, 0, 0, 148, 139,
1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 149, 13, 1, 0, 0, 0, 150, 151, 5, 21,
0, 0, 151, 152, 5, 1, 0, 0, 152, 153, 3, 18, 9, 0, 153, 154, 5, 2, 0, 0,
154, 163, 1, 0, 0, 0, 155, 156, 5, 21, 0, 0, 156, 157, 5, 3, 0, 0, 157,
158, 3, 18, 9, 0, 158, 159, 5, 4, 0, 0, 159, 163, 1, 0, 0, 0, 160, 161,
5, 21, 0, 0, 161, 163, 3, 30, 15, 0, 162, 150, 1, 0, 0, 0, 162, 155, 1,
0, 0, 0, 162, 160, 1, 0, 0, 0, 163, 15, 1, 0, 0, 0, 164, 165, 5, 22, 0,
0, 165, 166, 5, 21, 0, 0, 166, 167, 5, 1, 0, 0, 167, 168, 3, 18, 9, 0,
168, 169, 5, 2, 0, 0, 169, 180, 1, 0, 0, 0, 170, 171, 5, 22, 0, 0, 171,
172, 5, 21, 0, 0, 172, 173, 5, 3, 0, 0, 173, 174, 3, 18, 9, 0, 174, 175,
5, 4, 0, 0, 175, 180, 1, 0, 0, 0, 176, 177, 5, 22, 0, 0, 177, 178, 5, 21,
0, 0, 178, 180, 3, 30, 15, 0, 179, 164, 1, 0, 0, 0, 179, 170, 1, 0, 0,
0, 179, 176, 1, 0, 0, 0, 180, 17, 1, 0, 0, 0, 181, 186, 3, 30, 15, 0, 182,
183, 5, 5, 0, 0, 183, 185, 3, 30, 15, 0, 184, 182, 1, 0, 0, 0, 185, 188,
1, 0, 0, 0, 186, 184, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 19, 1, 0,
0, 0, 188, 186, 1, 0, 0, 0, 189, 190, 7, 3, 0, 0, 190, 21, 1, 0, 0, 0,
191, 192, 7, 4, 0, 0, 192, 193, 5, 1, 0, 0, 193, 194, 3, 24, 12, 0, 194,
195, 5, 2, 0, 0, 195, 23, 1, 0, 0, 0, 196, 201, 3, 26, 13, 0, 197, 198,
5, 5, 0, 0, 198, 200, 3, 26, 13, 0, 199, 197, 1, 0, 0, 0, 200, 203, 1,
0, 0, 0, 201, 199, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 25, 1, 0, 0,
0, 203, 201, 1, 0, 0, 0, 204, 208, 3, 32, 16, 0, 205, 208, 3, 30, 15, 0,
206, 208, 3, 28, 14, 0, 207, 204, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 207,
206, 1, 0, 0, 0, 208, 27, 1, 0, 0, 0, 209, 210, 5, 3, 0, 0, 210, 211, 3,
18, 9, 0, 211, 212, 5, 4, 0, 0, 212, 29, 1, 0, 0, 0, 213, 214, 7, 5, 0,
0, 214, 31, 1, 0, 0, 0, 215, 216, 5, 31, 0, 0, 216, 33, 1, 0, 0, 0, 11,
44, 51, 53, 57, 70, 148, 162, 179, 186, 201, 207,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@@ -1947,6 +1950,7 @@ type IInClauseContext interface {
RPAREN() antlr.TerminalNode
LBRACK() antlr.TerminalNode
RBRACK() antlr.TerminalNode
Value() IValueContext
// IsInClauseContext differentiates from other interfaces.
IsInClauseContext()
@@ -2020,6 +2024,22 @@ func (s *InClauseContext) RBRACK() antlr.TerminalNode {
return s.GetToken(FilterQueryParserRBRACK, 0)
}
// Value returns the IValueContext child of this in-clause, or nil when the
// matched alternative had no bare value on the right-hand side.
func (s *InClauseContext) Value() IValueContext {
	for _, child := range s.GetChildren() {
		if value, ok := child.(IValueContext); ok {
			return value
		}
	}
	return nil
}
// GetRuleContext returns the context itself, as required by the
// antlr.RuleContext interface.
func (s *InClauseContext) GetRuleContext() antlr.RuleContext {
	return s
}
@@ -2053,7 +2073,7 @@ func (s *InClauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
localctx = NewInClauseContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, FilterQueryParserRULE_inClause)
p.SetState(160)
p.SetState(162)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -2122,6 +2142,21 @@ func (p *FilterQueryParser) InClause() (localctx IInClauseContext) {
}
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(160)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(161)
p.Value()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
@@ -2154,6 +2189,7 @@ type INotInClauseContext interface {
RPAREN() antlr.TerminalNode
LBRACK() antlr.TerminalNode
RBRACK() antlr.TerminalNode
Value() IValueContext
// IsNotInClauseContext differentiates from other interfaces.
IsNotInClauseContext()
@@ -2231,6 +2267,22 @@ func (s *NotInClauseContext) RBRACK() antlr.TerminalNode {
return s.GetToken(FilterQueryParserRBRACK, 0)
}
// Value returns the IValueContext child of this not-in-clause, or nil when
// the matched alternative had no bare value on the right-hand side.
func (s *NotInClauseContext) Value() IValueContext {
	for _, child := range s.GetChildren() {
		if value, ok := child.(IValueContext); ok {
			return value
		}
	}
	return nil
}
// GetRuleContext returns the context itself, as required by the
// antlr.RuleContext interface.
func (s *NotInClauseContext) GetRuleContext() antlr.RuleContext {
	return s
}
@@ -2264,7 +2316,7 @@ func (s *NotInClauseContext) Accept(visitor antlr.ParseTreeVisitor) interface{}
func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
localctx = NewNotInClauseContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, FilterQueryParserRULE_notInClause)
p.SetState(174)
p.SetState(179)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -2274,7 +2326,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(162)
p.SetState(164)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@@ -2282,7 +2334,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(163)
p.SetState(165)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@@ -2290,7 +2342,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(164)
p.SetState(166)
p.Match(FilterQueryParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
@@ -2298,11 +2350,11 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(165)
p.SetState(167)
p.ValueList()
}
{
p.SetState(166)
p.SetState(168)
p.Match(FilterQueryParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
@@ -2313,7 +2365,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(168)
p.SetState(170)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
@@ -2321,7 +2373,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(169)
p.SetState(171)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
@@ -2329,7 +2381,7 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(170)
p.SetState(172)
p.Match(FilterQueryParserLBRACK)
if p.HasError() {
// Recognition error - abort rule
@@ -2337,11 +2389,11 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
{
p.SetState(171)
p.SetState(173)
p.ValueList()
}
{
p.SetState(172)
p.SetState(174)
p.Match(FilterQueryParserRBRACK)
if p.HasError() {
// Recognition error - abort rule
@@ -2349,6 +2401,29 @@ func (p *FilterQueryParser) NotInClause() (localctx INotInClauseContext) {
}
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(176)
p.Match(FilterQueryParserNOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(177)
p.Match(FilterQueryParserIN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(178)
p.Value()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
@@ -2501,10 +2576,10 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(176)
p.SetState(181)
p.Value()
}
p.SetState(181)
p.SetState(186)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -2513,7 +2588,7 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
for _la == FilterQueryParserCOMMA {
{
p.SetState(177)
p.SetState(182)
p.Match(FilterQueryParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
@@ -2521,11 +2596,11 @@ func (p *FilterQueryParser) ValueList() (localctx IValueListContext) {
}
}
{
p.SetState(178)
p.SetState(183)
p.Value()
}
p.SetState(183)
p.SetState(188)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -2638,7 +2713,7 @@ func (p *FilterQueryParser) FullText() (localctx IFullTextContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(184)
p.SetState(189)
_la = p.GetTokenStream().LA(1)
if !(_la == FilterQueryParserQUOTED_TEXT || _la == FilterQueryParserFREETEXT) {
@@ -2786,7 +2861,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(186)
p.SetState(191)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&234881024) != 0) {
@@ -2797,7 +2872,7 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
}
}
{
p.SetState(187)
p.SetState(192)
p.Match(FilterQueryParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
@@ -2805,11 +2880,11 @@ func (p *FilterQueryParser) FunctionCall() (localctx IFunctionCallContext) {
}
}
{
p.SetState(188)
p.SetState(193)
p.FunctionParamList()
}
{
p.SetState(189)
p.SetState(194)
p.Match(FilterQueryParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
@@ -2965,10 +3040,10 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
p.EnterOuterAlt(localctx, 1)
{
p.SetState(191)
p.SetState(196)
p.FunctionParam()
}
p.SetState(196)
p.SetState(201)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -2977,7 +3052,7 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
for _la == FilterQueryParserCOMMA {
{
p.SetState(192)
p.SetState(197)
p.Match(FilterQueryParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
@@ -2985,11 +3060,11 @@ func (p *FilterQueryParser) FunctionParamList() (localctx IFunctionParamListCont
}
}
{
p.SetState(193)
p.SetState(198)
p.FunctionParam()
}
p.SetState(198)
p.SetState(203)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -3139,7 +3214,7 @@ func (s *FunctionParamContext) Accept(visitor antlr.ParseTreeVisitor) interface{
func (p *FilterQueryParser) FunctionParam() (localctx IFunctionParamContext) {
localctx = NewFunctionParamContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 26, FilterQueryParserRULE_functionParam)
p.SetState(202)
p.SetState(207)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
@@ -3149,21 +3224,21 @@ func (p *FilterQueryParser) FunctionParam() (localctx IFunctionParamContext) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(199)
p.SetState(204)
p.Key()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(200)
p.SetState(205)
p.Value()
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(201)
p.SetState(206)
p.Array()
}
@@ -3291,7 +3366,7 @@ func (p *FilterQueryParser) Array() (localctx IArrayContext) {
p.EnterRule(localctx, 28, FilterQueryParserRULE_array)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(204)
p.SetState(209)
p.Match(FilterQueryParserLBRACK)
if p.HasError() {
// Recognition error - abort rule
@@ -3299,11 +3374,11 @@ func (p *FilterQueryParser) Array() (localctx IArrayContext) {
}
}
{
p.SetState(205)
p.SetState(210)
p.ValueList()
}
{
p.SetState(206)
p.SetState(211)
p.Match(FilterQueryParserRBRACK)
if p.HasError() {
// Recognition error - abort rule
@@ -3426,7 +3501,7 @@ func (p *FilterQueryParser) Value() (localctx IValueContext) {
p.EnterOuterAlt(localctx, 1)
{
p.SetState(208)
p.SetState(213)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&4026531840) != 0) {
@@ -3535,7 +3610,7 @@ func (p *FilterQueryParser) Key() (localctx IKeyContext) {
p.EnterRule(localctx, 32, FilterQueryParserRULE_key)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(210)
p.SetState(215)
p.Match(FilterQueryParserKEY)
if p.HasError() {
// Recognition error - abort rule

View File

@@ -3,7 +3,10 @@ package querier
import (
"encoding/json"
"net/http"
"runtime/debug"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/types/authtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -11,14 +14,16 @@ import (
)
type API struct {
set factory.ProviderSettings
querier Querier
}
func NewAPI(querier Querier) *API {
return &API{querier: querier}
func NewAPI(set factory.ProviderSettings, querier Querier) *API {
return &API{set: set, querier: querier}
}
func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
ctx := req.Context()
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -33,6 +38,26 @@ func (a *API) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
a.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
// Validate the query request
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)

View File

@@ -1,15 +1,24 @@
package querier
import (
"bytes"
"context"
"fmt"
"log/slog"
"sort"
"strings"
"text/template"
"time"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
type chSQLQuery struct {
logger *slog.Logger
telemetryStore telemetrystore.TelemetryStore
query qbtypes.ClickHouseQuery
@@ -17,24 +26,29 @@ type chSQLQuery struct {
fromMS uint64
toMS uint64
kind qbtypes.RequestType
vars map[string]qbtypes.VariableItem
}
var _ qbtypes.Query = (*chSQLQuery)(nil)
// newchSQLQuery constructs a chSQLQuery that executes the given raw
// ClickHouse query against telemetryStore over the [tr.From, tr.To]
// millisecond window, after rendering the supplied template variables
// into the query text at execution time.
func newchSQLQuery(
	logger *slog.Logger,
	telemetryStore telemetrystore.TelemetryStore,
	query qbtypes.ClickHouseQuery,
	args []any,
	tr qbtypes.TimeRange,
	kind qbtypes.RequestType,
	variables map[string]qbtypes.VariableItem,
) *chSQLQuery {
	return &chSQLQuery{
		logger:         logger,
		telemetryStore: telemetryStore,
		query:          query,
		args:           args,
		fromMS:         tr.From,
		toMS:           tr.To,
		kind:           kind,
		vars:           variables,
	}
}
@@ -45,6 +59,44 @@ func (q *chSQLQuery) Fingerprint() string {
func (q *chSQLQuery) Window() (uint64, uint64) { return q.fromMS, q.toMS }
// TODO(srikanthccv): cleanup the templating logic
// renderVars substitutes dashboard/template variables into the raw
// ClickHouse query text. Placeholders of the form {{var}}, [[var]] and
// $var are replaced first (longest names first so a short name never
// clobbers part of a longer one), then the result is run through
// text/template to resolve Go-template style references.
func (q *chSQLQuery) renderVars(query string, vars map[string]qbtypes.VariableItem, start, end uint64) (string, error) {
	varsData := map[string]any{}
	for k, v := range vars {
		// Format the variable's value, not the VariableItem struct itself:
		// formatValueForCH's type switch has no case for the struct and
		// would render every variable as the empty string.
		varsData[k] = formatValueForCH(v.Value)
	}
	querybuilder.AssignReservedVars(varsData, start, end)

	keys := make([]string, 0, len(varsData))
	for k := range varsData {
		keys = append(keys, k)
	}
	// Longer names first so replacing "$foo" cannot corrupt "$foo_bar".
	sort.Slice(keys, func(i, j int) bool {
		return len(keys[i]) > len(keys[j])
	})

	for _, k := range keys {
		val := fmt.Sprint(varsData[k])
		query = strings.ReplaceAll(query, fmt.Sprintf("{{%s}}", k), val)
		query = strings.ReplaceAll(query, fmt.Sprintf("[[%s]]", k), val)
		query = strings.ReplaceAll(query, fmt.Sprintf("$%s", k), val)
	}

	tmpl, err := template.New("clickhouse-query").Parse(query)
	if err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "error while replacing template variables")
	}
	var newQuery bytes.Buffer
	// replace go template variables
	if err := tmpl.Execute(&newQuery, varsData); err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "error while replacing template variables")
	}
	return newQuery.String(), nil
}
func (q *chSQLQuery) Execute(ctx context.Context) (*qbtypes.Result, error) {
totalRows := uint64(0)
@@ -57,7 +109,12 @@ func (q *chSQLQuery) Execute(ctx context.Context) (*qbtypes.Result, error) {
elapsed += p.Elapsed
}))
rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, q.query.Query, q.args...)
query, err := q.renderVars(q.query.Query, q.vars, q.fromMS, q.toMS)
if err != nil {
return nil, err
}
rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, q.args...)
if err != nil {
return nil, err
}

181
pkg/querier/format.go Normal file
View File

@@ -0,0 +1,181 @@
package querier
import (
"fmt"
"reflect"
"strings"
)
// toStrArrayString renders the given pre-quoted strings as a bracketed,
// comma-separated ClickHouse array literal, e.g. ("'a'", "'b'") -> ['a','b'].
func toStrArrayString(strs ...string) string {
	var b strings.Builder
	b.WriteByte('[')
	for i, s := range strs {
		if i > 0 {
			b.WriteByte(',')
		}
		b.WriteString(s)
	}
	b.WriteByte(']')
	return b.String()
}
// formatValueForCH renders v as a literal suitable for direct inclusion in a
// ClickHouse query. Pointers are dereferenced first; strings are single-quoted
// with escaping; slices become array literals. Unsupported types render as the
// empty string.
func formatValueForCH(v any) string {
	// Normalize away pointers (and typed nils) before inspecting the type.
	switch x := getPointerValue(v).(type) {
	case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
		return fmt.Sprintf("%d", x)
	case float32, float64:
		return fmt.Sprintf("%f", x)
	case bool:
		return fmt.Sprintf("%v", x)
	case string:
		return fmt.Sprintf("'%s'", quoteEscapedString(x))
	case []string:
		quoted := make([]string, 0, len(x))
		for _, s := range x {
			quoted = append(quoted, fmt.Sprintf("'%s'", quoteEscapedString(s)))
		}
		return toStrArrayString(quoted...)
	case []any:
		if len(x) == 0 {
			return "[]"
		}
		// The first element decides how the whole slice is rendered.
		switch x[0].(type) {
		case string:
			quoted := make([]string, 0, len(x))
			for _, el := range x {
				quoted = append(quoted, fmt.Sprintf("'%s'", quoteEscapedString(el.(string))))
			}
			return toStrArrayString(quoted...)
		case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64, float32, float64, bool:
			// fmt.Sprint gives "[1 2 3]"; collapsing the spaces to commas
			// yields the ClickHouse array literal "[1,2,3]".
			return strings.Join(strings.Fields(fmt.Sprint(x)), ",")
		default:
			return toStrArrayString()
		}
	default:
		return ""
	}
}
// getPointerValue normalizes v by dereferencing pointers and by collapsing
// nil (including typed-nil pointers boxed in an interface) to untyped nil.
// A []any is normalized element-wise. Any other value is returned as-is.
//
// Unlike a hand-written per-type switch, the reflect-based dereference
// handles every pointer type (including pointers-to-pointers), while
// remaining identical for the scalar pointer types callers pass today.
func getPointerValue(v any) any {
	if v == nil {
		return nil
	}
	rv := reflect.ValueOf(v)
	if rv.Kind() == reflect.Ptr {
		// A typed nil pointer stored in an interface is not == nil;
		// collapse it so callers can compare against nil directly.
		if rv.IsNil() {
			return nil
		}
		// Recurse so multi-level pointers fully unwrap.
		return getPointerValue(rv.Elem().Interface())
	}
	if vals, ok := v.([]any); ok {
		out := make([]any, 0, len(vals))
		for _, el := range vals {
			out = append(out, getPointerValue(el))
		}
		return out
	}
	return v
}
// quoteEscapedString escapes backslashes and single quotes so str can be
// embedded inside a single-quoted ClickHouse string literal.
// https://clickhouse.com/docs/en/sql-reference/syntax#string
func quoteEscapedString(str string) string {
	escaper := strings.NewReplacer(`\`, `\\`, `'`, `\'`)
	return escaper.Replace(str)
}
// formatValueForProm renders v for interpolation into a PromQL expression.
// Scalars are stringified as-is (strings unquoted); a list of scalars becomes
// a |-joined alternation, since a single value may contain whitespace.
// Unsupported types render as the empty string.
func formatValueForProm(v any) string {
	switch x := v.(type) {
	case int:
		return fmt.Sprintf("%d", x)
	case float32, float64:
		return fmt.Sprintf("%f", x)
	case string:
		return x
	case bool:
		return fmt.Sprintf("%v", x)
	case []any:
		if len(x) == 0 {
			return ""
		}
		// The first element decides whether the list is renderable.
		switch x[0].(type) {
		case string, int, float32, float64, bool:
			parts := make([]string, 0, len(x))
			for _, el := range x {
				parts = append(parts, fmt.Sprintf("%v", el))
			}
			return strings.Join(parts, "|")
		}
		return ""
	default:
		return ""
	}
}

View File

@@ -3,8 +3,12 @@ package querier
import (
"context"
"fmt"
"math"
"slices"
"sort"
"strings"
"github.com/SigNoz/govaluate"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -106,12 +110,14 @@ func postProcessBuilderQuery[T any](
q *querier,
result *qbtypes.Result,
query qbtypes.QueryBuilderQuery[T],
_ *qbtypes.QueryRangeRequest,
req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
// Apply functions
if len(query.Functions) > 0 {
result = q.applyFunctions(result, query.Functions)
step := query.StepInterval.Duration.Milliseconds()
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
result = q.applyFunctions(result, functions)
}
return result
@@ -130,7 +136,9 @@ func postProcessMetricQuery(
}
if len(query.Functions) > 0 {
result = q.applyFunctions(result, query.Functions)
step := query.StepInterval.Duration.Milliseconds()
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
result = q.applyFunctions(result, functions)
}
// Apply reduce to for scalar request type
@@ -220,6 +228,13 @@ func (q *querier) applyFormulas(ctx context.Context, results map[string]*qbtypes
if req.RequestType == qbtypes.RequestTypeTimeSeries {
result := q.processTimeSeriesFormula(ctx, results, formula, req)
if result != nil {
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
results[name] = result
}
} else if req.RequestType == qbtypes.RequestTypeScalar {
result := q.processScalarFormula(ctx, results, formula, req)
if result != nil {
result = q.applySeriesLimit(result, formula.Limit, formula.Order)
results[name] = result
}
}
@@ -233,7 +248,7 @@ func (q *querier) processTimeSeriesFormula(
ctx context.Context,
results map[string]*qbtypes.Result,
formula qbtypes.QueryBuilderFormula,
_ *qbtypes.QueryRangeRequest,
req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
// Prepare time series data for formula evaluation
timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)
@@ -278,12 +293,185 @@ func (q *querier) processTimeSeriesFormula(
}
if len(formula.Functions) > 0 {
result = q.applyFunctions(result, formula.Functions)
// For formulas, calculate GCD of steps from queries in the expression
step := q.calculateFormulaStep(formula.Expression, req)
functions := q.prepareFillZeroArgsWithStep(formula.Functions, req, step)
result = q.applyFunctions(result, functions)
}
return result
}
// processScalarFormula evaluates a formula expression over scalar query
// results. Each ScalarData result is first lifted into time-series form (one
// point at timestamp 0 per distinct label set) so the shared formula
// evaluator can be reused, then the evaluated series are flattened back into
// a ScalarData result. Returns nil (after logging) if the evaluator cannot be
// built or evaluation fails.
func (q *querier) processScalarFormula(
	ctx context.Context,
	results map[string]*qbtypes.Result,
	formula qbtypes.QueryBuilderFormula,
	req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {
	// Convert scalar data to time series format with zero timestamp
	// so we can run it through the formula evaluator.
	timeSeriesData := make(map[string]*qbtypes.TimeSeriesData)

	for queryName, result := range results {
		if scalarData, ok := result.Value.(*qbtypes.ScalarData); ok {
			// the scalar results would have just one point so negligible cost
			tsData := &qbtypes.TimeSeriesData{
				QueryName:    scalarData.QueryName,
				Aggregations: make([]*qbtypes.AggregationBucket, 0),
			}

			// Locate aggregation columns by their aggregation index so row
			// values can be pulled out by position below.
			aggColumns := make(map[int]int) // aggregation index -> column index
			for colIdx, col := range scalarData.Columns {
				if col.Type == qbtypes.ColumnTypeAggregation {
					aggColumns[int(col.AggregationIndex)] = colIdx
				}
			}

			// Group rows by their (group-by) label set; each group carries the
			// per-aggregation values found on that row.
			type labeledRowData struct {
				labels []*qbtypes.Label
				values map[int]float64 // aggregation index -> value
			}
			rowsByLabels := make(map[string]*labeledRowData)

			for _, row := range scalarData.Data {
				// Collect the group-by labels for this row.
				labels := make([]*qbtypes.Label, 0)
				for i, col := range scalarData.Columns {
					if col.Type == qbtypes.ColumnTypeGroup && i < len(row) {
						l := &qbtypes.Label{
							Key:   col.TelemetryFieldKey,
							Value: getPointerValue(row[i]),
						}
						labels = append(labels, l)
					}
				}
				labelKey := qbtypes.GetUniqueSeriesKey(labels)

				rowData, exists := rowsByLabels[labelKey]
				if !exists {
					rowData = &labeledRowData{
						labels: labels,
						values: make(map[int]float64),
					}
					rowsByLabels[labelKey] = rowData
				}

				// Record each aggregation's numeric value for this label set;
				// non-numeric values are skipped with a warning.
				for aggIdx, colIdx := range aggColumns {
					if colIdx < len(row) {
						if val, ok := toFloat64(row[colIdx]); ok {
							rowData.values[aggIdx] = val
						} else {
							q.logger.WarnContext(ctx, "skipped adding unrecognized value")
						}
					}
				}
			}

			// Sort label keys and aggregation indices for deterministic output
			// (map iteration order is random).
			labelKeys := make([]string, 0, len(rowsByLabels))
			for key := range rowsByLabels {
				labelKeys = append(labelKeys, key)
			}
			slices.Sort(labelKeys)

			aggIndices := make([]int, 0, len(aggColumns))
			for aggIdx := range aggColumns {
				aggIndices = append(aggIndices, aggIdx)
			}
			slices.Sort(aggIndices)

			// Build one bucket per aggregation, each holding a single-point
			// series per label set.
			for _, aggIdx := range aggIndices {
				colIdx := aggColumns[aggIdx]
				bucket := &qbtypes.AggregationBucket{
					Index:  aggIdx,
					Alias:  scalarData.Columns[colIdx].Name,
					Meta:   scalarData.Columns[colIdx].Meta,
					Series: make([]*qbtypes.TimeSeries, 0),
				}

				for _, labelKey := range labelKeys {
					rowData := rowsByLabels[labelKey]
					if val, exists := rowData.values[aggIdx]; exists {
						series := &qbtypes.TimeSeries{
							Labels: rowData.labels,
							Values: []*qbtypes.TimeSeriesValue{{
								Timestamp: 0,
								Value:     val,
							}},
						}
						bucket.Series = append(bucket.Series, series)
					}
				}
				tsData.Aggregations = append(tsData.Aggregations, bucket)
			}

			timeSeriesData[queryName] = tsData
		}
	}

	// No defaulting of missing series to zero for scalar formulas.
	canDefaultZero := make(map[string]bool)
	evaluator, err := qbtypes.NewFormulaEvaluator(formula.Expression, canDefaultZero)
	if err != nil {
		q.logger.ErrorContext(ctx, "failed to create formula evaluator", "error", err, "formula", formula.Name)
		return nil
	}

	formulaSeries, err := evaluator.EvaluateFormula(timeSeriesData)
	if err != nil {
		q.logger.ErrorContext(ctx, "failed to evaluate formula", "error", err, "formula", formula.Name)
		return nil
	}

	// Convert back to scalar format: group columns first, then one __result
	// aggregation column.
	scalarResult := &qbtypes.ScalarData{
		QueryName: formula.Name,
		Columns:   make([]*qbtypes.ColumnDescriptor, 0),
		Data:      make([][]any, 0),
	}

	// Derive group columns from the first series' labels — assumes every
	// series produced by the evaluator carries the same label keys in the
	// same order (NOTE(review): confirm against the evaluator's contract).
	if len(formulaSeries) > 0 && len(formulaSeries[0].Labels) > 0 {
		for _, label := range formulaSeries[0].Labels {
			scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
				TelemetryFieldKey: label.Key,
				QueryName:         formula.Name,
				Type:              qbtypes.ColumnTypeGroup,
			})
		}
	}

	scalarResult.Columns = append(scalarResult.Columns, &qbtypes.ColumnDescriptor{
		TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "__result"},
		QueryName:         formula.Name,
		AggregationIndex:  0,
		Type:              qbtypes.ColumnTypeAggregation,
	})

	// One row per series: label values in column order, formula value (or
	// "n/a" when the series is empty) in the last column.
	for _, series := range formulaSeries {
		row := make([]any, len(scalarResult.Columns))
		for i, label := range series.Labels {
			if i < len(row)-1 {
				row[i] = label.Value
			}
		}
		if len(series.Values) > 0 {
			row[len(row)-1] = series.Values[0].Value
		} else {
			row[len(row)-1] = "n/a"
		}
		scalarResult.Data = append(scalarResult.Data, row)
	}

	return &qbtypes.Result{
		Value: scalarResult,
	}
}
// filterDisabledQueries removes results for disabled queries
func (q *querier) filterDisabledQueries(results map[string]*qbtypes.Result, req *qbtypes.QueryRangeRequest) map[string]*qbtypes.Result {
filtered := make(map[string]*qbtypes.Result)
@@ -638,15 +826,91 @@ func compareValues(a, b any) int {
// toFloat64 attempts to convert a value to float64
func toFloat64(v any) (float64, bool) {
switch val := v.(type) {
case float64:
return val, true
case int64:
return float64(val), true
case int:
return float64(val), true
case int32:
return float64(val), true
val := numericAsFloat(getPointerValue(v))
if math.IsNaN(val) {
return 0, false
}
return 0, false
return val, true
}
// gcd returns the greatest common divisor of a and b using the iterative
// Euclidean algorithm; gcd(a, 0) == a.
func gcd(a, b int64) int64 {
	for b != 0 {
		a, b = b, a%b
	}
	return a
}
// prepareFillZeroArgsWithStep returns functions with any argument-less
// fillZero call given explicit (start, end, step) arguments taken from the
// request window. The input slice is never mutated: it is returned as-is when
// no fillZero needs defaulting, otherwise a copy is modified and returned.
func (q *querier) prepareFillZeroArgsWithStep(functions []qbtypes.Function, req *qbtypes.QueryRangeRequest, step int64) []qbtypes.Function {
	isBareFillZero := func(fn qbtypes.Function) bool {
		return fn.Name == qbtypes.FunctionNameFillZero && len(fn.Args) == 0
	}

	// Avoid copying when there is nothing to default.
	needsCopy := false
	for _, fn := range functions {
		if isBareFillZero(fn) {
			needsCopy = true
			break
		}
	}
	if !needsCopy {
		return functions
	}

	updated := make([]qbtypes.Function, len(functions))
	copy(updated, functions)
	for i := range updated {
		if isBareFillZero(updated[i]) {
			updated[i].Args = []qbtypes.FunctionArg{
				{Value: float64(req.Start)},
				{Value: float64(req.End)},
				{Value: float64(step)},
			}
		}
	}
	return updated
}
// calculateFormulaStep returns the effective step (in milliseconds) for a
// formula: the GCD of the step intervals of every query referenced in the
// expression. It falls back to 60s when the expression cannot be parsed or
// when no referenced query declares a positive step.
func (q *querier) calculateFormulaStep(expression string, req *qbtypes.QueryRangeRequest) int64 {
	// Single named fallback instead of repeating the magic 60000.
	const defaultStepMS int64 = 60000 // 60s

	parsedExpr, err := govaluate.NewEvaluableExpression(expression)
	if err != nil {
		return defaultStepMS
	}

	// Extract base query names (e.g. "A" from "A.0" or "A.my_alias").
	queryNames := make(map[string]bool)
	for _, variable := range parsedExpr.Vars() {
		base, _, _ := strings.Cut(variable, ".")
		queryNames[base] = true
	}

	// Collect positive steps of the referenced queries.
	var steps []int64
	for _, query := range req.CompositeQuery.Queries {
		info := getqueryInfo(query.Spec)
		if queryNames[info.Name] && info.Step.Duration > 0 {
			if stepMS := info.Step.Duration.Milliseconds(); stepMS > 0 {
				steps = append(steps, stepMS)
			}
		}
	}
	if len(steps) == 0 {
		return defaultStepMS
	}

	// GCD of all steps so fillZero aligns with every contributing series.
	result := steps[0]
	for _, s := range steps[1:] {
		result = gcd(result, s)
	}
	return result
}

View File

@@ -1,8 +1,13 @@
package querier
import (
"bytes"
"context"
"fmt"
"log/slog"
"sort"
"strings"
"text/template"
"time"
"github.com/SigNoz/signoz/pkg/errors"
@@ -14,27 +19,36 @@ import (
)
type promqlQuery struct {
logger *slog.Logger
promEngine prometheus.Prometheus
query qbv5.PromQuery
tr qbv5.TimeRange
requestType qbv5.RequestType
vars map[string]qbv5.VariableItem
}
var _ qbv5.Query = (*promqlQuery)(nil)
func newPromqlQuery(
logger *slog.Logger,
promEngine prometheus.Prometheus,
query qbv5.PromQuery,
tr qbv5.TimeRange,
requestType qbv5.RequestType,
variables map[string]qbv5.VariableItem,
) *promqlQuery {
return &promqlQuery{promEngine, query, tr, requestType}
return &promqlQuery{logger, promEngine, query, tr, requestType, variables}
}
func (q *promqlQuery) Fingerprint() string {
query, err := q.renderVars(q.query.Query, q.vars, q.tr.From, q.tr.To)
if err != nil {
q.logger.ErrorContext(context.TODO(), "failed render template variables", "query", q.query.Query)
return ""
}
parts := []string{
"promql",
q.query.Query,
query,
q.query.Step.Duration.String(),
}
@@ -45,23 +59,66 @@ func (q *promqlQuery) Window() (uint64, uint64) {
return q.tr.From, q.tr.To
}
// TODO(srikanthccv): cleanup the templating logic
// renderVars substitutes dashboard/template variables into the raw PromQL
// query text. Placeholders of the form {{var}}, [[var]] and $var are replaced
// first (longest names first so a short name never clobbers part of a longer
// one), then the result is run through text/template to resolve Go-template
// style references.
func (q *promqlQuery) renderVars(query string, vars map[string]qbv5.VariableItem, start, end uint64) (string, error) {
	varsData := map[string]any{}
	for k, v := range vars {
		// Format the variable's value, not the VariableItem struct itself:
		// formatValueForProm's type switch has no case for the struct and
		// would render every variable as the empty string.
		varsData[k] = formatValueForProm(v.Value)
	}
	querybuilder.AssignReservedVars(varsData, start, end)

	keys := make([]string, 0, len(varsData))
	for k := range varsData {
		keys = append(keys, k)
	}
	// Longer names first so replacing "$foo" cannot corrupt "$foo_bar".
	sort.Slice(keys, func(i, j int) bool {
		return len(keys[i]) > len(keys[j])
	})

	for _, k := range keys {
		val := fmt.Sprint(varsData[k])
		query = strings.ReplaceAll(query, fmt.Sprintf("{{%s}}", k), val)
		query = strings.ReplaceAll(query, fmt.Sprintf("[[%s]]", k), val)
		query = strings.ReplaceAll(query, fmt.Sprintf("$%s", k), val)
	}

	tmpl, err := template.New("promql-query").Parse(query)
	if err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "error while replacing template variables")
	}
	var newQuery bytes.Buffer
	// replace go template variables
	if err := tmpl.Execute(&newQuery, varsData); err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "error while replacing template variables")
	}
	return newQuery.String(), nil
}
func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
start := int64(querybuilder.ToNanoSecs(q.tr.From))
end := int64(querybuilder.ToNanoSecs(q.tr.To))
query, err := q.renderVars(q.query.Query, q.vars, q.tr.From, q.tr.To)
if err != nil {
return nil, err
}
qry, err := q.promEngine.Engine().NewRangeQuery(
ctx,
q.promEngine.Storage(),
nil,
q.query.Query,
query,
time.Unix(0, start),
time.Unix(0, end),
q.query.Step.Duration,
)
if err != nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", q.query.Query)
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query %q", query)
}
res := qry.Exec(ctx)
@@ -89,7 +146,7 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
matrix, promErr := res.Matrix()
if promErr != nil {
return nil, errors.WrapInternalf(promErr, errors.CodeInternal, "error getting matrix from promql query %q", q.query.Query)
return nil, errors.WrapInternalf(promErr, errors.CodeInternal, "error getting matrix from promql query %q", query)
}
var series []*qbv5.TimeSeries
@@ -115,7 +172,7 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
series = append(series, &s)
}
warnings, _ := res.Warnings.AsStrings(q.query.Query, 10, 0)
warnings, _ := res.Warnings.AsStrings(query, 10, 0)
return &qbv5.Result{
Type: q.requestType,

View File

@@ -107,6 +107,11 @@ func adjustTimeRangeForShift[T any](spec qbtypes.QueryBuilderQuery[T], tr qbtype
func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtypes.QueryRangeRequest) (*qbtypes.QueryRangeResponse, error) {
tmplVars := req.Variables
if tmplVars == nil {
tmplVars = make(map[string]qbtypes.VariableItem)
}
// First pass: collect all metric names that need temporality
metricNames := make([]string, 0)
for idx, query := range req.CompositeQuery.Queries {
@@ -185,7 +190,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
if !ok {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid promql query spec %T", query.Spec)
}
promqlQuery := newPromqlQuery(q.promEngine, promQuery, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
promqlQuery := newPromqlQuery(q.logger, q.promEngine, promQuery, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType, tmplVars)
queries[promQuery.Name] = promqlQuery
steps[promQuery.Name] = promQuery.Step
case qbtypes.QueryTypeClickHouseSQL:
@@ -193,20 +198,20 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
if !ok {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid clickhouse query spec %T", query.Spec)
}
chSQLQuery := newchSQLQuery(q.telemetryStore, chQuery, nil, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
chSQLQuery := newchSQLQuery(q.logger, q.telemetryStore, chQuery, nil, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType, tmplVars)
queries[chQuery.Name] = chSQLQuery
case qbtypes.QueryTypeBuilder:
switch spec := query.Spec.(type) {
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
bq := newBuilderQuery(q.telemetryStore, q.traceStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
bq := newBuilderQuery(q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
@@ -219,7 +224,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
}
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType, req.Variables)
bq := newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval
default:
@@ -398,9 +403,9 @@ func (q *querier) executeWithCache(ctx context.Context, orgID valuer.UUID, query
func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtypes.TimeRange) qbtypes.Query {
switch qt := originalQuery.(type) {
case *promqlQuery:
return newPromqlQuery(q.promEngine, qt.query, timeRange, qt.requestType)
return newPromqlQuery(q.logger, q.promEngine, qt.query, timeRange, qt.requestType, qt.vars)
case *chSQLQuery:
return newchSQLQuery(q.telemetryStore, qt.query, qt.args, timeRange, qt.kind)
return newchSQLQuery(q.logger, q.telemetryStore, qt.query, qt.args, timeRange, qt.kind, qt.vars)
case *builderQuery[qbtypes.TraceAggregation]:
qt.spec.ShiftBy = extractShiftFromBuilderQuery(qt.spec)
adjustedTimeRange := adjustTimeRangeForShift(qt.spec, timeRange, qt.kind)

View File

@@ -124,7 +124,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
LicensingAPI: nooplicensing.NewLicenseAPI(),
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Querier),
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier),
})
if err != nil {
return nil, err

View File

@@ -0,0 +1,153 @@
package querybuilder
import (
	"fmt"
	"regexp"
	"sort"
	"strings"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// HavingExpressionRewriter rewrites user-facing aggregation references in
// HAVING expressions (aliases, aggregation expressions, and __result
// shorthands) into the SQL column names produced by the statement builders.
type HavingExpressionRewriter struct {
	// columnMap maps a user-visible reference to its SQL column name; it is
	// rebuilt by each RewriteFor* call.
	columnMap map[string]string
}

// NewHavingExpressionRewriter creates a new having expression rewriter
func NewHavingExpressionRewriter() *HavingExpressionRewriter {
	return &HavingExpressionRewriter{
		columnMap: make(map[string]string),
	}
}
// RewriteForTraces rewrites expression using the column mapping derived from
// the given trace aggregations.
func (r *HavingExpressionRewriter) RewriteForTraces(expression string, aggregations []qbtypes.TraceAggregation) string {
	r.buildTraceColumnMap(aggregations)
	return r.rewriteExpression(expression)
}

// RewriteForLogs rewrites expression using the column mapping derived from
// the given log aggregations.
func (r *HavingExpressionRewriter) RewriteForLogs(expression string, aggregations []qbtypes.LogAggregation) string {
	r.buildLogColumnMap(aggregations)
	return r.rewriteExpression(expression)
}

// RewriteForMetrics rewrites expression using the column mapping derived from
// the given metric aggregations.
func (r *HavingExpressionRewriter) RewriteForMetrics(expression string, aggregations []qbtypes.MetricAggregation) string {
	r.buildMetricColumnMap(aggregations)
	return r.rewriteExpression(expression)
}
// buildTraceColumnMap populates columnMap for trace aggregations: each
// aggregation's alias, expression, and "__result<N>" shorthand map to the
// builder's "__result_<N>" column; a single aggregation also claims the bare
// "__result" shorthand.
func (r *HavingExpressionRewriter) buildTraceColumnMap(aggregations []qbtypes.TraceAggregation) {
	m := map[string]string{}
	single := len(aggregations) == 1
	for idx, agg := range aggregations {
		col := fmt.Sprintf("__result_%d", idx)
		if alias := agg.Alias; alias != "" {
			m[alias] = col
		}
		m[agg.Expression] = col
		m[fmt.Sprintf("__result%d", idx)] = col
		if single {
			m["__result"] = col
		}
	}
	r.columnMap = m
}
// buildLogColumnMap populates columnMap for log aggregations; the mapping
// rules are identical to the trace case: alias, expression, and
// "__result<N>" all resolve to "__result_<N>", plus bare "__result" when
// there is exactly one aggregation.
func (r *HavingExpressionRewriter) buildLogColumnMap(aggregations []qbtypes.LogAggregation) {
	r.columnMap = make(map[string]string, len(aggregations)*3)
	for idx, agg := range aggregations {
		sqlColumn := fmt.Sprintf("__result_%d", idx)
		names := []string{agg.Expression, fmt.Sprintf("__result%d", idx)}
		if agg.Alias != "" {
			names = append(names, agg.Alias)
		}
		if len(aggregations) == 1 {
			names = append(names, "__result")
		}
		for _, name := range names {
			r.columnMap[name] = sqlColumn
		}
	}
}
// buildMetricColumnMap populates columnMap for metric aggregations. Metrics
// always aggregate into a single SQL column named "value", so every
// user-facing spelling — space/time aggregation function applied to the
// metric name, their composition, the bare metric name, and the __result
// shorthands — maps to it.
func (r *HavingExpressionRewriter) buildMetricColumnMap(aggregations []qbtypes.MetricAggregation) {
	r.columnMap = make(map[string]string)
	for idx, agg := range aggregations {
		sqlColumn := "value"
		metricName := agg.MetricName

		// e.g. "sum(http_requests)" for a space aggregation.
		if agg.SpaceAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s)", agg.SpaceAggregation.StringValue(), metricName)] = sqlColumn
		}
		// e.g. "rate(http_requests)" for a time aggregation.
		if agg.TimeAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s)", agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
		}
		// e.g. "sum(rate(http_requests))" when both are set.
		if agg.TimeAggregation.StringValue() != "" && agg.SpaceAggregation.StringValue() != "" {
			r.columnMap[fmt.Sprintf("%s(%s(%s))", agg.SpaceAggregation.StringValue(), agg.TimeAggregation.StringValue(), metricName)] = sqlColumn
		}
		// Bare metric name when no aggregation function is set.
		if agg.TimeAggregation.StringValue() == "" && agg.SpaceAggregation.StringValue() == "" {
			r.columnMap[metricName] = sqlColumn
		}

		// NOTE(review): unlike the trace/log maps, "__result" is mapped
		// unconditionally here rather than only for a single aggregation —
		// presumably harmless since every metric column is "value", but
		// confirm the asymmetry is intentional.
		r.columnMap["__result"] = sqlColumn
		r.columnMap[fmt.Sprintf("__result%d", idx)] = sqlColumn
	}
}
// rewriteExpression replaces user-facing aggregation references in a HAVING
// expression with their SQL column names per r.columnMap. Quoted string
// literals are shielded from rewriting by temporarily swapping them out for
// unique placeholders and restoring them afterwards.
func (r *HavingExpressionRewriter) rewriteExpression(expression string) string {
	// Shield quoted strings so references inside literals are left untouched.
	quotedStrings := make(map[string]string)
	quotePattern := regexp.MustCompile(`'[^']*'|"[^"]*"`)
	quotedIdx := 0
	expression = quotePattern.ReplaceAllStringFunc(expression, func(match string) string {
		placeholder := fmt.Sprintf("__QUOTED_%d__", quotedIdx)
		quotedStrings[placeholder] = match
		quotedIdx++
		return placeholder
	})

	type mapping struct {
		from string
		to   string
	}
	mappings := make([]mapping, 0, len(r.columnMap))
	for from, to := range r.columnMap {
		mappings = append(mappings, mapping{from: from, to: to})
	}
	// Longest references first so "foo_bar" is rewritten before "foo";
	// sort.Slice replaces the previous hand-rolled O(n^2) selection sort.
	sort.Slice(mappings, func(i, j int) bool {
		return len(mappings[i].from) > len(mappings[j].from)
	})

	for _, m := range mappings {
		if strings.Contains(m.from, "(") {
			// escape special regex characters in the function name; no
			// trailing \b since ")" is not a word character.
			escapedFrom := regexp.QuoteMeta(m.from)
			pattern := regexp.MustCompile(`\b` + escapedFrom)
			expression = pattern.ReplaceAllString(expression, m.to)
		} else {
			pattern := regexp.MustCompile(`\b` + regexp.QuoteMeta(m.from) + `\b`)
			expression = pattern.ReplaceAllString(expression, m.to)
		}
	}

	// Restore the shielded string literals (placeholders are unique, so a
	// single replacement each suffices).
	for placeholder, original := range quotedStrings {
		expression = strings.Replace(expression, placeholder, original, 1)
	}
	return expression
}

View File

@@ -112,7 +112,7 @@ func (b *resourceFilterStatementBuilder[T]) Build(
return nil, err
}
if err := b.addConditions(ctx, q, start, end, query, keys); err != nil {
if err := b.addConditions(ctx, q, start, end, query, keys, variables); err != nil {
return nil, err
}
@@ -130,6 +130,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
start, end uint64,
query qbtypes.QueryBuilderQuery[T],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) error {
// Add filter condition if present
if query.Filter != nil && query.Filter.Expression != "" {
@@ -140,6 +141,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
ConditionBuilder: b.conditionBuilder,
FieldKeys: keys,
SkipFullTextFilter: true,
Variables: variables,
})
if err != nil {

View File

@@ -1,6 +1,9 @@
package querybuilder
import "math"
import (
"fmt"
"math"
)
const (
NsToSeconds = 1000000000
@@ -83,3 +86,19 @@ func MinAllowedStepIntervalForMetric(start, end uint64) uint64 {
// return the nearest lower multiple of 60
return step - step%60
}
// AssignReservedVars injects the reserved time-range variables into vars,
// exposing the query window in seconds, milliseconds and nanoseconds, plus
// ClickHouse toDateTime() expressions for the window bounds.
func AssignReservedVars(vars map[string]any, start, end uint64) {
	startNs, endNs := ToNanoSecs(start), ToNanoSecs(end)
	startSec, endSec := startNs/1_000_000_000, endNs/1_000_000_000
	startMs, endMs := startNs/1_000_000, endNs/1_000_000

	vars["start_timestamp"] = startSec
	vars["end_timestamp"] = endSec
	vars["start_timestamp_ms"] = startMs
	vars["end_timestamp_ms"] = endMs
	// Legacy aliases kept for dashboards written against the old variable names.
	vars["SIGNOZ_START_TIME"] = startMs
	vars["SIGNOZ_END_TIME"] = endMs
	vars["start_timestamp_nano"] = startNs
	vars["end_timestamp_nano"] = endNs
	vars["start_datetime"] = fmt.Sprintf("toDateTime(%d)", startSec)
	vars["end_datetime"] = fmt.Sprintf("toDateTime(%d)", endSec)
}

View File

@@ -29,6 +29,7 @@ type filterExpressionVisitor struct {
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
skipResourceFilter bool
skipFullTextFilter bool
variables map[string]qbtypes.VariableItem
}
type FilterExprVisitorOpts struct {
@@ -41,6 +42,7 @@ type FilterExprVisitorOpts struct {
JsonKeyToKey qbtypes.JsonKeyToFieldFunc
SkipResourceFilter bool
SkipFullTextFilter bool
Variables map[string]qbtypes.VariableItem
}
// newFilterExpressionVisitor creates a new filterExpressionVisitor
@@ -55,6 +57,7 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
jsonKeyToKey: opts.JsonKeyToKey,
skipResourceFilter: opts.SkipResourceFilter,
skipFullTextFilter: opts.SkipFullTextFilter,
variables: opts.Variables,
}
}
@@ -320,10 +323,46 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
if ctx.InClause() != nil || ctx.NotInClause() != nil {
var values []any
var retValue any
if ctx.InClause() != nil {
values = v.Visit(ctx.InClause()).([]any)
retValue = v.Visit(ctx.InClause())
} else if ctx.NotInClause() != nil {
values = v.Visit(ctx.NotInClause()).([]any)
retValue = v.Visit(ctx.NotInClause())
}
switch ret := retValue.(type) {
case []any:
values = ret
case any:
values = []any{ret}
}
if len(values) == 1 {
if var_, ok := values[0].(string); ok {
// check if this is a variables
var ok bool
var varItem qbtypes.VariableItem
varItem, ok = v.variables[var_]
// if not present, try without `$` prefix
if !ok {
varItem, ok = v.variables[var_[1:]]
}
if ok {
// we have a variable, now check for dynamic variable
if varItem.Type == qbtypes.DynamicVariableType {
// check if it is special value to skip entire filter, if so skip it
if all_, ok := varItem.Value.(string); ok && all_ == "__all__" {
return ""
}
}
switch varValues := varItem.Value.(type) {
case []any:
values = varValues
case any:
values = []any{varValues}
}
}
}
}
op := qbtypes.FilterOperatorIn
@@ -378,6 +417,26 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
if len(values) > 0 {
value := v.Visit(values[0])
if var_, ok := value.(string); ok {
// check if this is a variables
var ok bool
var varItem qbtypes.VariableItem
varItem, ok = v.variables[var_]
// if not present, try without `$` prefix
if !ok {
varItem, ok = v.variables[var_[1:]]
}
if ok {
switch varValues := varItem.Value.(type) {
case []any:
value = varValues[0]
case any:
value = varValues
}
}
}
var op qbtypes.FilterOperator
// Handle each type of comparison
@@ -433,12 +492,18 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
// VisitInClause handles IN expressions
func (v *filterExpressionVisitor) VisitInClause(ctx *grammar.InClauseContext) any {
return v.Visit(ctx.ValueList())
if ctx.ValueList() != nil {
return v.Visit(ctx.ValueList())
}
return v.Visit(ctx.Value())
}
// VisitNotInClause handles NOT IN expressions
func (v *filterExpressionVisitor) VisitNotInClause(ctx *grammar.NotInClauseContext) any {
return v.Visit(ctx.ValueList())
if ctx.ValueList() != nil {
return v.Visit(ctx.ValueList())
}
return v.Visit(ctx.Value())
}
// VisitValueList handles comma-separated value lists

View File

@@ -35,18 +35,6 @@ func TestConditionFor(t *testing.T) {
expectedArgs: []any{"error message"},
expectedError: nil,
},
{
name: "Not Equal operator - timestamp",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorNotEqual,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <> ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
name: "Greater Than operator - number attribute",
key: telemetrytypes.TelemetryFieldKey{
@@ -73,30 +61,6 @@ func TestConditionFor(t *testing.T) {
expectedArgs: []any{float64(1024), true},
expectedError: nil,
},
{
name: "Greater Than Or Equal operator - timestamp",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorGreaterThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp >= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
name: "Less Than Or Equal operator - timestamp",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorLessThanOrEq,
value: uint64(1617979338000000000),
expectedSQL: "timestamp <= ?",
expectedArgs: []any{uint64(1617979338000000000)},
expectedError: nil,
},
{
name: "Like operator - body",
key: telemetrytypes.TelemetryFieldKey{
@@ -160,52 +124,6 @@ func TestConditionFor(t *testing.T) {
expectedArgs: []any{"%admin%", true},
expectedError: nil,
},
{
name: "Between operator - timestamp",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
name: "Between operator - invalid value",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorBetween,
value: "invalid",
expectedSQL: "",
expectedError: qbtypes.ErrBetweenValues,
},
{
name: "Between operator - insufficient values",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorBetween,
value: []any{uint64(1617979338000000000)},
expectedSQL: "",
expectedError: qbtypes.ErrBetweenValues,
},
{
name: "Not Between operator - timestamp",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorNotBetween,
value: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedSQL: "timestamp NOT BETWEEN ? AND ?",
expectedArgs: []any{uint64(1617979338000000000), uint64(1617979348000000000)},
expectedError: nil,
},
{
name: "In operator - severity_text",
key: telemetrytypes.TelemetryFieldKey{
@@ -263,17 +181,6 @@ func TestConditionFor(t *testing.T) {
expectedSQL: "true",
expectedError: nil,
},
{
name: "Exists operator - number field",
key: telemetrytypes.TelemetryFieldKey{
Name: "timestamp",
FieldContext: telemetrytypes.FieldContextLog,
},
operator: qbtypes.FilterOperatorExists,
value: nil,
expectedSQL: "true",
expectedError: nil,
},
{
name: "Exists operator - map field",
key: telemetrytypes.TelemetryFieldKey{

View File

@@ -10,5 +10,7 @@ var (
FieldDataType: telemetrytypes.FieldDataTypeString,
}
BodyJSONStringSearchPrefix = `body.`
IntrinsicFields = []string{"timestamp", "body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number"}
IntrinsicFields = []string{
"body", "trace_id", "span_id", "trace_flags", "severity_text", "severity_number", "scope_name", "scope_version",
}
)

View File

@@ -155,7 +155,7 @@ func (b *logQueryStatementBuilder) buildListQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -249,7 +249,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
}
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -275,7 +275,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Group by all dimensions
sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
// Rewrite having expression to use SQL column names
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -289,7 +292,9 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
} else {
sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -369,7 +374,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, LogsV2TableName))
// Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -379,7 +384,9 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
// Add order by
@@ -423,6 +430,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) ([]string, error) {
var filterWhereClause *sqlbuilder.WhereClause
@@ -439,6 +447,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
FullTextColumn: b.fullTextColumn,
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
Variables: variables,
})
if err != nil {

View File

@@ -0,0 +1,78 @@
package telemetrylogs
import (
"testing"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/stretchr/testify/assert"
)
// TestHavingExpressionRewriter_LogQueries verifies that RewriteForLogs maps
// alias, raw-expression, and __result-style references in a HAVING expression
// to the positional __result_N column names produced for log aggregations.
func TestHavingExpressionRewriter_LogQueries(t *testing.T) {
	type testCase struct {
		name               string // subtest label
		havingExpression   string // raw HAVING expression as written by the user
		aggregations       []qbtypes.LogAggregation
		expectedExpression string // expression after rewriting to __result_N form
	}

	cases := []testCase{
		{
			name:             "single aggregation with alias",
			havingExpression: "total_logs > 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total_logs"},
			},
			expectedExpression: "__result_0 > 1000",
		},
		{
			name:             "multiple aggregations with complex expression",
			havingExpression: "(total > 100 AND avg_duration < 500) OR total > 10000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: "total"},
				{Expression: "avg(duration)", Alias: "avg_duration"},
			},
			expectedExpression: "(__result_0 > 100 AND __result_1 < 500) OR __result_0 > 10000",
		},
		{
			name:             "__result reference for single aggregation",
			havingExpression: "__result > 500",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
			},
			expectedExpression: "__result_0 > 500",
		},
		{
			name:             "expression reference",
			havingExpression: "sum(bytes) > 1024000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "sum(bytes)", Alias: ""},
			},
			expectedExpression: "__result_0 > 1024000",
		},
		{
			name:             "__result{number} format",
			havingExpression: "__result0 > 100 AND __result1 < 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "sum(bytes)", Alias: ""},
			},
			expectedExpression: "__result_0 > 100 AND __result_1 < 1000",
		},
		{
			name:             "mixed aliases and expressions",
			havingExpression: "error_count > 10 AND count() < 1000",
			aggregations: []qbtypes.LogAggregation{
				{Expression: "count()", Alias: ""},
				{Expression: "countIf(level='error')", Alias: "error_count"},
			},
			expectedExpression: "__result_1 > 10 AND __result_0 < 1000",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// A fresh rewriter per subtest keeps the cases independent.
			got := querybuilder.NewHavingExpressionRewriter().RewriteForLogs(tc.havingExpression, tc.aggregations)
			assert.Equal(t, tc.expectedExpression, got)
		})
	}
}

View File

@@ -4,11 +4,14 @@ import (
"context"
"fmt"
"log/slog"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrytraces"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -128,6 +131,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
END as priority`).From(t.tracesDBName + "." + t.tracesFieldsTblName)
var limit int
searchTexts := []string{}
conds := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
@@ -146,6 +151,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
fieldKeyConds = append(fieldKeyConds, sb.Like("tag_key", "%"+fieldKeySelector.Name+"%"))
}
searchTexts = append(searchTexts, fieldKeySelector.Name)
// now look at the field context
if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
fieldKeyConds = append(fieldKeyConds, sb.E("tag_type", fieldKeySelector.FieldContext.TagType()))
@@ -207,6 +214,28 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTracesKeys.Error())
}
staticKeys := []string{"isRoot", "isEntrypoint"}
staticKeys = append(staticKeys, telemetrytraces.IntrinsicFields...)
staticKeys = append(staticKeys, telemetrytraces.CalculatedFields...)
// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
found := false
for _, v := range searchTexts {
if v == "" || strings.Contains(key, v) {
found = true
break
}
}
if found {
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextSpan,
Signal: telemetrytypes.SignalTraces,
})
}
}
return keys, nil
}
@@ -258,6 +287,8 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
var limit int
conds := []string{}
searchTexts := []string{}
for _, fieldKeySelector := range fieldKeySelectors {
if fieldKeySelector.StartUnixMilli != 0 {
@@ -274,6 +305,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
} else {
fieldKeyConds = append(fieldKeyConds, sb.Like("tag_key", "%"+fieldKeySelector.Name+"%"))
}
searchTexts = append(searchTexts, fieldKeySelector.Name)
// now look at the field context
if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified {
@@ -335,6 +367,27 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
return nil, errors.Wrapf(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetLogsKeys.Error())
}
staticKeys := []string{}
staticKeys = append(staticKeys, telemetrylogs.IntrinsicFields...)
// add matching intrinsic and matching calculated fields
for _, key := range staticKeys {
found := false
for _, v := range searchTexts {
if v == "" || strings.Contains(key, v) {
found = true
break
}
}
if found {
keys = append(keys, &telemetrytypes.TelemetryFieldKey{
Name: key,
FieldContext: telemetrytypes.FieldContextLog,
Signal: telemetrytypes.SignalLogs,
})
}
}
return keys, nil
}

View File

@@ -84,7 +84,7 @@ func (b *metricQueryStatementBuilder) Build(
return nil, err
}
return b.buildPipelineStatement(ctx, start, end, query, keys)
return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
}
// Fastpath (no fingerprint grouping)
@@ -140,6 +140,7 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
var (
cteFragments []string
@@ -179,13 +180,14 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
// time_series_cte
// this is applicable for all the queries
if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys); err != nil {
if timeSeriesCTE, timeSeriesCTEArgs, err = b.buildTimeSeriesCTE(ctx, start, end, query, keys, variables); err != nil {
return nil, err
}
if b.canShortCircuitDelta(query) {
// spatial_aggregation_cte directly for certain delta queries
if frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); frag != "" {
frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -199,7 +201,8 @@ func (b *metricQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
if frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); frag != "" {
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -266,6 +269,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (string, []any, error) {
sb := sqlbuilder.NewSelectBuilder()
@@ -278,6 +282,7 @@ func (b *metricQueryStatementBuilder) buildTimeSeriesCTE(
ConditionBuilder: b.cb,
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
})
if err != nil {
return "", nil, err
@@ -499,9 +504,19 @@ func (b *metricQueryStatementBuilder) buildFinalSelect(
sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else {
sb.Select("*")
sb.From("__spatial_aggregation_cte")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}
}
q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

View File

@@ -129,7 +129,10 @@ func (c *conditionBuilder) conditionFor(
// key membership checks, so depending on the column type, the condition changes
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
// if the field is intrinsic, it always exists
if slices.Contains(IntrinsicFields, tblFieldName) || slices.Contains(CalculatedFields, tblFieldName) {
if slices.Contains(IntrinsicFields, tblFieldName) ||
slices.Contains(CalculatedFields, tblFieldName) ||
slices.Contains(IntrinsicFieldsDeprecated, tblFieldName) ||
slices.Contains(CalculatedFieldsDeprecated, tblFieldName) {
return "true", nil
}
@@ -202,7 +205,10 @@ func (c *conditionBuilder) ConditionFor(
if operator.AddDefaultExistsFilter() {
// skip adding exists filter for intrinsic fields
field, _ := c.fm.FieldFor(ctx, key)
if slices.Contains(IntrinsicFields, field) || slices.Contains(CalculatedFields, field) {
if slices.Contains(IntrinsicFields, field) ||
slices.Contains(IntrinsicFieldsDeprecated, field) ||
slices.Contains(CalculatedFields, field) ||
slices.Contains(CalculatedFieldsDeprecated, field) {
return condition, nil
}

View File

@@ -2,7 +2,6 @@ package telemetrytraces
var (
IntrinsicFields = []string{
"timestamp",
"trace_id",
"span_id",
"trace_state",
@@ -15,6 +14,8 @@ var (
"status_code",
"status_message",
"status_code_string",
}
IntrinsicFieldsDeprecated = []string{
"traceID",
"spanID",
"parentSpanID",
@@ -36,7 +37,9 @@ var (
"db_operation",
"has_error",
"is_remote",
}
CalculatedFieldsDeprecated = []string{
"responseStatusCode",
"externalHttpUrl",
"httpUrl",

View File

@@ -98,7 +98,7 @@ func (b *traceQueryStatementBuilder) Build(
case qbtypes.RequestTypeTimeSeries:
return b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeScalar:
return b.buildScalarQuery(ctx, q, query, start, end, keys, false, variables)
return b.buildScalarQuery(ctx, q, query, start, end, keys, variables, false, false)
}
return nil, fmt.Errorf("unsupported request type: %s", requestType)
@@ -195,7 +195,7 @@ func (b *traceQueryStatementBuilder) buildListQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
// Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -289,7 +289,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
}
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -300,7 +300,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
if query.Limit > 0 && len(query.GroupBy) > 0 {
// build the scalar “top/bottom-N” query in its own builder.
cteSB := sqlbuilder.NewSelectBuilder()
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true, variables)
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, variables, true, true)
if err != nil {
return nil, err
}
@@ -315,7 +315,9 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Group by all dimensions
sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -328,7 +330,9 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
} else {
sb.GroupBy("ALL")
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
combinedArgs := append(allGroupByArgs, allAggChArgs...)
@@ -353,8 +357,9 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
skipResourceCTE bool,
variables map[string]qbtypes.VariableItem,
skipResourceCTE bool,
skipHaving bool,
) (*qbtypes.Statement, error) {
var (
@@ -406,7 +411,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
// Add filter conditions
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys)
warnings, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
@@ -415,8 +420,10 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(
sb.GroupBy("ALL")
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
sb.Having(query.Having.Expression)
if query.Having != nil && query.Having.Expression != "" && !skipHaving {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForTraces(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
// Add order by
@@ -460,6 +467,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) ([]string, error) {
var filterWhereClause *sqlbuilder.WhereClause
@@ -473,6 +481,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
ConditionBuilder: b.cb,
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
})
if err != nil {