mirror of
https://github.com/SigNoz/signoz.git
synced 2026-02-07 02:12:11 +00:00
Compare commits
14 Commits
multiselec
...
feat/ha
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ce48733f4a | ||
|
|
900877a5bb | ||
|
|
c63667c0e7 | ||
|
|
ed597f00c0 | ||
|
|
4957d3ae93 | ||
|
|
8835e3493d | ||
|
|
027a1631ef | ||
|
|
d7a6607a25 | ||
|
|
7a58bc58c9 | ||
|
|
88be23c3e3 | ||
|
|
8f095dfbc9 | ||
|
|
72207691a3 | ||
|
|
8998ca652e | ||
|
|
f4ae5f19ff |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -54,6 +54,7 @@ ee/query-service/tests/test-deploy/data/
|
||||
bin/
|
||||
.local/
|
||||
*/query-service/queries.active
|
||||
ee/query-service/db
|
||||
|
||||
# e2e
|
||||
|
||||
|
||||
@@ -313,6 +313,9 @@ func (p *BaseSeasonalProvider) getScore(
|
||||
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *v3.Series, value float64, idx int,
|
||||
) float64 {
|
||||
expectedValue := p.getExpectedValue(series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries, idx)
|
||||
if expectedValue < 0 {
|
||||
expectedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
||||
}
|
||||
return (value - expectedValue) / p.getStdDev(weekSeries)
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,8 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/license"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
@@ -21,6 +23,7 @@ import (
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -54,6 +57,7 @@ type APIHandler struct {
|
||||
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
|
||||
preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(signoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
|
||||
|
||||
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
||||
Reader: opts.DataConnector,
|
||||
@@ -71,6 +75,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
||||
UseTraceNewSchema: opts.UseTraceNewSchema,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
Signoz: signoz,
|
||||
Preference: preference,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -5,21 +5,20 @@ import (
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
|
||||
"github.com/jmoiron/sqlx"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache"
|
||||
basechr "github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
)
|
||||
|
||||
type ClickhouseReader struct {
|
||||
conn clickhouse.Conn
|
||||
appdb *sqlx.DB
|
||||
appdb sqlstore.SQLStore
|
||||
*basechr.ClickHouseReader
|
||||
}
|
||||
|
||||
func NewDataConnector(
|
||||
localDB *sqlx.DB,
|
||||
sqlDB sqlstore.SQLStore,
|
||||
ch clickhouse.Conn,
|
||||
promConfigPath string,
|
||||
lm interfaces.FeatureLookup,
|
||||
@@ -29,10 +28,10 @@ func NewDataConnector(
|
||||
fluxIntervalForTraceDetail time.Duration,
|
||||
cache cache.Cache,
|
||||
) *ClickhouseReader {
|
||||
chReader := basechr.NewReader(localDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
|
||||
chReader := basechr.NewReader(sqlDB, ch, promConfigPath, lm, cluster, useLogsNewSchema, useTraceNewSchema, fluxIntervalForTraceDetail, cache)
|
||||
return &ClickhouseReader{
|
||||
conn: ch,
|
||||
appdb: localDB,
|
||||
appdb: sqlDB,
|
||||
ClickHouseReader: chReader,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,7 +43,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/cache"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
|
||||
@@ -116,10 +115,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -146,7 +141,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
var reader interfaces.DataConnector
|
||||
qb := db.NewDataConnector(
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
|
||||
serverOptions.PromConfigPath,
|
||||
lm,
|
||||
@@ -196,10 +191,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
// initiate opamp
|
||||
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
opAmpModel.InitDB(serverOptions.SigNoz.SQLStore)
|
||||
|
||||
integrationsController, err := integrations.NewController(serverOptions.SigNoz.SQLStore)
|
||||
if err != nil {
|
||||
@@ -225,7 +217,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
// initiate agent config handler
|
||||
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
|
||||
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
Store: serverOptions.SigNoz.SQLStore,
|
||||
AgentFeatures: []agentConf.AgentFeature{logParsingPipelineController},
|
||||
})
|
||||
if err != nil {
|
||||
|
||||
@@ -7,12 +7,14 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/app"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/config"
|
||||
"github.com/SigNoz/signoz/pkg/config/envprovider"
|
||||
"github.com/SigNoz/signoz/pkg/config/fileprovider"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/auth"
|
||||
baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
|
||||
@@ -94,12 +96,17 @@ func main() {
|
||||
|
||||
version.Info.PrettyPrint(config.Version)
|
||||
|
||||
sqlStoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlStoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
zap.L().Fatal("Failed to add postgressqlstore factory", zap.Error(err))
|
||||
}
|
||||
|
||||
signoz, err := signoz.New(
|
||||
context.Background(),
|
||||
config,
|
||||
signoz.NewCacheProviderFactories(),
|
||||
signoz.NewWebProviderFactories(),
|
||||
signoz.NewSQLStoreProviderFactories(),
|
||||
sqlStoreFactories,
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
)
|
||||
if err != nil {
|
||||
|
||||
@@ -2,11 +2,29 @@ package postgressqlstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
var (
|
||||
Identity = "id"
|
||||
Integer = "bigint"
|
||||
Text = "text"
|
||||
)
|
||||
|
||||
var (
|
||||
Org = "org"
|
||||
User = "user"
|
||||
)
|
||||
|
||||
var (
|
||||
OrgReference = `("org_id") REFERENCES "organizations" ("id")`
|
||||
UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
|
||||
)
|
||||
|
||||
type dialect struct {
|
||||
}
|
||||
|
||||
@@ -103,7 +121,7 @@ func (dialect *dialect) GetColumnType(ctx context.Context, bun bun.IDB, table st
|
||||
|
||||
err := bun.NewSelect().
|
||||
ColumnExpr("data_type").
|
||||
TableExpr("information_schema.columns").
|
||||
TableExpr("").
|
||||
Where("table_name = ?", table).
|
||||
Where("column_name = ?", column).
|
||||
Scan(ctx, &columnType)
|
||||
@@ -130,6 +148,22 @@ func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table str
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) IndexExists(ctx context.Context, bun bun.IDB, table string, index string) (bool, error) {
|
||||
var count int
|
||||
err := bun.NewSelect().
|
||||
ColumnExpr("COUNT(*)").
|
||||
TableExpr("pg_indexes").
|
||||
Where("tablename = ?", table).
|
||||
Where("indexname = ?", index).
|
||||
Scan(ctx, &count)
|
||||
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return count > 0, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
|
||||
oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
|
||||
if err != nil {
|
||||
@@ -174,7 +208,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
|
||||
return true, nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
|
||||
func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
|
||||
if reference == "" {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||
}
|
||||
exists, err := dialect.TableExists(ctx, bun, newModel)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -183,10 +220,18 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
||||
return nil
|
||||
}
|
||||
|
||||
fkReference := ""
|
||||
if reference == Org {
|
||||
fkReference = OrgReference
|
||||
} else if reference == User {
|
||||
fkReference = UserReference
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(newModel).
|
||||
ForeignKey(fkReference).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
@@ -209,3 +254,115 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
|
||||
if reference == "" {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||
}
|
||||
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
|
||||
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
|
||||
|
||||
columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if columnType == Text {
|
||||
return nil
|
||||
}
|
||||
|
||||
fkReference := ""
|
||||
if reference == Org {
|
||||
fkReference = OrgReference
|
||||
} else if reference == User {
|
||||
fkReference = UserReference
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(newModel).
|
||||
ForeignKey(fkReference).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = cb(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewDropTable().
|
||||
IfExists().
|
||||
Model(oldModel).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
|
||||
if reference == "" {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
|
||||
}
|
||||
oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
|
||||
newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name
|
||||
|
||||
identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if identityExists {
|
||||
return nil
|
||||
}
|
||||
|
||||
fkReference := ""
|
||||
if reference == Org {
|
||||
fkReference = OrgReference
|
||||
} else if reference == User {
|
||||
fkReference = UserReference
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewCreateTable().
|
||||
IfNotExists().
|
||||
Model(newModel).
|
||||
ForeignKey(fkReference).
|
||||
Exec(ctx)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = cb(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
NewDropTable().
|
||||
IfExists().
|
||||
Model(oldModel).
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = bun.
|
||||
ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -207,7 +207,7 @@ export const PasswordReset = Loadable(
|
||||
export const SomethingWentWrong = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "SomethingWentWrong" */ 'pages/SomethingWentWrong'
|
||||
/* webpackChunkName: "ErrorBoundaryFallback" */ 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -299,10 +299,3 @@ export const MetricsExplorer = Loadable(
|
||||
export const ApiMonitoring = Loadable(
|
||||
() => import(/* webpackChunkName: "ApiMonitoring" */ 'pages/ApiMonitoring'),
|
||||
);
|
||||
|
||||
export const DynamicVariableTest = Loadable(
|
||||
() =>
|
||||
import(
|
||||
/* webpackChunkName: "DynamicVariableTest" */ 'pages/DynamicVariableTest'
|
||||
),
|
||||
);
|
||||
|
||||
@@ -15,7 +15,6 @@ import {
|
||||
CustomDomainSettings,
|
||||
DashboardPage,
|
||||
DashboardWidget,
|
||||
DynamicVariableTest,
|
||||
EditAlertChannelsAlerts,
|
||||
EditRulesPage,
|
||||
ErrorDetails,
|
||||
@@ -506,13 +505,6 @@ const routes: AppRoutes[] = [
|
||||
key: 'API_MONITORING',
|
||||
isPrivate: true,
|
||||
},
|
||||
{
|
||||
path: ROUTES.DYNAMIC_VARIABLE_TEST,
|
||||
exact: true,
|
||||
component: DynamicVariableTest,
|
||||
key: 'DYNAMIC_VARIABLE_TEST',
|
||||
isPrivate: true,
|
||||
},
|
||||
];
|
||||
|
||||
export const SUPPORT_ROUTE: AppRoutes = {
|
||||
|
||||
@@ -18,6 +18,7 @@ function CopyClipboardHOC({
|
||||
|
||||
notifications.success({
|
||||
message: notificationMessage,
|
||||
key: notificationMessage,
|
||||
});
|
||||
}
|
||||
}, [value, notifications, entityKey]);
|
||||
|
||||
@@ -1,231 +0,0 @@
|
||||
.multi-select-container {
|
||||
position: relative;
|
||||
width: 100%;
|
||||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto,
|
||||
'Helvetica Neue', Arial, sans-serif;
|
||||
}
|
||||
|
||||
.multi-select-label {
|
||||
margin-bottom: 4px;
|
||||
font-size: 14px;
|
||||
color: rgba(0, 0, 0, 0.85);
|
||||
}
|
||||
|
||||
.multi-select-input {
|
||||
width: 100%;
|
||||
min-height: 40px;
|
||||
border: 1px solid #d9d9d9;
|
||||
border-radius: 6px;
|
||||
padding: 4px 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
cursor: text;
|
||||
background-color: #fff;
|
||||
transition: all 0.3s;
|
||||
|
||||
&:hover {
|
||||
border-color: #40a9ff;
|
||||
}
|
||||
|
||||
&:focus,
|
||||
&.multi-select-input-focused {
|
||||
border-color: #40a9ff;
|
||||
box-shadow: 0 0 0 2px rgba(24, 144, 255, 0.2);
|
||||
outline: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-chips {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
width: 100%;
|
||||
gap: 4px;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.multi-select-chip {
|
||||
background-color: #f0f0f0;
|
||||
border-radius: 4px;
|
||||
padding: 2px 8px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 4px;
|
||||
max-width: 200px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
font-size: 14px;
|
||||
line-height: 22px;
|
||||
|
||||
.multi-select-chip-remove {
|
||||
cursor: pointer;
|
||||
font-size: 12px;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
|
||||
&:hover {
|
||||
color: #ff4d4f;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-search {
|
||||
flex: 1;
|
||||
min-width: 50px;
|
||||
border: none;
|
||||
outline: none;
|
||||
background: transparent;
|
||||
|
||||
&:focus {
|
||||
border: none;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
// Override Ant Design's styles
|
||||
.ant-input {
|
||||
background: transparent;
|
||||
|
||||
&:focus {
|
||||
box-shadow: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-clear-all {
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 4px;
|
||||
cursor: pointer;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
|
||||
&:hover {
|
||||
color: #ff4d4f;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-dropdown {
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
left: 0;
|
||||
width: 100%;
|
||||
max-height: 400px;
|
||||
border: 1px solid #d9d9d9;
|
||||
border-radius: 6px;
|
||||
margin-top: 4px;
|
||||
background-color: #fff;
|
||||
box-shadow: 0 2px 8px rgba(0, 0, 0, 0.15);
|
||||
z-index: 1050;
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.multi-select-option {
|
||||
padding: 8px 12px;
|
||||
cursor: pointer;
|
||||
|
||||
&:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-divider {
|
||||
height: 1px;
|
||||
background-color: #f0f0f0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.multi-select-section-label {
|
||||
padding: 8px 12px;
|
||||
font-weight: 500;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
font-size: 12px;
|
||||
background-color: #fafafa;
|
||||
}
|
||||
|
||||
.multi-select-loading {
|
||||
padding: 12px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 12px;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.multi-select-no-results {
|
||||
padding: 12px;
|
||||
text-align: center;
|
||||
color: rgba(0, 0, 0, 0.45);
|
||||
}
|
||||
|
||||
.multi-select-options-container,
|
||||
.multi-select-section-content {
|
||||
overflow-y: auto;
|
||||
|
||||
/* For WebKit browsers */
|
||||
&::-webkit-scrollbar {
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: #f0f0f0;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: #ccc;
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb:hover {
|
||||
background: #aaa;
|
||||
}
|
||||
|
||||
/* For Firefox */
|
||||
scrollbar-width: thin;
|
||||
scrollbar-color: #ccc #f0f0f0;
|
||||
}
|
||||
|
||||
.multi-select-error {
|
||||
border-color: #ff4d4f;
|
||||
|
||||
&:hover,
|
||||
&:focus,
|
||||
&.multi-select-input-focused {
|
||||
border-color: #ff7875;
|
||||
box-shadow: 0 0 0 2px rgba(255, 77, 79, 0.2);
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-error-text {
|
||||
color: #ff4d4f;
|
||||
font-size: 14px;
|
||||
line-height: 1.5;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
.multi-select-disabled {
|
||||
.multi-select-input {
|
||||
background-color: #f5f5f5;
|
||||
cursor: not-allowed;
|
||||
color: rgba(0, 0, 0, 0.25);
|
||||
border-color: #d9d9d9;
|
||||
|
||||
&:hover {
|
||||
border-color: #d9d9d9;
|
||||
}
|
||||
}
|
||||
|
||||
.multi-select-chip {
|
||||
color: rgba(0, 0, 0, 0.25);
|
||||
background-color: #eee;
|
||||
}
|
||||
}
|
||||
@@ -1,595 +0,0 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
import './MultiSelect.styles.scss';
|
||||
|
||||
import { CloseOutlined, SearchOutlined } from '@ant-design/icons';
|
||||
import { Checkbox, Input, Spin } from 'antd';
|
||||
import { CheckboxChangeEvent } from 'antd/es/checkbox';
|
||||
import { InputRef } from 'antd/lib/input';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
|
||||
export interface MultiSelectOption {
|
||||
label: string;
|
||||
value: string;
|
||||
selected?: boolean;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export interface MultiSelectSection {
|
||||
title: string;
|
||||
options: MultiSelectOption[];
|
||||
}
|
||||
|
||||
export interface MultiSelectProps {
|
||||
/** Array of options to display in the dropdown */
|
||||
options: MultiSelectOption[];
|
||||
/** Callback when selected values change */
|
||||
onChange: (selectedValues: string[]) => void;
|
||||
/** Currently selected values */
|
||||
value?: string[];
|
||||
/** Placeholder text for the search input */
|
||||
placeholder?: string;
|
||||
/** Whether the component is in loading state */
|
||||
loading?: boolean;
|
||||
/** Allow users to add custom values */
|
||||
allowCustomValues?: boolean;
|
||||
/** Callback when search text changes - can be used for server filtering */
|
||||
onSearch?: (searchText: string) => void;
|
||||
/** Custom class name */
|
||||
className?: string;
|
||||
/** Additional sections to display (e.g., "Related Values") */
|
||||
additionalSections?: MultiSelectSection[];
|
||||
/** Show "Select All" option */
|
||||
showSelectAll?: boolean;
|
||||
/** Maximum height of dropdown in pixels */
|
||||
dropdownMaxHeight?: number;
|
||||
/** Maximum width of dropdown in pixels (defaults to matching input width) */
|
||||
dropdownMaxWidth?: number;
|
||||
/** Disable the component */
|
||||
disabled?: boolean;
|
||||
/** Error message to display */
|
||||
error?: string;
|
||||
/** Label text */
|
||||
label?: string;
|
||||
/** Allow users to clear all selections */
|
||||
allowClear?: boolean;
|
||||
/** Maximum height of a section */
|
||||
sectionMaxHeight?: number;
|
||||
}
|
||||
|
||||
function MultiSelect({
|
||||
options,
|
||||
onChange,
|
||||
value = [],
|
||||
placeholder = 'Search...',
|
||||
loading = false,
|
||||
allowCustomValues = true,
|
||||
onSearch,
|
||||
className = '',
|
||||
additionalSections = [],
|
||||
showSelectAll = true,
|
||||
dropdownMaxHeight = 400,
|
||||
dropdownMaxWidth,
|
||||
disabled = false,
|
||||
error,
|
||||
label,
|
||||
allowClear = true,
|
||||
sectionMaxHeight = 150,
|
||||
}: MultiSelectProps): JSX.Element {
|
||||
const [isDropdownOpen, setIsDropdownOpen] = useState<boolean>(false);
|
||||
const [searchText, setSearchText] = useState<string>('');
|
||||
const [selectedValues, setSelectedValues] = useState<string[]>(value);
|
||||
const [displayOptions, setDisplayOptions] = useState<MultiSelectOption[]>(
|
||||
options,
|
||||
);
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const inputRef = useRef<InputRef>(null);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
const [focusedChipIndex, setFocusedChipIndex] = useState<number>(-1);
|
||||
const chipRefs = useRef<(HTMLDivElement | null)[]>([]);
|
||||
|
||||
// Handle save action - memoize with useCallback
|
||||
const handleSave = useCallback((): void => {
|
||||
setIsDropdownOpen(false);
|
||||
setSearchText('');
|
||||
onChange(selectedValues);
|
||||
}, [onChange, selectedValues]);
|
||||
|
||||
// Synchronize value prop with internal state
|
||||
useEffect(() => {
|
||||
setSelectedValues(value);
|
||||
}, [value]);
|
||||
|
||||
// Filter and sort options based on search text
|
||||
useEffect(() => {
|
||||
// Filter options based on search text
|
||||
const filteredOptions = options.filter((option) =>
|
||||
option.label.toLowerCase().includes(searchText.toLowerCase()),
|
||||
);
|
||||
|
||||
// Add custom value option if no matches found and allowCustomValues is true
|
||||
if (
|
||||
allowCustomValues &&
|
||||
searchText &&
|
||||
!filteredOptions.some(
|
||||
(option) => option.label.toLowerCase() === searchText.toLowerCase(),
|
||||
) &&
|
||||
!filteredOptions.some(
|
||||
(option) => option.value.toLowerCase() === searchText.toLowerCase(),
|
||||
)
|
||||
) {
|
||||
filteredOptions.unshift({
|
||||
label: `Add "${searchText}"`,
|
||||
value: searchText,
|
||||
});
|
||||
}
|
||||
|
||||
// Sort options: selected first, then matching search term
|
||||
const sortedOptions = [...filteredOptions].sort((a, b) => {
|
||||
// First by selection status
|
||||
if (selectedValues.includes(a.value) && !selectedValues.includes(b.value))
|
||||
return -1;
|
||||
if (!selectedValues.includes(a.value) && selectedValues.includes(b.value))
|
||||
return 1;
|
||||
|
||||
// Then by match position (exact matches or starts with come first)
|
||||
const aLower = a.label.toLowerCase();
|
||||
const bLower = b.label.toLowerCase();
|
||||
const searchLower = searchText.toLowerCase();
|
||||
|
||||
if (aLower === searchLower && bLower !== searchLower) return -1;
|
||||
if (aLower !== searchLower && bLower === searchLower) return 1;
|
||||
if (aLower.startsWith(searchLower) && !bLower.startsWith(searchLower))
|
||||
return -1;
|
||||
if (!aLower.startsWith(searchLower) && bLower.startsWith(searchLower))
|
||||
return 1;
|
||||
|
||||
return 0;
|
||||
});
|
||||
|
||||
setDisplayOptions(sortedOptions);
|
||||
}, [options, searchText, selectedValues, allowCustomValues]);
|
||||
|
||||
// Close dropdown when clicking outside
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent): void => {
|
||||
if (
|
||||
containerRef.current &&
|
||||
!containerRef.current.contains(event.target as Node)
|
||||
) {
|
||||
handleSave();
|
||||
}
|
||||
};
|
||||
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
return (): void => {
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
};
|
||||
}, [selectedValues, handleSave]);
|
||||
|
||||
// Adjust dropdown position if needed
|
||||
useEffect(() => {
|
||||
if (isDropdownOpen && dropdownRef.current && containerRef.current) {
|
||||
const containerRect = containerRef.current.getBoundingClientRect();
|
||||
const dropdownHeight = dropdownRef.current.offsetHeight;
|
||||
const viewportHeight = window.innerHeight;
|
||||
|
||||
// Check if dropdown extends beyond viewport bottom
|
||||
if (
|
||||
containerRect.bottom + dropdownHeight > viewportHeight &&
|
||||
containerRect.top > dropdownHeight
|
||||
) {
|
||||
dropdownRef.current.style.top = 'auto';
|
||||
dropdownRef.current.style.bottom = '100%';
|
||||
dropdownRef.current.style.marginTop = '0';
|
||||
dropdownRef.current.style.marginBottom = '4px';
|
||||
}
|
||||
}
|
||||
}, [isDropdownOpen, displayOptions]);
|
||||
|
||||
// Handle search input change
|
||||
const handleSearchChange = (e: React.ChangeEvent<HTMLInputElement>): void => {
|
||||
const text = e.target.value;
|
||||
setSearchText(text);
|
||||
if (onSearch) {
|
||||
onSearch(text);
|
||||
}
|
||||
};
|
||||
|
||||
// Handle selection change
|
||||
const handleSelectionChange = (
|
||||
option: MultiSelectOption,
|
||||
e: CheckboxChangeEvent,
|
||||
): void => {
|
||||
const { checked } = e.target;
|
||||
let newSelectedValues: string[];
|
||||
|
||||
if (checked) {
|
||||
newSelectedValues = [...selectedValues, option.value];
|
||||
} else {
|
||||
newSelectedValues = selectedValues.filter((val) => val !== option.value);
|
||||
}
|
||||
|
||||
setSelectedValues(newSelectedValues);
|
||||
};
|
||||
|
||||
// Handle "All" checkbox change
|
||||
const handleSelectAll = (e: CheckboxChangeEvent): void => {
|
||||
if (e.target.checked) {
|
||||
const allValues = options
|
||||
.filter((option) => !option.disabled)
|
||||
.map((option) => option.value);
|
||||
setSelectedValues(allValues);
|
||||
} else {
|
||||
setSelectedValues([]);
|
||||
}
|
||||
};
|
||||
|
||||
// Remove a selected item
|
||||
const handleRemoveItem = useCallback(
|
||||
(value: string): void => {
|
||||
const newSelectedValues = selectedValues.filter((val) => val !== value);
|
||||
setSelectedValues(newSelectedValues);
|
||||
},
|
||||
[selectedValues],
|
||||
);
|
||||
|
||||
// Handle clicking the input area
|
||||
const handleInputClick = (): void => {
|
||||
if (!disabled) {
|
||||
setIsDropdownOpen(true);
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
};
|
||||
|
||||
// Handle clear all selections
|
||||
const handleClearAll = (): void => {
|
||||
setSelectedValues([]);
|
||||
setSearchText('');
|
||||
inputRef.current?.focus();
|
||||
};
|
||||
|
||||
// Get display value of a selection (chips)
|
||||
const getSelectedOptions = (): MultiSelectOption[] =>
|
||||
selectedValues.map((value) => {
|
||||
const option = options.find((opt) => opt.value === value);
|
||||
return {
|
||||
label: option?.label || value,
|
||||
value,
|
||||
};
|
||||
});
|
||||
|
||||
const selectedOptions = getSelectedOptions();
|
||||
const allSelectableOptions = options.filter((option) => !option.disabled);
|
||||
const allSelected =
|
||||
allSelectableOptions.length > 0 &&
|
||||
selectedValues.length === allSelectableOptions.length;
|
||||
|
||||
const containerClasses = [
|
||||
'multi-select-container',
|
||||
className,
|
||||
disabled ? 'multi-select-disabled' : '',
|
||||
error ? 'multi-select-error' : '',
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ');
|
||||
|
||||
const inputClasses = [
|
||||
'multi-select-input',
|
||||
isDropdownOpen ? 'multi-select-input-focused' : '',
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(' ');
|
||||
|
||||
// Reset chip refs array when selected options change
|
||||
useEffect(() => {
|
||||
chipRefs.current = Array(selectedOptions.length).fill(null);
|
||||
}, [selectedOptions.length]);
|
||||
|
||||
// Handle chip keyboard navigation
|
||||
const handleChipKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent, index: number) => {
|
||||
e.stopPropagation(); // Prevent bubbling to container
|
||||
|
||||
switch (e.key) {
|
||||
case 'ArrowLeft':
|
||||
e.preventDefault();
|
||||
// Move focus to previous chip
|
||||
if (index > 0) {
|
||||
setFocusedChipIndex(index - 1);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'ArrowRight':
|
||||
e.preventDefault();
|
||||
// Move focus to next chip or input
|
||||
if (index < selectedOptions.length - 1) {
|
||||
setFocusedChipIndex(index + 1);
|
||||
} else {
|
||||
// Focus the input when at the last chip
|
||||
setFocusedChipIndex(-1);
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Delete':
|
||||
case 'Backspace':
|
||||
e.preventDefault();
|
||||
// Remove current chip
|
||||
handleRemoveItem(selectedOptions[index].value);
|
||||
|
||||
// Adjust focus after deletion
|
||||
if (selectedOptions.length > 1) {
|
||||
// Focus previous chip if not at beginning
|
||||
const newIndex = Math.min(index, selectedOptions.length - 2);
|
||||
setFocusedChipIndex(newIndex);
|
||||
} else {
|
||||
// If this was the last chip, focus input
|
||||
setFocusedChipIndex(-1);
|
||||
inputRef.current?.focus();
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Escape':
|
||||
e.preventDefault();
|
||||
// Return focus to input
|
||||
setFocusedChipIndex(-1);
|
||||
inputRef.current?.focus();
|
||||
break;
|
||||
default:
|
||||
// No-op for unhandled keys
|
||||
break;
|
||||
}
|
||||
},
|
||||
[selectedOptions, handleRemoveItem],
|
||||
);
|
||||
|
||||
// Handle key events in the input
|
||||
const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>): void => {
|
||||
if (e.key === 'Enter') {
|
||||
// Add custom value on Enter if it doesn't exist
|
||||
if (
|
||||
allowCustomValues &&
|
||||
searchText &&
|
||||
!options.some(
|
||||
(option) => option.value.toLowerCase() === searchText.toLowerCase(),
|
||||
) &&
|
||||
!options.some(
|
||||
(option) => option.label.toLowerCase() === searchText.toLowerCase(),
|
||||
)
|
||||
) {
|
||||
const newSelectedValues = [...selectedValues, searchText];
|
||||
setSelectedValues(newSelectedValues);
|
||||
setSearchText('');
|
||||
} else if (isDropdownOpen) {
|
||||
handleSave();
|
||||
}
|
||||
} else if (e.key === 'Escape') {
|
||||
handleSave();
|
||||
} else if (
|
||||
e.key === 'Backspace' &&
|
||||
!searchText &&
|
||||
selectedValues.length > 0
|
||||
) {
|
||||
// Remove the last selected item when pressing backspace in an empty input
|
||||
const newSelectedValues = [...selectedValues];
|
||||
newSelectedValues.pop();
|
||||
setSelectedValues(newSelectedValues);
|
||||
} else if (e.key === 'Tab' && isDropdownOpen) {
|
||||
// Close dropdown but keep focus within component
|
||||
e.preventDefault();
|
||||
handleSave();
|
||||
}
|
||||
|
||||
// Add navigation TO chips when in input field
|
||||
if (e.key === 'ArrowLeft' && !searchText && selectedOptions.length > 0) {
|
||||
e.preventDefault();
|
||||
setFocusedChipIndex(selectedOptions.length - 1);
|
||||
}
|
||||
};
|
||||
|
||||
// Focus the appropriate chip when focusedChipIndex changes
|
||||
useEffect(() => {
|
||||
if (focusedChipIndex >= 0 && chipRefs.current[focusedChipIndex]) {
|
||||
chipRefs.current[focusedChipIndex]?.focus();
|
||||
}
|
||||
}, [focusedChipIndex]);
|
||||
|
||||
return (
|
||||
<div className={containerClasses} ref={containerRef}>
|
||||
{label && <div className="multi-select-label">{label}</div>}
|
||||
<div
|
||||
className={inputClasses}
|
||||
onClick={handleInputClick}
|
||||
onKeyDown={(e): void => {
|
||||
if (e.key === 'Enter' || e.key === ' ') {
|
||||
handleInputClick();
|
||||
}
|
||||
}}
|
||||
role="combobox"
|
||||
aria-expanded={isDropdownOpen}
|
||||
aria-haspopup="listbox"
|
||||
aria-controls="multi-select-dropdown"
|
||||
aria-owns="multi-select-dropdown"
|
||||
tabIndex={disabled ? -1 : 0}
|
||||
>
|
||||
<div className="multi-select-chips">
|
||||
{selectedOptions.map((option, index) => (
|
||||
<div
|
||||
key={option.value}
|
||||
className={`multi-select-chip ${
|
||||
focusedChipIndex === index ? 'multi-select-chip-focused' : ''
|
||||
}`}
|
||||
ref={(el): void => {
|
||||
chipRefs.current[index] = el;
|
||||
}}
|
||||
role="button"
|
||||
tabIndex={0}
|
||||
onKeyDown={(e): void => handleChipKeyDown(e, index)}
|
||||
onFocus={(): void => setFocusedChipIndex(index)}
|
||||
onClick={(e): void => e.stopPropagation()}
|
||||
aria-label={`Selected option: ${option.label}`}
|
||||
>
|
||||
{option.label}
|
||||
{!disabled && (
|
||||
<button
|
||||
type="button"
|
||||
className="multi-select-chip-remove"
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
handleRemoveItem(option.value);
|
||||
}}
|
||||
aria-label={`Remove ${option.label}`}
|
||||
tabIndex={-1} // Don't make the inner button tabbable
|
||||
>
|
||||
<CloseOutlined />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
<Input
|
||||
ref={inputRef}
|
||||
className="multi-select-search"
|
||||
placeholder={selectedOptions.length === 0 ? placeholder : ''}
|
||||
value={searchText}
|
||||
onChange={handleSearchChange}
|
||||
onKeyDown={handleKeyDown}
|
||||
onFocus={(): void => setIsDropdownOpen(true)}
|
||||
suffix={<SearchOutlined />}
|
||||
bordered={false}
|
||||
disabled={disabled}
|
||||
/>
|
||||
{allowClear && selectedValues.length > 0 && !disabled && (
|
||||
<button
|
||||
type="button"
|
||||
className="multi-select-clear-all"
|
||||
onClick={(e): void => {
|
||||
e.stopPropagation();
|
||||
handleClearAll();
|
||||
}}
|
||||
aria-label="Clear all selections"
|
||||
>
|
||||
<CloseOutlined />
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{error && <div className="multi-select-error-text">{error}</div>}
|
||||
|
||||
{isDropdownOpen && !disabled && (
|
||||
<div
|
||||
className="multi-select-dropdown"
|
||||
ref={dropdownRef}
|
||||
style={{
|
||||
maxHeight: `${dropdownMaxHeight}px`,
|
||||
maxWidth: dropdownMaxWidth ? `${dropdownMaxWidth}px` : undefined,
|
||||
}}
|
||||
id="multi-select-dropdown"
|
||||
role="listbox"
|
||||
aria-multiselectable="true"
|
||||
>
|
||||
{loading ? (
|
||||
<div className="multi-select-loading">
|
||||
<Spin size="small" />
|
||||
<span>We are updating the values ...</span>
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
{showSelectAll && (
|
||||
<>
|
||||
<div className="multi-select-option">
|
||||
<Checkbox checked={allSelected} onChange={handleSelectAll}>
|
||||
ALL
|
||||
</Checkbox>
|
||||
</div>
|
||||
<div className="multi-select-divider" />
|
||||
</>
|
||||
)}
|
||||
<div
|
||||
className="multi-select-options-container"
|
||||
style={{ maxHeight: `${sectionMaxHeight}px` }}
|
||||
>
|
||||
{displayOptions.length > 0 ? (
|
||||
displayOptions.map((option) => (
|
||||
<div
|
||||
key={option.value}
|
||||
className="multi-select-option"
|
||||
role="option"
|
||||
aria-selected={selectedValues.includes(option.value)}
|
||||
>
|
||||
<Checkbox
|
||||
checked={selectedValues.includes(option.value)}
|
||||
onChange={(e): void => handleSelectionChange(option, e)}
|
||||
disabled={option.disabled}
|
||||
>
|
||||
{option.label}
|
||||
</Checkbox>
|
||||
</div>
|
||||
))
|
||||
) : (
|
||||
<div className="multi-select-no-results">
|
||||
{allowCustomValues && searchText
|
||||
? `Add "${searchText}"`
|
||||
: 'No results found'}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{additionalSections.map(
|
||||
(section) =>
|
||||
section.options.length > 0 && (
|
||||
<div key={`section-${section.title}`}>
|
||||
<div className="multi-select-divider" />
|
||||
<div className="multi-select-section-label">{section.title}</div>
|
||||
<div
|
||||
className="multi-select-section-content"
|
||||
style={{ maxHeight: `${sectionMaxHeight}px` }}
|
||||
>
|
||||
{section.options.map((option) => (
|
||||
<div
|
||||
key={option.value}
|
||||
className="multi-select-option"
|
||||
role="option"
|
||||
aria-selected={selectedValues.includes(option.value)}
|
||||
>
|
||||
<Checkbox
|
||||
checked={selectedValues.includes(option.value)}
|
||||
onChange={(e): void => handleSelectionChange(option, e)}
|
||||
disabled={option.disabled}
|
||||
>
|
||||
{option.label}
|
||||
</Checkbox>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Define defaultProps to fix linter warnings
|
||||
MultiSelect.defaultProps = {
|
||||
value: [],
|
||||
placeholder: 'Search...',
|
||||
loading: false,
|
||||
allowCustomValues: true,
|
||||
onSearch: undefined,
|
||||
className: '',
|
||||
additionalSections: [],
|
||||
showSelectAll: true,
|
||||
dropdownMaxHeight: 400,
|
||||
dropdownMaxWidth: undefined,
|
||||
disabled: false,
|
||||
error: undefined,
|
||||
label: undefined,
|
||||
allowClear: true,
|
||||
sectionMaxHeight: 150,
|
||||
};
|
||||
|
||||
export default MultiSelect;
|
||||
@@ -1,8 +0,0 @@
|
||||
import MultiSelect from './MultiSelect';
|
||||
|
||||
export type {
|
||||
MultiSelectOption,
|
||||
MultiSelectProps,
|
||||
MultiSelectSection,
|
||||
} from './MultiSelect';
|
||||
export default MultiSelect;
|
||||
@@ -75,7 +75,6 @@ const ROUTES = {
|
||||
METRICS_EXPLORER_BASE: '/metrics-explorer',
|
||||
WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
|
||||
HOME_PAGE: '/',
|
||||
DYNAMIC_VARIABLE_TEST: '/dynamic-variable-test',
|
||||
} as const;
|
||||
|
||||
export default ROUTES;
|
||||
|
||||
@@ -453,7 +453,7 @@ export const Query = memo(function Query({
|
||||
</Col>
|
||||
)}
|
||||
<Col flex="1" className="qb-search-container">
|
||||
{query.dataSource === DataSource.LOGS ? (
|
||||
{[DataSource.LOGS, DataSource.TRACES].includes(query.dataSource) ? (
|
||||
<QueryBuilderSearchV2
|
||||
query={query}
|
||||
onChange={handleChangeTagFilters}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import './QueryBuilderSearchV2.styles.scss';
|
||||
|
||||
import { Typography } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import {
|
||||
ArrowDown,
|
||||
ArrowUp,
|
||||
@@ -25,6 +26,7 @@ interface ICustomDropdownProps {
|
||||
exampleQueries: TagFilter[];
|
||||
onChange: (value: TagFilter) => void;
|
||||
currentFilterItem?: ITag;
|
||||
isLogsDataSource: boolean;
|
||||
}
|
||||
|
||||
export default function QueryBuilderSearchDropdown(
|
||||
@@ -38,11 +40,14 @@ export default function QueryBuilderSearchDropdown(
|
||||
exampleQueries,
|
||||
options,
|
||||
onChange,
|
||||
isLogsDataSource,
|
||||
} = props;
|
||||
const userOs = getUserOperatingSystem();
|
||||
return (
|
||||
<>
|
||||
<div className="content">
|
||||
<div
|
||||
className={cx('content', { 'non-logs-data-source': !isLogsDataSource })}
|
||||
>
|
||||
{!currentFilterItem?.key ? (
|
||||
<div className="suggested-filters">Suggested Filters</div>
|
||||
) : !currentFilterItem?.op ? (
|
||||
|
||||
@@ -11,6 +11,11 @@
|
||||
.rc-virtual-list-holder {
|
||||
height: 115px;
|
||||
}
|
||||
&.non-logs-data-source {
|
||||
.rc-virtual-list-holder {
|
||||
height: 256px;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -689,12 +689,29 @@ function QueryBuilderSearchV2(
|
||||
})),
|
||||
);
|
||||
} else {
|
||||
setDropdownOptions(
|
||||
data?.payload?.attributeKeys?.map((key) => ({
|
||||
setDropdownOptions([
|
||||
// Add user typed option if it doesn't exist in the payload
|
||||
...(!isEmpty(tagKey) &&
|
||||
!data?.payload?.attributeKeys?.some((val) => isEqual(val.key, tagKey))
|
||||
? [
|
||||
{
|
||||
label: tagKey,
|
||||
value: {
|
||||
key: tagKey,
|
||||
dataType: DataTypes.EMPTY,
|
||||
type: '',
|
||||
isColumn: false,
|
||||
isJSON: false,
|
||||
},
|
||||
},
|
||||
]
|
||||
: []),
|
||||
// Map existing attribute keys from payload
|
||||
...(data?.payload?.attributeKeys?.map((key) => ({
|
||||
label: key.key,
|
||||
value: key,
|
||||
})) || [],
|
||||
);
|
||||
})) || []),
|
||||
]);
|
||||
}
|
||||
}
|
||||
if (currentState === DropdownState.OPERATOR) {
|
||||
@@ -964,6 +981,7 @@ function QueryBuilderSearchV2(
|
||||
exampleQueries={suggestionsData?.payload?.example_queries || []}
|
||||
tags={tags}
|
||||
currentFilterItem={currentFilterItem}
|
||||
isLogsDataSource={isLogsDataSource}
|
||||
/>
|
||||
)}
|
||||
>
|
||||
|
||||
@@ -170,11 +170,7 @@ export const useOptions = (
|
||||
(option, index, self) =>
|
||||
index ===
|
||||
self.findIndex(
|
||||
(o) =>
|
||||
// to remove duplicate & empty options from list
|
||||
o.label === option.label &&
|
||||
o.value === option.value &&
|
||||
o.dataType?.toLowerCase() === option.dataType?.toLowerCase(), // handle case sensitivity
|
||||
(o) => o.label === option.label && o.value === option.value, // to remove duplicate & empty options from list
|
||||
) && option.value !== '',
|
||||
) || []
|
||||
).map((option) => {
|
||||
|
||||
@@ -1,211 +0,0 @@
|
||||
import './styles.scss';
|
||||
|
||||
import { Button, Card, Col, Divider, Row, Switch, Typography } from 'antd';
|
||||
import MultiSelect, {
|
||||
MultiSelectOption,
|
||||
MultiSelectSection,
|
||||
} from 'components/MultiSelect';
|
||||
import { useState } from 'react';
|
||||
|
||||
const { Title, Text, Paragraph } = Typography;
|
||||
|
||||
// Sample data for the component
|
||||
const sampleOptions: MultiSelectOption[] = [
|
||||
{ label: 'abc', value: 'abc' },
|
||||
{ label: 'acbewc', value: 'acbewc' },
|
||||
{ label: 'custom-value', value: 'custom-value' },
|
||||
{ label: 'option1', value: 'option1' },
|
||||
{ label: 'option2', value: 'option2' },
|
||||
{ label: 'another-option', value: 'another-option' },
|
||||
{ label: 'test-option', value: 'test-option' },
|
||||
{ label: 'disabled-option', value: 'disabled-option', disabled: true },
|
||||
];
|
||||
|
||||
// Sample related values for the "Related Values" section
|
||||
const relatedValues: MultiSelectOption[] = [
|
||||
{ label: 'gke-mgmt-pl-generator-e2st4-sp-f1c1bde8-skbl', value: 'gke-1' },
|
||||
{ label: 'gke-mgmt-pl-generator-e2st4-sp-f1c1bde8-skb2', value: 'gke-2' },
|
||||
{ label: 'gke-mgmt-pl-generator-e2st4-sp-f1c1bde8-skb3', value: 'gke-3' },
|
||||
];
|
||||
|
||||
// Sample all values for the "All Values" section
|
||||
const allValues: MultiSelectOption[] = Array.from({ length: 20 }, (_, i) => ({
|
||||
label: `gke-mgmt-pl-generator-e2st4-sp-f1c1bde8-7a7w-${i + 1}`,
|
||||
value: `all-${i + 1}`,
|
||||
}));
|
||||
|
||||
// Creating sections
|
||||
const sections: MultiSelectSection[] = [
|
||||
{
|
||||
title: 'Related Values',
|
||||
options: relatedValues,
|
||||
},
|
||||
{
|
||||
title: 'ALL Values',
|
||||
options: allValues,
|
||||
},
|
||||
];
|
||||
|
||||
function DynamicVariableTestPage(): JSX.Element {
|
||||
const [selectedValues, setSelectedValues] = useState<string[]>([
|
||||
'abc',
|
||||
'acbewc',
|
||||
]);
|
||||
const [loadingDemo, setLoadingDemo] = useState<boolean>(false);
|
||||
const [allowCustom, setAllowCustom] = useState<boolean>(true);
|
||||
const [showError, setShowError] = useState<boolean>(false);
|
||||
const [disabled, setDisabled] = useState<boolean>(false);
|
||||
|
||||
const handleChange = (values: string[]): void => {
|
||||
setSelectedValues(values);
|
||||
};
|
||||
|
||||
const toggleLoading = (): void => {
|
||||
setLoadingDemo((prev) => !prev);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="dynamic-variable-page">
|
||||
<Card>
|
||||
<Title level={3}>Dynamic Variable MultiSelect</Title>
|
||||
<Paragraph>
|
||||
This page demonstrates the MultiSelect component with various features. The
|
||||
component is now fully reusable and production-ready with support for
|
||||
dynamic data from APIs, proper error states, accessibility, and UI
|
||||
improvements.
|
||||
</Paragraph>
|
||||
|
||||
<Divider />
|
||||
|
||||
<Row gutter={[16, 16]}>
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>Basic MultiSelect</Title>
|
||||
<Text>This example shows the basic usage with pre-selected values.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={selectedValues}
|
||||
onChange={handleChange}
|
||||
placeholder="Search or add values..."
|
||||
label="Select options"
|
||||
/>
|
||||
</div>
|
||||
<div className="selected-values-display">
|
||||
<Title level={5}>Selected Values:</Title>
|
||||
<pre>{JSON.stringify(selectedValues, null, 2)}</pre>
|
||||
</div>
|
||||
</Col>
|
||||
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>With Sections & All Values</Title>
|
||||
<Text>This example shows the component with additional sections.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={selectedValues}
|
||||
onChange={handleChange}
|
||||
placeholder="Search or add values..."
|
||||
additionalSections={sections}
|
||||
sectionMaxHeight={120}
|
||||
/>
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
<Divider />
|
||||
|
||||
<Row gutter={[16, 16]}>
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>Loading State</Title>
|
||||
<Text>This example demonstrates the loading state.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={[]}
|
||||
onChange={(): void => {}}
|
||||
loading={loadingDemo}
|
||||
placeholder="This shows loading state..."
|
||||
/>
|
||||
<Button onClick={toggleLoading} style={{ marginTop: 16 }}>
|
||||
{loadingDemo ? 'Stop Loading' : 'Simulate Loading'}
|
||||
</Button>
|
||||
</div>
|
||||
</Col>
|
||||
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>Custom Values Configuration</Title>
|
||||
<Text>Toggle to enable/disable custom values.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={[]}
|
||||
onChange={(): void => {}}
|
||||
allowCustomValues={allowCustom}
|
||||
placeholder={
|
||||
allowCustom ? 'Custom values allowed...' : 'Only predefined values...'
|
||||
}
|
||||
/>
|
||||
<div style={{ marginTop: 16 }}>
|
||||
<Switch
|
||||
checked={allowCustom}
|
||||
onChange={setAllowCustom}
|
||||
checkedChildren="Custom values on"
|
||||
unCheckedChildren="Custom values off"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
|
||||
<Divider />
|
||||
|
||||
<Row gutter={[16, 16]}>
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>Error State</Title>
|
||||
<Text>This example shows the component with an error.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={[]}
|
||||
onChange={(): void => {}}
|
||||
placeholder="Select some options..."
|
||||
error={showError ? 'Please select at least one option' : undefined}
|
||||
/>
|
||||
<Button
|
||||
onClick={(): void => setShowError(!showError)}
|
||||
style={{ marginTop: 16 }}
|
||||
type={showError ? 'primary' : 'default'}
|
||||
>
|
||||
{showError ? 'Hide Error' : 'Show Error'}
|
||||
</Button>
|
||||
</div>
|
||||
</Col>
|
||||
|
||||
<Col xs={24} md={12}>
|
||||
<Title level={5}>Disabled State</Title>
|
||||
<Text>This example shows the disabled state of the component.</Text>
|
||||
<div className="multiselect-demo-container">
|
||||
<MultiSelect
|
||||
options={sampleOptions}
|
||||
value={['abc', 'option1']}
|
||||
onChange={(): void => {}}
|
||||
placeholder="This component is disabled..."
|
||||
disabled={disabled}
|
||||
/>
|
||||
<div style={{ marginTop: 16 }}>
|
||||
<Switch
|
||||
checked={disabled}
|
||||
onChange={setDisabled}
|
||||
checkedChildren="Disabled"
|
||||
unCheckedChildren="Enabled"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</Col>
|
||||
</Row>
|
||||
</Card>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default DynamicVariableTestPage;
|
||||
@@ -1,20 +0,0 @@
|
||||
.dynamic-variable-page {
|
||||
padding: 24px;
|
||||
max-width: 800px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.multiselect-demo-container {
|
||||
margin-top: 16px;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.selected-values-display {
|
||||
margin-top: 24px;
|
||||
|
||||
pre {
|
||||
background-color: #f5f5f5;
|
||||
padding: 16px;
|
||||
border-radius: 6px;
|
||||
}
|
||||
}
|
||||
@@ -764,10 +764,7 @@ export function QueryBuilderProvider({
|
||||
);
|
||||
|
||||
const { safeNavigate } = useSafeNavigate({
|
||||
preventSameUrlNavigation: !(
|
||||
initialDataSource === DataSource.LOGS ||
|
||||
initialDataSource === DataSource.TRACES
|
||||
),
|
||||
preventSameUrlNavigation: false,
|
||||
});
|
||||
|
||||
const redirectWithQueryBuilderData = useCallback(
|
||||
|
||||
@@ -120,5 +120,4 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
|
||||
API_MONITORING: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
WORKSPACE_ACCESS_RESTRICTED: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METRICS_EXPLORER_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
DYNAMIC_VARIABLE_TEST: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
};
|
||||
|
||||
6
go.mod
6
go.mod
@@ -6,6 +6,7 @@ toolchain go1.22.7
|
||||
|
||||
require (
|
||||
dario.cat/mergo v1.0.1
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.4
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.30.0
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||
@@ -21,6 +22,7 @@ require (
|
||||
github.com/go-redis/redis/v8 v8.11.5
|
||||
github.com/go-redis/redismock/v8 v8.11.5
|
||||
github.com/go-viper/mapstructure/v2 v2.1.0
|
||||
github.com/gojek/heimdall/v7 v7.0.3
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/handlers v1.5.1
|
||||
@@ -61,6 +63,7 @@ require (
|
||||
go.opentelemetry.io/collector/pdata v1.17.0
|
||||
go.opentelemetry.io/collector/processor v0.111.0
|
||||
go.opentelemetry.io/contrib/config v0.10.0
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0
|
||||
go.opentelemetry.io/otel v1.34.0
|
||||
go.opentelemetry.io/otel/metric v1.34.0
|
||||
go.opentelemetry.io/otel/sdk v1.34.0
|
||||
@@ -128,6 +131,7 @@ require (
|
||||
github.com/goccy/go-json v0.10.3 // indirect
|
||||
github.com/gofrs/uuid v4.4.0+incompatible // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf // indirect
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
|
||||
@@ -201,6 +205,7 @@ require (
|
||||
github.com/smarty/assertions v1.15.0 // indirect
|
||||
github.com/spf13/cobra v1.8.1 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.13 // indirect
|
||||
@@ -245,7 +250,6 @@ require (
|
||||
go.opentelemetry.io/collector/receiver/receiverprofiles v0.111.0 // indirect
|
||||
go.opentelemetry.io/collector/semconv v0.111.0 // indirect
|
||||
go.opentelemetry.io/collector/service v0.111.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.30.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.6.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.30.0 // indirect
|
||||
|
||||
19
go.sum
19
go.sum
@@ -64,6 +64,8 @@ dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
|
||||
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.4 h1:iLRwjzz1mWmUEf5UNrSYOceQ+PX9SdBJ8Xw0DNrL114=
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.4/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
|
||||
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0 h1:nyQWyZvwGTvunIMxi1Y9uXkcyr+I7TeNrr/foo4Kpk8=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0=
|
||||
@@ -88,6 +90,7 @@ github.com/Code-Hex/go-generics-cache v1.5.1/go.mod h1:qxcC9kRVrct9rHeiYpFWSoW1v
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU=
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU=
|
||||
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/DataDog/datadog-go v3.7.1+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
|
||||
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
@@ -97,6 +100,7 @@ github.com/SigNoz/prometheus v1.13.0 h1:hsUql1zd83ifXtswO9Qk1rpCgVjE/ItQvgdNocBS
|
||||
github.com/SigNoz/prometheus v1.13.0/go.mod h1:4PC0dxmx6y3kNI2d9oOTvEFTPkH6QnxDxERyqeL1hvI=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.16 h1:535uKH5Oux+35EsI+L3C6pnAP/Ye0PTCbVizXoL+VqE=
|
||||
github.com/SigNoz/signoz-otel-collector v0.111.16/go.mod h1:HJ4m0LY1MPsuZmuRF7Ixb+bY8rxgRzI0VXzOedESsjg=
|
||||
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
@@ -142,6 +146,7 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||
github.com/cactus/go-statsd-client/statsd v0.0.0-20200423205355-cb0885a1018c/go.mod h1:l/bIBLeOl9eX+wxJAzxS4TveKRtAqlyDpHjhkfO0MEI=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
@@ -326,6 +331,10 @@ github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRx
|
||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/gojek/heimdall/v7 v7.0.3 h1:+5sAhl8S0m+qRRL8IVeHCJudFh/XkG3wyO++nvOg+gc=
|
||||
github.com/gojek/heimdall/v7 v7.0.3/go.mod h1:Z43HtMid7ysSjmsedPTXAki6jcdcNVnjn5pmsTyiMic=
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf h1:5xRGbUdOmZKoDXkGx5evVLehuCMpuO1hl701bEQqXOM=
|
||||
github.com/gojek/valkyrie v0.0.0-20180215180059-6aee720afcdf/go.mod h1:QzhUKaYKJmcbTnCYCAVQrroCOY7vOOI8cSQ4NbuhYf0=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
@@ -431,6 +440,7 @@ github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK
|
||||
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
|
||||
github.com/gophercloud/gophercloud v1.14.0 h1:Bt9zQDhPrbd4qX7EILGmy+i7GP35cc+AAL2+wIJpUE8=
|
||||
github.com/gophercloud/gophercloud v1.14.0/go.mod h1:aAVqcocTSXh2vYFZ1JTvx4EQmfgzxRcNupUfxZbBNDM=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
|
||||
github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
|
||||
github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
|
||||
@@ -800,6 +810,7 @@ github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0leargg
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.0 h1:i+cMcpEDY1BkNm7lPDkCtE4oElsYLn+EKF8kAu2vXT4=
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
|
||||
github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
@@ -827,10 +838,14 @@ github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30 h1:yoKAVkEVwAqbGbR8n87rHQ1dul
|
||||
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30/go.mod h1:sH0u6fq6x4R5M7WxkoQFY/o7UaiItec0o1LinLCJNq8=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/sebdah/goldie/v2 v2.5.3 h1:9ES/mNN+HNUbNWpVAlrzuZ7jE+Nrczbj8uFRjM7624Y=
|
||||
github.com/sebdah/goldie/v2 v2.5.3/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
|
||||
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
|
||||
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/segmentio/backo-go v1.0.1 h1:68RQccglxZeyURy93ASB/2kc9QudzgIDexJ927N++y4=
|
||||
github.com/segmentio/backo-go v1.0.1/go.mod h1:9/Rh6yILuLysoQnZ2oNooD2g7aBnvM7r/fNVxRNWfBc=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/sethvargo/go-password v0.2.0 h1:BTDl4CC/gjf/axHMaDQtw507ogrXLci6XRiLc7i/UHI=
|
||||
github.com/sethvargo/go-password v0.2.0/go.mod h1:Ym4Mr9JXLBycr02MFuVQ/0JHidNetSgbzutTr3zsYXE=
|
||||
github.com/shirou/gopsutil/v4 v4.24.9 h1:KIV+/HaHD5ka5f570RZq+2SaeFsb/pq+fp2DGNWYoOI=
|
||||
@@ -848,6 +863,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY=
|
||||
github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY=
|
||||
github.com/smartystreets/goconvey v1.8.1/go.mod h1:+/u4qLyY6x1jReYOp7GOM2FSt8aP9CzCZL03bI28W60=
|
||||
github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js=
|
||||
@@ -865,6 +882,7 @@ github.com/srikanthccv/ClickHouse-go-mock v0.11.0 h1:hKY9l7SbhI4IPPs7hjKAL1iDgKc
|
||||
github.com/srikanthccv/ClickHouse-go-mock v0.11.0/go.mod h1:CzFC21J4tLn7cEYdU5k6hg7yyf052xtZXUY2e3UF6+I=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
@@ -1353,6 +1371,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
|
||||
@@ -6,6 +6,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

var (
@@ -33,16 +34,16 @@ type Alertmanager interface {
ListAllChannels(context.Context) ([]*alertmanagertypes.Channel, error)

// GetChannelByID gets a channel for the organization.
GetChannelByID(context.Context, string, int) (*alertmanagertypes.Channel, error)
GetChannelByID(context.Context, string, valuer.UUID) (*alertmanagertypes.Channel, error)

// UpdateChannel updates a channel for the organization.
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, int) error
UpdateChannelByReceiverAndID(context.Context, string, alertmanagertypes.Receiver, valuer.UUID) error

// CreateChannel creates a channel for the organization.
CreateChannel(context.Context, string, alertmanagertypes.Receiver) error

// DeleteChannelByID deletes a channel for the organization.
DeleteChannelByID(context.Context, string, int) error
DeleteChannelByID(context.Context, string, valuer.UUID) error

// SetConfig sets the config for the organization.
SetConfig(context.Context, *alertmanagertypes.Config) error
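Notification channel IDs move from auto-increment integers to uuid-v7 values in this change. A minimal sketch of a caller of the updated interface, assuming it lives in pkg/alertmanager as the surrounding imports suggest; the org and channel id values are placeholders:

// Sketch only: a caller of the UUID-based channel methods. The orgID and
// idString values are placeholders; the interface location is an assumption.
package channelsketch

import (
	"context"

	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func deleteChannel(ctx context.Context, am alertmanager.Alertmanager, orgID, idString string) error {
	// Channel ids are now uuid-v7 strings rather than integers.
	id, err := valuer.NewUUID(idString)
	if err != nil {
		return err
	}
	return am.DeleteChannelByID(ctx, orgID, id)
}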
@@ -8,6 +8,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/tidwall/gjson"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
@@ -99,7 +100,7 @@ func (store *config) CreateChannel(ctx context.Context, channel *alertmanagertyp
|
||||
}, opts...)
|
||||
}
|
||||
|
||||
func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (*alertmanagertypes.Channel, error) {
|
||||
func (store *config) GetChannelByID(ctx context.Context, orgID string, id valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||
channel := new(alertmanagertypes.Channel)
|
||||
|
||||
err := store.
|
||||
@@ -108,11 +109,11 @@ func (store *config) GetChannelByID(ctx context.Context, orgID string, id int) (
|
||||
NewSelect().
|
||||
Model(channel).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("id = ?", id).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %d", id)
|
||||
return nil, errors.Newf(errors.TypeNotFound, alertmanagertypes.ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %s", id.StringValue())
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
@@ -136,7 +137,7 @@ func (store *config) UpdateChannel(ctx context.Context, orgID string, channel *a
|
||||
}, opts...)
|
||||
}
|
||||
|
||||
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int, opts ...alertmanagertypes.StoreOption) error {
|
||||
func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id valuer.UUID, opts ...alertmanagertypes.StoreOption) error {
|
||||
return store.wrap(ctx, func(ctx context.Context) error {
|
||||
channel := new(alertmanagertypes.Channel)
|
||||
|
||||
@@ -146,7 +147,7 @@ func (store *config) DeleteChannelByID(ctx context.Context, orgID string, id int
|
||||
NewDelete().
|
||||
Model(channel).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("id = ?", id).
|
||||
Where("id = ?", id.StringValue()).
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -4,13 +4,13 @@ import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
@@ -140,9 +140,9 @@ func (api *API) GetChannelByID(rw http.ResponseWriter, req *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
id, err := strconv.Atoi(idString)
|
||||
id, err := valuer.NewUUID(idString)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -177,9 +177,9 @@ func (api *API) UpdateChannelByID(rw http.ResponseWriter, req *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
id, err := strconv.Atoi(idString)
|
||||
id, err := valuer.NewUUID(idString)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||
return
|
||||
}
|
||||
|
||||
@@ -227,9 +227,9 @@ func (api *API) DeleteChannelByID(rw http.ResponseWriter, req *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
id, err := strconv.Atoi(idString)
|
||||
id, err := valuer.NewUUID(idString)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid integer"))
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid-v7"))
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
@@ -269,11 +270,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
|
||||
return channels, nil
|
||||
}
|
||||
|
||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
|
||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||
}
|
||||
|
||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
|
||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
|
||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -378,7 +379,7 @@ func (provider *provider) CreateChannel(ctx context.Context, orgID string, recei
|
||||
}))
|
||||
}
|
||||
|
||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
|
||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
|
||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
@@ -99,11 +100,11 @@ func (provider *provider) ListAllChannels(ctx context.Context) ([]*alertmanagert
|
||||
return nil, errors.Newf(errors.TypeUnsupported, errors.CodeUnsupported, "not supported by provider signoz")
|
||||
}
|
||||
|
||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID int) (*alertmanagertypes.Channel, error) {
|
||||
func (provider *provider) GetChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) (*alertmanagertypes.Channel, error) {
|
||||
return provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||
}
|
||||
|
||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id int) error {
|
||||
func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgID string, receiver alertmanagertypes.Receiver, id valuer.UUID) error {
|
||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -127,7 +128,7 @@ func (provider *provider) UpdateChannelByReceiverAndID(ctx context.Context, orgI
|
||||
}))
|
||||
}
|
||||
|
||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID int) error {
|
||||
func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, channelID valuer.UUID) error {
|
||||
channel, err := provider.configStore.GetChannelByID(ctx, orgID, channelID)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
58
pkg/http/client/http.go
Normal file
@@ -0,0 +1,58 @@
package client

import (
"log/slog"
"net/http"
"time"

"github.com/SigNoz/signoz/pkg/http/client/plugin"
"github.com/gojek/heimdall/v7/httpclient"
"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
)

type Client struct {
c *httpclient.Client
netc *http.Client
}

func New(logger *slog.Logger, tracerProvider trace.TracerProvider, meterProvider metric.MeterProvider, opts ...Option) (*Client, error) {
clientOpts := options{
retryCount: 3,
requestResponseLog: false,
timeout: 5 * time.Second,
}

for _, opt := range opts {
opt(&clientOpts)
}

netc := &http.Client{
Timeout: clientOpts.timeout,
Transport: otelhttp.NewTransport(http.DefaultTransport, otelhttp.WithTracerProvider(tracerProvider), otelhttp.WithMeterProvider(meterProvider)),
}

c := httpclient.NewClient(
httpclient.WithHTTPClient(netc),
httpclient.WithRetrier(clientOpts.retriable),
httpclient.WithRetryCount(clientOpts.retryCount),
)

if clientOpts.requestResponseLog {
c.AddPlugin(plugin.NewLog(logger))
}

return &Client{
netc: netc,
c: c,
}, nil
}

func (c *Client) Do(request *http.Request) (*http.Response, error) {
return c.c.Do(request)
}

func (c *Client) Client() *http.Client {
return c.netc
}
42
pkg/http/client/option.go
Normal file
@@ -0,0 +1,42 @@
package client

import (
"time"

"github.com/gojek/heimdall/v7"
)

type Retriable = heimdall.Retriable

type options struct {
retryCount int
requestResponseLog bool
timeout time.Duration
retriable Retriable
}

type Option func(*options)

func WithRetryCount(i int) Option {
return func(o *options) {
o.retryCount = i
}
}

func WithTimeout(i time.Duration) Option {
return func(o *options) {
o.timeout = i
}
}

func WithRequestResponseLog(b bool) Option {
return func(o *options) {
o.requestResponseLog = b
}
}

func WithRetriable(retriable Retriable) Option {
return func(o *options) {
o.retriable = retriable
}
}
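The new client wraps heimdall with OpenTelemetry transport instrumentation and exposes functional options for timeout, retries, and request/response logging. A minimal usage sketch, assuming the global otel providers are acceptable to the caller; the backoff values and the target URL are illustrative assumptions, not part of this change:

// Sketch only: build the instrumented client and issue one request.
package main

import (
	"log/slog"
	"net/http"
	"os"
	"time"

	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/gojek/heimdall/v7"
	"go.opentelemetry.io/otel"
)

func main() {
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
	// Constant backoff retrier from heimdall; values are illustrative.
	retrier := heimdall.NewRetrier(heimdall.NewConstantBackoff(100*time.Millisecond, 50*time.Millisecond))

	c, err := client.New(
		logger,
		otel.GetTracerProvider(),
		otel.GetMeterProvider(),
		client.WithTimeout(10*time.Second),
		client.WithRetryCount(2),
		client.WithRetriable(retrier),
		client.WithRequestResponseLog(true),
	)
	if err != nil {
		logger.Error("failed to build client", "error", err)
		return
	}

	req, _ := http.NewRequest(http.MethodGet, "https://example.com/healthz", nil)
	resp, err := c.Do(req)
	if err != nil {
		logger.Error("request failed", "error", err)
		return
	}
	defer resp.Body.Close()
	logger.Info("request complete", "status", resp.StatusCode)
}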
77
pkg/http/client/plugin/log.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package plugin
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
|
||||
"github.com/gojek/heimdall/v7"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.27.0"
|
||||
)
|
||||
|
||||
type reqResLog struct {
|
||||
logger *slog.Logger
|
||||
}
|
||||
|
||||
func NewLog(logger *slog.Logger) heimdall.Plugin {
|
||||
return &reqResLog{
|
||||
logger: logger,
|
||||
}
|
||||
}
|
||||
|
||||
func (plugin *reqResLog) OnRequestStart(request *http.Request) {
|
||||
host, port, _ := net.SplitHostPort(request.Host)
|
||||
fields := []any{
|
||||
string(semconv.HTTPRequestMethodKey), request.Method,
|
||||
string(semconv.URLPathKey), request.URL.Path,
|
||||
string(semconv.URLSchemeKey), request.URL.Scheme,
|
||||
string(semconv.UserAgentOriginalKey), request.UserAgent(),
|
||||
string(semconv.ServerAddressKey), host,
|
||||
string(semconv.ServerPortKey), port,
|
||||
string(semconv.HTTPRequestSizeKey), request.ContentLength,
|
||||
"http.request.headers", request.Header,
|
||||
}
|
||||
|
||||
plugin.logger.InfoContext(request.Context(), "::SENT-REQUEST::", fields...)
|
||||
}
|
||||
|
||||
func (plugin *reqResLog) OnRequestEnd(request *http.Request, response *http.Response) {
|
||||
fields := []any{
|
||||
string(semconv.HTTPResponseStatusCodeKey), response.StatusCode,
|
||||
string(semconv.HTTPResponseBodySizeKey), response.ContentLength,
|
||||
}
|
||||
|
||||
bodybytes, err := io.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
plugin.logger.DebugContext(request.Context(), "::UNABLE-TO-LOG-RESPONSE-BODY::", "error", err)
|
||||
} else {
|
||||
_ = response.Body.Close()
|
||||
response.Body = io.NopCloser(bytes.NewBuffer(bodybytes))
|
||||
|
||||
if len(bodybytes) > 0 {
|
||||
fields = append(fields, "http.response.body", string(bodybytes))
|
||||
} else {
|
||||
fields = append(fields, "http.response.body", "(empty)")
|
||||
}
|
||||
}
|
||||
|
||||
plugin.logger.InfoContext(request.Context(), "::RECEIVED-RESPONSE::", fields...)
|
||||
}
|
||||
|
||||
func (plugin *reqResLog) OnError(request *http.Request, err error) {
|
||||
host, port, _ := net.SplitHostPort(request.Host)
|
||||
fields := []any{
|
||||
err,
|
||||
string(semconv.HTTPRequestMethodKey), request.Method,
|
||||
string(semconv.URLPathKey), request.URL.Path,
|
||||
string(semconv.URLSchemeKey), request.URL.Scheme,
|
||||
string(semconv.UserAgentOriginalKey), request.UserAgent(),
|
||||
string(semconv.ServerAddressKey), host,
|
||||
string(semconv.ServerPortKey), port,
|
||||
string(semconv.HTTPRequestSizeKey), request.ContentLength,
|
||||
}
|
||||
|
||||
plugin.logger.ErrorContext(request.Context(), "::UNABLE-TO-SEND-REQUEST::", fields...)
|
||||
}
|
||||
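Because OnRequestEnd reads the response body for logging and then restores it with io.NopCloser, callers can still consume the body afterwards. A short sketch of attaching the plugin directly to a plain heimdall client, without going through client.New above; the timeout and URL are assumptions:

// Sketch only: wire the request/response log plugin onto a heimdall client.
package main

import (
	"log/slog"
	"os"
	"time"

	"github.com/SigNoz/signoz/pkg/http/client/plugin"
	"github.com/gojek/heimdall/v7/httpclient"
)

func main() {
	logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))

	c := httpclient.NewClient(httpclient.WithHTTPTimeout(5 * time.Second))
	// The plugin buffers and restores the response body, so it stays readable.
	c.AddPlugin(plugin.NewLog(logger))

	resp, err := c.Get("https://example.com/healthz", nil)
	if err != nil {
		logger.Error("request failed", "error", err)
		return
	}
	defer resp.Body.Close()
}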
149
pkg/modules/preference/api.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package preference
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
|
||||
errorsV2 "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
type API interface {
|
||||
GetOrgPreference(http.ResponseWriter, *http.Request)
|
||||
UpdateOrgPreference(http.ResponseWriter, *http.Request)
|
||||
GetAllOrgPreferences(http.ResponseWriter, *http.Request)
|
||||
|
||||
GetUserPreference(http.ResponseWriter, *http.Request)
|
||||
UpdateUserPreference(http.ResponseWriter, *http.Request)
|
||||
GetAllUserPreferences(http.ResponseWriter, *http.Request)
|
||||
}
|
||||
|
||||
type preferenceAPI struct {
|
||||
usecase Usecase
|
||||
}
|
||||
|
||||
func NewAPI(usecase Usecase) API {
|
||||
return &preferenceAPI{usecase: usecase}
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
preference, err := p.usecase.GetOrgPreference(
|
||||
r.Context(), preferenceId, claims.OrgID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) UpdateOrgPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
req := preferencetypes.UpdatablePreference{}
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
err = p.usecase.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetAllOrgPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
preferences, err := p.usecase.GetAllOrgPreferences(
|
||||
r.Context(), claims.OrgID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
|
||||
preference, err := p.usecase.GetUserPreference(
|
||||
r.Context(), preferenceId, claims.OrgID, claims.UserID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preference)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) UpdateUserPreference(rw http.ResponseWriter, r *http.Request) {
|
||||
preferenceId := mux.Vars(r)["preferenceId"]
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
req := preferencetypes.UpdatablePreference{}
|
||||
|
||||
err := json.NewDecoder(r.Body).Decode(&req)
|
||||
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
err = p.usecase.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusNoContent, nil)
|
||||
}
|
||||
|
||||
func (p *preferenceAPI) GetAllUserPreferences(rw http.ResponseWriter, r *http.Request) {
|
||||
claims, ok := authtypes.ClaimsFromContext(r.Context())
|
||||
if !ok {
|
||||
render.Error(rw, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
|
||||
return
|
||||
}
|
||||
preferences, err := p.usecase.GetAllUserPreferences(
|
||||
r.Context(), claims.OrgID, claims.UserID,
|
||||
)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, preferences)
|
||||
}
|
||||
278
pkg/modules/preference/core/preference.go
Normal file
@@ -0,0 +1,278 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type usecase struct {
|
||||
store preferencetypes.PreferenceStore
|
||||
defaultMap map[string]preferencetypes.Preference
|
||||
}
|
||||
|
||||
func NewPreference(store preferencetypes.PreferenceStore, defaultMap map[string]preferencetypes.Preference) preference.Usecase {
|
||||
return &usecase{store: store, defaultMap: defaultMap}
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetOrgPreference(ctx context.Context, preferenceID string, orgID string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
}
|
||||
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||
}
|
||||
|
||||
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgID, preferenceID)
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return &preferencetypes.GettablePreference{
|
||||
PreferenceID: preferenceID,
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}, nil
|
||||
}
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
|
||||
}
|
||||
|
||||
return &preferencetypes.GettablePreference{
|
||||
PreferenceID: preferenceID,
|
||||
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) UpdateOrgPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, orgId string) error {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
}
|
||||
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at org scope: %s", preferenceID))
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
|
||||
if encodeErr != nil {
|
||||
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
|
||||
}
|
||||
|
||||
orgPreference, dberr := usecase.store.GetOrgPreference(ctx, orgId, preferenceID)
|
||||
if dberr != nil && dberr != sql.ErrNoRows {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
|
||||
}
|
||||
|
||||
if dberr != nil {
|
||||
orgPreference.ID = valuer.GenerateUUID()
|
||||
orgPreference.PreferenceID = preferenceID
|
||||
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
orgPreference.OrgID = orgId
|
||||
} else {
|
||||
orgPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
}
|
||||
|
||||
dberr = usecase.store.UpsertOrgPreference(ctx, orgPreference)
|
||||
if dberr != nil {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allOrgPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||
}
|
||||
|
||||
preferenceValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range orgPreferences {
|
||||
preferenceValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
for _, preference := range usecase.defaultMap {
|
||||
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isEnabledForOrgScope {
|
||||
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
|
||||
preferenceWithValue.Key = preference.Key
|
||||
preferenceWithValue.Name = preference.Name
|
||||
preferenceWithValue.Description = preference.Description
|
||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||
preferenceWithValue.Range = preference.Range
|
||||
preferenceWithValue.ValueType = preference.ValueType
|
||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||
value, seen := preferenceValueMap[preference.Key]
|
||||
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
} else {
|
||||
preferenceWithValue.Value = preference.DefaultValue
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return allOrgPreferences, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetUserPreference(ctx context.Context, preferenceID string, orgId string, userId string) (*preferencetypes.GettablePreference, error) {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
}
|
||||
|
||||
preferenceValue := preferencetypes.GettablePreference{
|
||||
PreferenceID: preferenceID,
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isPreferenceEnabledAtOrgScope {
|
||||
orgPreference, err := usecase.store.GetOrgPreference(ctx, orgId, preferenceID)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the org preference: %s", preferenceID))
|
||||
}
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = orgPreference.PreferenceValue
|
||||
}
|
||||
}
|
||||
|
||||
userPreference, err := usecase.store.GetUserPreference(ctx, userId, preferenceID)
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, fmt.Sprintf("error in fetching the user preference: %s", preferenceID))
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = userPreference.PreferenceValue
|
||||
}
|
||||
|
||||
return &preferencetypes.GettablePreference{
|
||||
PreferenceID: preferenceValue.PreferenceID,
|
||||
PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) UpdateUserPreference(ctx context.Context, preferenceID string, preferenceValue interface{}, userId string) error {
|
||||
preference, seen := usecase.defaultMap[preferenceID]
|
||||
if !seen {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("no such preferenceID exists: %s", preferenceID))
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("preference is not enabled at user scope: %s", preferenceID))
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storablePreferenceValue, encodeErr := json.Marshal(preferenceValue)
|
||||
if encodeErr != nil {
|
||||
return errors.Wrapf(encodeErr, errors.TypeInvalidInput, errors.CodeInvalidInput, "error in encoding the preference value")
|
||||
}
|
||||
|
||||
userPreference, dberr := usecase.store.GetUserPreference(ctx, userId, preferenceID)
|
||||
if dberr != nil && dberr != sql.ErrNoRows {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in getting the preference value")
|
||||
}
|
||||
|
||||
if dberr != nil {
|
||||
userPreference.ID = valuer.GenerateUUID()
|
||||
userPreference.PreferenceID = preferenceID
|
||||
userPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
userPreference.UserID = userId
|
||||
} else {
|
||||
userPreference.PreferenceValue = string(storablePreferenceValue)
|
||||
}
|
||||
|
||||
dberr = usecase.store.UpsertUserPreference(ctx, userPreference)
|
||||
if dberr != nil {
|
||||
return errors.Wrapf(dberr, errors.TypeInternal, errors.CodeInternal, "error in setting the preference value")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (usecase *usecase) GetAllUserPreferences(ctx context.Context, orgID string, userID string) ([]*preferencetypes.PreferenceWithValue, error) {
|
||||
allUserPreferences := []*preferencetypes.PreferenceWithValue{}
|
||||
|
||||
orgPreferences, err := usecase.store.GetAllOrgPreferences(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all org preference values")
|
||||
}
|
||||
|
||||
preferenceOrgValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range orgPreferences {
|
||||
preferenceOrgValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
userPreferences, err := usecase.store.GetAllUserPreferences(ctx, userID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "error in setting all user preference values")
|
||||
}
|
||||
|
||||
preferenceUserValueMap := map[string]interface{}{}
|
||||
for _, preferenceValue := range userPreferences {
|
||||
preferenceUserValueMap[preferenceValue.PreferenceID] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
for _, preference := range usecase.defaultMap {
|
||||
isEnabledForUserScope := preference.IsEnabledForScope(preferencetypes.UserAllowedScope)
|
||||
|
||||
if isEnabledForUserScope {
|
||||
preferenceWithValue := &preferencetypes.PreferenceWithValue{}
|
||||
preferenceWithValue.Key = preference.Key
|
||||
preferenceWithValue.Name = preference.Name
|
||||
preferenceWithValue.Description = preference.Description
|
||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||
preferenceWithValue.Range = preference.Range
|
||||
preferenceWithValue.ValueType = preference.ValueType
|
||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||
preferenceWithValue.Value = preference.DefaultValue
|
||||
|
||||
isEnabledForOrgScope := preference.IsEnabledForScope(preferencetypes.OrgAllowedScope)
|
||||
if isEnabledForOrgScope {
|
||||
value, seen := preferenceOrgValueMap[preference.Key]
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
}
|
||||
}
|
||||
|
||||
value, seen := preferenceUserValueMap[preference.Key]
|
||||
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allUserPreferences = append(allUserPreferences, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return allUserPreferences, nil
|
||||
}
|
||||
116
pkg/modules/preference/core/store.go
Normal file
@@ -0,0 +1,116 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
)
|
||||
|
||||
type store struct {
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func NewStore(db sqlstore.SQLStore) preferencetypes.PreferenceStore {
|
||||
return &store{store: db}
|
||||
}
|
||||
|
||||
func (store *store) GetOrgPreference(ctx context.Context, orgID string, preferenceID string) (*preferencetypes.StorableOrgPreference, error) {
|
||||
orgPreference := new(preferencetypes.StorableOrgPreference)
|
||||
err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(orgPreference).
|
||||
Where("preference_id = ?", preferenceID).
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return orgPreference, err
|
||||
}
|
||||
|
||||
return orgPreference, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAllOrgPreferences(ctx context.Context, orgID string) ([]*preferencetypes.StorableOrgPreference, error) {
|
||||
orgPreferences := make([]*preferencetypes.StorableOrgPreference, 0)
|
||||
err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&orgPreferences).
|
||||
Where("org_id = ?", orgID).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return orgPreferences, err
|
||||
}
|
||||
|
||||
return orgPreferences, nil
|
||||
}
|
||||
|
||||
func (store *store) UpsertOrgPreference(ctx context.Context, orgPreference *preferencetypes.StorableOrgPreference) error {
|
||||
_, err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(orgPreference).
|
||||
On("CONFLICT (id) DO UPDATE").
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (store *store) GetUserPreference(ctx context.Context, userID string, preferenceID string) (*preferencetypes.StorableUserPreference, error) {
|
||||
userPreference := new(preferencetypes.StorableUserPreference)
|
||||
err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(userPreference).
|
||||
Where("preference_id = ?", preferenceID).
|
||||
Where("user_id = ?", userID).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return userPreference, err
|
||||
}
|
||||
|
||||
return userPreference, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAllUserPreferences(ctx context.Context, userID string) ([]*preferencetypes.StorableUserPreference, error) {
|
||||
userPreferences := make([]*preferencetypes.StorableUserPreference, 0)
|
||||
err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(&userPreferences).
|
||||
Where("user_id = ?", userID).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return userPreferences, err
|
||||
}
|
||||
|
||||
return userPreferences, nil
|
||||
}
|
||||
|
||||
func (store *store) UpsertUserPreference(ctx context.Context, userPreference *preferencetypes.StorableUserPreference) error {
|
||||
_, err := store.
|
||||
store.
|
||||
BunDB().
|
||||
NewInsert().
|
||||
Model(userPreference).
|
||||
On("CONFLICT (id) DO UPDATE").
|
||||
Exec(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
17
pkg/modules/preference/usecase.go
Normal file
@@ -0,0 +1,17 @@
package preference

import (
"context"

"github.com/SigNoz/signoz/pkg/types/preferencetypes"
)

type Usecase interface {
GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*preferencetypes.GettablePreference, error)
UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) error
GetAllOrgPreferences(ctx context.Context, orgId string) ([]*preferencetypes.PreferenceWithValue, error)

GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*preferencetypes.GettablePreference, error)
UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) error
GetAllUserPreferences(ctx context.Context, orgId string, userId string) ([]*preferencetypes.PreferenceWithValue, error)
}
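The preference API methods have the standard handler signature and read {preferenceId} from mux path variables, so exposing them is a matter of registering routes on an authenticated router. A minimal sketch; the paths and HTTP methods are assumptions and the handlers expect auth claims to already be present in the request context:

// Sketch only: route registration for the preference API handlers.
package preferencehttp

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/modules/preference"
	"github.com/gorilla/mux"
)

func RegisterPreferenceRoutes(router *mux.Router, api preference.API) {
	router.HandleFunc("/api/v1/org/preferences", api.GetAllOrgPreferences).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/org/preferences/{preferenceId}", api.GetOrgPreference).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/org/preferences/{preferenceId}", api.UpdateOrgPreference).Methods(http.MethodPut)

	router.HandleFunc("/api/v1/user/preferences", api.GetAllUserPreferences).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/user/preferences/{preferenceId}", api.GetUserPreference).Methods(http.MethodGet)
	router.HandleFunc("/api/v1/user/preferences/{preferenceId}", api.UpdateUserPreference).Methods(http.MethodPut)
}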
@@ -1,6 +1,9 @@
package agentConf

import "github.com/SigNoz/signoz/pkg/query-service/model"
import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
)

// Interface for features implemented via agent config.
// Eg: ingestion side signal pre-processing features like log processing pipelines etc
@@ -11,8 +14,9 @@ type AgentFeature interface {
// Recommend config for an agent based on its `currentConfYaml` and
// `configVersion` for the feature's settings
RecommendAgentConfig(
orgId string,
currentConfYaml []byte,
configVersion *ConfigVersion,
configVersion *types.AgentConfigVersion,
) (
recommendedConfYaml []byte,
@@ -6,8 +6,9 @@ import (
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/pkg/errors"
|
||||
"go.uber.org/zap"
|
||||
"golang.org/x/exp/slices"
|
||||
@@ -15,42 +16,33 @@ import (
|
||||
|
||||
// Repo handles DDL and DML ops on ingestion rules
|
||||
type Repo struct {
|
||||
db *sqlx.DB
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func (r *Repo) GetConfigHistory(
|
||||
ctx context.Context, typ ElementTypeDef, limit int,
|
||||
) ([]ConfigVersion, *model.ApiError) {
|
||||
var c []ConfigVersion
|
||||
err := r.db.SelectContext(ctx, &c, fmt.Sprintf(`SELECT
|
||||
version,
|
||||
id,
|
||||
element_type,
|
||||
COALESCE(created_by, -1) as created_by,
|
||||
created_at,
|
||||
COALESCE((SELECT NAME FROM users
|
||||
WHERE id = v.created_by), "unknown") created_by_name,
|
||||
active,
|
||||
is_valid,
|
||||
disabled,
|
||||
deploy_status,
|
||||
deploy_result,
|
||||
coalesce(last_hash, '') as last_hash,
|
||||
coalesce(last_config, '{}') as last_config
|
||||
FROM agent_config_versions AS v
|
||||
WHERE element_type = $1
|
||||
ORDER BY created_at desc, version desc
|
||||
limit %v`, limit),
|
||||
typ)
|
||||
ctx context.Context, orgId string, typ types.ElementTypeDef, limit int,
|
||||
) ([]types.AgentConfigVersion, *model.ApiError) {
|
||||
var c []types.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
ColumnExpr("id, version, element_type, active, is_valid, disabled, deploy_status, deploy_result, created_at").
|
||||
ColumnExpr("COALESCE(created_by, '') as created_by").
|
||||
ColumnExpr(`COALESCE((SELECT NAME FROM users WHERE users.id = acv.created_by), 'unknown') as created_by_name`).
|
||||
ColumnExpr("COALESCE(last_hash, '') as last_hash, COALESCE(last_config, '{}') as last_config").
|
||||
Where("acv.element_type = ?", typ).
|
||||
Where("acv.org_id = ?", orgId).
|
||||
OrderExpr("acv.created_at DESC, acv.version DESC").
|
||||
Limit(limit).
|
||||
Scan(ctx)
|
||||
|
||||
if err != nil {
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
incompleteStatuses := []DeployStatus{DeployInitiated, Deploying}
|
||||
incompleteStatuses := []types.DeployStatus{types.DeployInitiated, types.Deploying}
|
||||
for idx := 1; idx < len(c); idx++ {
|
||||
if slices.Contains(incompleteStatuses, c[idx].DeployStatus) {
|
||||
c[idx].DeployStatus = DeployStatusUnknown
|
||||
c[idx].DeployStatus = types.DeployStatusUnknown
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,32 +50,24 @@ func (r *Repo) GetConfigHistory(
|
||||
}
|
||||
|
||||
func (r *Repo) GetConfigVersion(
|
||||
ctx context.Context, typ ElementTypeDef, v int,
|
||||
) (*ConfigVersion, *model.ApiError) {
|
||||
var c ConfigVersion
|
||||
err := r.db.GetContext(ctx, &c, `SELECT
|
||||
id,
|
||||
version,
|
||||
element_type,
|
||||
COALESCE(created_by, -1) as created_by,
|
||||
created_at,
|
||||
COALESCE((SELECT NAME FROM users
|
||||
WHERE id = v.created_by), "unknown") created_by_name,
|
||||
active,
|
||||
is_valid,
|
||||
disabled,
|
||||
deploy_status,
|
||||
deploy_result,
|
||||
coalesce(last_hash, '') as last_hash,
|
||||
coalesce(last_config, '{}') as last_config
|
||||
FROM agent_config_versions v
|
||||
WHERE element_type = $1
|
||||
AND version = $2`, typ, v)
|
||||
ctx context.Context, orgId string, typ types.ElementTypeDef, v int,
|
||||
) (*types.AgentConfigVersion, *model.ApiError) {
|
||||
var c types.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
ColumnExpr("id, version, element_type, active, is_valid, disabled, deploy_status, deploy_result, created_at").
|
||||
ColumnExpr("COALESCE(created_by, '') as created_by").
|
||||
ColumnExpr(`COALESCE((SELECT NAME FROM users WHERE users.id = acv.created_by), 'unknown') as created_by_name`).
|
||||
ColumnExpr("COALESCE(last_hash, '') as last_hash, COALESCE(last_config, '{}') as last_config").
|
||||
Where("acv.element_type = ?", typ).
|
||||
Where("acv.version = ?", v).
|
||||
Where("acv.org_id = ?", orgId).
|
||||
Scan(ctx)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.NotFoundError(err)
|
||||
}
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, model.NotFoundError(err)
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
@@ -91,33 +75,23 @@ func (r *Repo) GetConfigVersion(
|
||||
}
|
||||
|
||||
func (r *Repo) GetLatestVersion(
|
||||
ctx context.Context, typ ElementTypeDef,
|
||||
) (*ConfigVersion, *model.ApiError) {
|
||||
var c ConfigVersion
|
||||
err := r.db.GetContext(ctx, &c, `SELECT
|
||||
id,
|
||||
version,
|
||||
element_type,
|
||||
COALESCE(created_by, -1) as created_by,
|
||||
created_at,
|
||||
COALESCE((SELECT NAME FROM users
|
||||
WHERE id = v.created_by), "unknown") created_by_name,
|
||||
active,
|
||||
is_valid,
|
||||
disabled,
|
||||
deploy_status,
|
||||
deploy_result
|
||||
FROM agent_config_versions AS v
|
||||
WHERE element_type = $1
|
||||
AND version = (
|
||||
SELECT MAX(version)
|
||||
FROM agent_config_versions
|
||||
WHERE element_type=$2)`, typ, typ)
|
||||
ctx context.Context, orgId string, typ types.ElementTypeDef,
|
||||
) (*types.AgentConfigVersion, *model.ApiError) {
|
||||
var c types.AgentConfigVersion
|
||||
err := r.store.BunDB().NewSelect().
|
||||
Model(&c).
|
||||
ColumnExpr("id, version, element_type, active, is_valid, disabled, deploy_status, deploy_result, created_at").
|
||||
ColumnExpr("COALESCE(created_by, '') as created_by").
|
||||
ColumnExpr(`COALESCE((SELECT NAME FROM users WHERE users.id = acv.created_by), 'unknown') as created_by_name`).
|
||||
Where("acv.element_type = ?", typ).
|
||||
Where("acv.org_id = ?", orgId).
|
||||
Where("version = (SELECT MAX(version) FROM agent_config_versions WHERE acv.element_type = ?)", typ).
|
||||
Scan(ctx)
|
||||
|
||||
if err == sql.ErrNoRows {
|
||||
return nil, model.NotFoundError(err)
|
||||
}
|
||||
if err != nil {
|
||||
if errors.Is(err, sql.ErrNoRows) {
|
||||
return nil, model.NotFoundError(err)
|
||||
}
|
||||
return nil, model.InternalError(err)
|
||||
}
|
||||
|
||||
@@ -125,7 +99,7 @@ func (r *Repo) GetLatestVersion(
|
||||
}
|
||||
|
||||
func (r *Repo) insertConfig(
|
||||
ctx context.Context, userId string, c *ConfigVersion, elements []string,
|
||||
ctx context.Context, orgId string, userId string, c *types.AgentConfigVersion, elements []string,
|
||||
) (fnerr *model.ApiError) {
|
||||
|
||||
if string(c.ElementType) == "" {
|
||||
@@ -135,7 +109,7 @@ func (r *Repo) insertConfig(
|
||||
}
|
||||
|
||||
// allowing empty elements for logs - use case is deleting all pipelines
|
||||
if len(elements) == 0 && c.ElementType != ElementTypeLogPipelines {
|
||||
if len(elements) == 0 && c.ElementType != types.ElementTypeLogPipelines {
|
||||
zap.L().Error("insert config called with no elements ", zap.String("ElementType", string(c.ElementType)))
|
||||
return model.BadRequest(fmt.Errorf("config must have atleast one element"))
|
||||
}
|
||||
@@ -150,14 +124,14 @@ func (r *Repo) insertConfig(
|
||||
))
|
||||
}
|
||||
|
||||
configVersion, err := r.GetLatestVersion(ctx, c.ElementType)
|
||||
configVersion, err := r.GetLatestVersion(ctx, orgId, c.ElementType)
|
||||
if err != nil && err.Type() != model.ErrorNotFound {
|
||||
zap.L().Error("failed to fetch latest config version", zap.Error(err))
|
||||
return model.InternalError(fmt.Errorf("failed to fetch latest config version"))
|
||||
}
|
||||
|
||||
if configVersion != nil {
|
-		c.Version = updateVersion(configVersion.Version)
+		c.Version = types.UpdateVersion(configVersion.Version)
	} else {
		// first version
		c.Version = 1
@@ -166,57 +140,43 @@ func (r *Repo) insertConfig(
	defer func() {
		if fnerr != nil {
			// remove all the damage (invalid rows from db)
-			_, _ = r.db.Exec("DELETE FROM agent_config_versions WHERE id = $1", c.ID)
-			_, _ = r.db.Exec("DELETE FROM agent_config_elements WHERE version_id=$1", c.ID)
+			r.store.BunDB().NewDelete().Model((*types.AgentConfigVersion)(nil)).Where("id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx)
+			r.store.BunDB().NewDelete().Model((*types.AgentConfigElement)(nil)).Where("version_id = ?", c.ID).Where("org_id = ?", orgId).Exec(ctx)
		}
	}()

	// insert config
-	configQuery := `INSERT INTO agent_config_versions(
-		id,
-		version,
-		created_by,
-		element_type,
-		active,
-		is_valid,
-		disabled,
-		deploy_status,
-		deploy_result)
-		VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`
-
-	_, dbErr := r.db.ExecContext(ctx,
-		configQuery,
-		c.ID,
-		c.Version,
-		userId,
-		c.ElementType,
-		false,
-		false,
-		false,
-		c.DeployStatus,
-		c.DeployResult)
+	_, dbErr := r.store.BunDB().NewInsert().
+		Model(&types.AgentConfigVersion{
+			OrgID:        orgId,
+			Identifiable: types.Identifiable{ID: c.ID},
+			Version:      c.Version,
+			UserAuditable: types.UserAuditable{
+				CreatedBy: userId,
+			},
+			ElementType:  c.ElementType,
+			Active:       false, // default value
+			IsValid:      false, // default value
+			Disabled:     false, // default value
+			DeployStatus: c.DeployStatus,
+			DeployResult: c.DeployResult,
+		}).
+		Exec(ctx)

	if dbErr != nil {
		zap.L().Error("error in inserting config version: ", zap.Error(dbErr))
		return model.InternalError(errors.Wrap(dbErr, "failed to insert ingestion rule"))
	}

-	elementsQuery := `INSERT INTO agent_config_elements(
-		id,
-		version_id,
-		element_type,
-		element_id)
-		VALUES ($1, $2, $3, $4)`
-
	for _, e := range elements {
-		_, dbErr = r.db.ExecContext(
-			ctx,
-			elementsQuery,
-			uuid.NewString(),
-			c.ID,
-			c.ElementType,
-			e,
-		)
+		agentConfigElement := &types.AgentConfigElement{
+			OrgID:        orgId,
+			Identifiable: types.Identifiable{ID: valuer.GenerateUUID()},
+			VersionID:    c.ID.StringValue(),
+			ElementType:  string(c.ElementType),
+			ElementID:    e,
+		}
+		_, dbErr = r.store.BunDB().NewInsert().Model(agentConfigElement).Exec(ctx)
		if dbErr != nil {
			return model.InternalError(dbErr)
		}
@@ -226,40 +186,43 @@ func (r *Repo) insertConfig(
}

func (r *Repo) updateDeployStatus(ctx context.Context,
-	elementType ElementTypeDef,
+	orgId string,
+	elementType types.ElementTypeDef,
	version int,
	status string,
	result string,
	lastHash string,
	lastconf string) *model.ApiError {

-	updateQuery := `UPDATE agent_config_versions
-	set deploy_status = $1,
-	deploy_result = $2,
-	last_hash = COALESCE($3, last_hash),
-	last_config = $4
-	WHERE version=$5
-	AND element_type = $6`
-
-	_, err := r.db.ExecContext(ctx, updateQuery, status, result, lastHash, lastconf, version, string(elementType))
+	_, err := r.store.BunDB().NewUpdate().
+		Model((*types.AgentConfigVersion)(nil)).
+		Set("deploy_status = ?", status).
+		Set("deploy_result = ?", result).
+		Set("last_hash = COALESCE(?, last_hash)", lastHash).
+		Set("last_config = ?", lastconf).
+		Where("version = ?", version).
+		Where("element_type = ?", elementType).
+		Where("org_id = ?", orgId).
+		Exec(ctx)
	if err != nil {
		zap.L().Error("failed to update deploy status", zap.Error(err))
		return model.BadRequest(fmt.Errorf("failed to update deploy status"))
	}

	return nil
}

func (r *Repo) updateDeployStatusByHash(
-	ctx context.Context, confighash string, status string, result string,
+	ctx context.Context, orgId string, confighash string, status string, result string,
) *model.ApiError {

-	updateQuery := `UPDATE agent_config_versions
-	set deploy_status = $1,
-	deploy_result = $2
-	WHERE last_hash=$4`
-
-	_, err := r.db.ExecContext(ctx, updateQuery, status, result, confighash)
+	_, err := r.store.BunDB().NewUpdate().
+		Model((*types.AgentConfigVersion)(nil)).
+		Set("deploy_status = ?", status).
+		Set("deploy_result = ?", result).
+		Where("last_hash = ?", confighash).
+		Where("org_id = ?", orgId).
+		Exec(ctx)
	if err != nil {
		zap.L().Error("failed to update deploy status", zap.Error(err))
		return model.InternalError(errors.Wrap(err, "failed to update deploy status"))
@@ -12,8 +12,9 @@ import (
	filterprocessor "github.com/SigNoz/signoz/pkg/query-service/app/opamp/otelconfig/filterprocessor"
	tsp "github.com/SigNoz/signoz/pkg/query-service/app/opamp/otelconfig/tailsampler"
	"github.com/SigNoz/signoz/pkg/query-service/model"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
	"github.com/google/uuid"
-	"github.com/jmoiron/sqlx"
	"github.com/pkg/errors"
	"go.uber.org/zap"
	yaml "gopkg.in/yaml.v3"
@@ -39,7 +40,7 @@ type Manager struct {
}

type ManagerOptions struct {
-	DB *sqlx.DB
+	Store sqlstore.SQLStore

	// When acting as opamp.AgentConfigProvider, agent conf recommendations are
	// applied to the base conf in the order the features have been specified here.
@@ -60,7 +61,7 @@ func Initiate(options *ManagerOptions) (*Manager, error) {
	}

	m = &Manager{
-		Repo:              Repo{options.DB},
+		Repo:              Repo{options.Store},
		agentFeatures:     options.AgentFeatures,
		configSubscribers: map[string]func(){},
	}
@@ -90,7 +91,7 @@ func (m *Manager) notifyConfigUpdateSubscribers() {
}

// Implements opamp.AgentConfigProvider
-func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (
+func (m *Manager) RecommendAgentConfig(orgId string, currentConfYaml []byte) (
	recommendedConfYaml []byte,
	// Opaque id of the recommended config, used for reporting deployment status updates
	configId string,
@@ -100,13 +101,13 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (
	settingVersionsUsed := []string{}

	for _, feature := range m.agentFeatures {
-		featureType := ElementTypeDef(feature.AgentFeatureType())
-		latestConfig, apiErr := GetLatestVersion(context.Background(), featureType)
+		featureType := types.ElementTypeDef(feature.AgentFeatureType())
+		latestConfig, apiErr := GetLatestVersion(context.Background(), orgId, featureType)
		if apiErr != nil && apiErr.Type() != model.ErrorNotFound {
			return nil, "", errors.Wrap(apiErr.ToError(), "failed to get latest agent config version")
		}

-		updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(recommendation, latestConfig)
+		updatedConf, serializedSettingsUsed, apiErr := feature.RecommendAgentConfig(orgId, recommendation, latestConfig)
		if apiErr != nil {
			return nil, "", errors.Wrap(apiErr.ToError(), fmt.Sprintf(
				"failed to generate agent config recommendation for %s", featureType,
@@ -129,9 +130,10 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (

		_ = m.updateDeployStatus(
			context.Background(),
+			orgId,
			featureType,
			configVersion,
-			string(DeployInitiated),
+			string(types.DeployInitiated),
			"Deployment has started",
			configId,
			serializedSettingsUsed,
@@ -154,52 +156,53 @@ func (m *Manager) RecommendAgentConfig(currentConfYaml []byte) (

// Implements opamp.AgentConfigProvider
func (m *Manager) ReportConfigDeploymentStatus(
+	orgId string,
	agentId string,
	configId string,
	err error,
) {
	featureConfigIds := strings.Split(configId, ",")
	for _, featureConfId := range featureConfigIds {
-		newStatus := string(Deployed)
+		newStatus := string(types.Deployed)
		message := "Deployment was successful"
		if err != nil {
-			newStatus = string(DeployFailed)
+			newStatus = string(types.DeployFailed)
			message = fmt.Sprintf("%s: %s", agentId, err.Error())
		}
		_ = m.updateDeployStatusByHash(
-			context.Background(), featureConfId, newStatus, message,
+			context.Background(), orgId, featureConfId, newStatus, message,
		)
	}
}

func GetLatestVersion(
-	ctx context.Context, elementType ElementTypeDef,
-) (*ConfigVersion, *model.ApiError) {
-	return m.GetLatestVersion(ctx, elementType)
+	ctx context.Context, orgId string, elementType types.ElementTypeDef,
+) (*types.AgentConfigVersion, *model.ApiError) {
+	return m.GetLatestVersion(ctx, orgId, elementType)
}

func GetConfigVersion(
-	ctx context.Context, elementType ElementTypeDef, version int,
-) (*ConfigVersion, *model.ApiError) {
-	return m.GetConfigVersion(ctx, elementType, version)
+	ctx context.Context, orgId string, elementType types.ElementTypeDef, version int,
+) (*types.AgentConfigVersion, *model.ApiError) {
+	return m.GetConfigVersion(ctx, orgId, elementType, version)
}

func GetConfigHistory(
-	ctx context.Context, typ ElementTypeDef, limit int,
-) ([]ConfigVersion, *model.ApiError) {
-	return m.GetConfigHistory(ctx, typ, limit)
+	ctx context.Context, orgId string, typ types.ElementTypeDef, limit int,
+) ([]types.AgentConfigVersion, *model.ApiError) {
+	return m.GetConfigHistory(ctx, orgId, typ, limit)
}

// StartNewVersion launches a new config version for given set of elements
func StartNewVersion(
-	ctx context.Context, userId string, eleType ElementTypeDef, elementIds []string,
-) (*ConfigVersion, *model.ApiError) {
+	ctx context.Context, orgId string, userId string, eleType types.ElementTypeDef, elementIds []string,
+) (*types.AgentConfigVersion, *model.ApiError) {

	// create a new version
-	cfg := NewConfigVersion(eleType)
+	cfg := types.NewAgentConfigVersion(orgId, eleType)

	// insert new config and elements into database
-	err := m.insertConfig(ctx, userId, cfg, elementIds)
+	err := m.insertConfig(ctx, orgId, userId, cfg, elementIds)
	if err != nil {
		return nil, err
	}
@@ -213,22 +216,22 @@ func NotifyConfigUpdate(ctx context.Context) {
	m.notifyConfigUpdateSubscribers()
}

-func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiError {
+func Redeploy(ctx context.Context, orgId string, typ types.ElementTypeDef, version int) *model.ApiError {

-	configVersion, err := GetConfigVersion(ctx, typ, version)
+	configVersion, err := GetConfigVersion(ctx, orgId, typ, version)
	if err != nil {
		zap.L().Error("failed to fetch config version during redeploy", zap.Error(err))
		return model.WrapApiError(err, "failed to fetch details of the config version")
	}

-	if configVersion == nil || (configVersion != nil && configVersion.LastConf == "") {
+	if configVersion == nil || (configVersion != nil && configVersion.LastConfig == "") {
		zap.L().Debug("config version has no conf yaml", zap.Any("configVersion", configVersion))
		return model.BadRequest(fmt.Errorf("the config version can not be redeployed"))
	}
	switch typ {
-	case ElementTypeSamplingRules:
+	case types.ElementTypeSamplingRules:
		var config *tsp.Config
-		if err := yaml.Unmarshal([]byte(configVersion.LastConf), &config); err != nil {
+		if err := yaml.Unmarshal([]byte(configVersion.LastConfig), &config); err != nil {
			zap.L().Debug("failed to read last conf correctly", zap.Error(err))
			return model.BadRequest(fmt.Errorf("failed to read the stored config correctly"))
		}
@@ -245,10 +248,10 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr
			return model.InternalError(fmt.Errorf("failed to deploy the config"))
		}

-		_ = m.updateDeployStatus(ctx, ElementTypeSamplingRules, version, string(DeployInitiated), "Deployment started", configHash, configVersion.LastConf)
-	case ElementTypeDropRules:
+		m.updateDeployStatus(ctx, orgId, types.ElementTypeSamplingRules, version, string(types.DeployInitiated), "Deployment started", configHash, configVersion.LastConfig)
+	case types.ElementTypeDropRules:
		var filterConfig *filterprocessor.Config
-		if err := yaml.Unmarshal([]byte(configVersion.LastConf), &filterConfig); err != nil {
+		if err := yaml.Unmarshal([]byte(configVersion.LastConfig), &filterConfig); err != nil {
			zap.L().Error("failed to read last conf correctly", zap.Error(err))
			return model.InternalError(fmt.Errorf("failed to read the stored config correctly"))
		}
@@ -263,14 +266,14 @@ func Redeploy(ctx context.Context, typ ElementTypeDef, version int) *model.ApiEr
			return err
		}

-		_ = m.updateDeployStatus(ctx, ElementTypeSamplingRules, version, string(DeployInitiated), "Deployment started", configHash, configVersion.LastConf)
+		m.updateDeployStatus(ctx, orgId, types.ElementTypeSamplingRules, version, string(types.DeployInitiated), "Deployment started", configHash, configVersion.LastConfig)
	}

	return nil
}

// UpsertFilterProcessor updates the agent config with new filter processor params
-func UpsertFilterProcessor(ctx context.Context, version int, config *filterprocessor.Config) error {
+func UpsertFilterProcessor(ctx context.Context, orgId string, version int, config *filterprocessor.Config) error {
	if !atomic.CompareAndSwapUint32(&m.lock, 0, 1) {
		return fmt.Errorf("agent updater is busy")
	}
@@ -294,7 +297,7 @@ func UpsertFilterProcessor(ctx context.Context, version int, config *filterproce
		zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr))
	}

-	_ = m.updateDeployStatus(ctx, ElementTypeDropRules, version, string(DeployInitiated), "Deployment started", configHash, string(processorConfYaml))
+	m.updateDeployStatus(ctx, orgId, types.ElementTypeDropRules, version, string(types.DeployInitiated), "Deployment started", configHash, string(processorConfYaml))
	return nil
}

@@ -303,9 +306,9 @@ func UpsertFilterProcessor(ctx context.Context, version int, config *filterproce
// successful deployment if no error is received.
// this method is currently expected to be called only once in the lifecycle
// but can be improved in future to accept continuous request status updates from opamp
-func (m *Manager) OnConfigUpdate(agentId string, hash string, err error) {
+func (m *Manager) OnConfigUpdate(orgId string, agentId string, hash string, err error) {

-	status := string(Deployed)
+	status := string(types.Deployed)

	message := "Deployment was successful"

@@ -314,15 +317,15 @@ func (m *Manager) OnConfigUpdate(agentId string, hash string, err error) {
	}()

	if err != nil {
-		status = string(DeployFailed)
+		status = string(types.DeployFailed)
		message = fmt.Sprintf("%s: %s", agentId, err.Error())
	}

-	_ = m.updateDeployStatusByHash(context.Background(), hash, status, message)
+	_ = m.updateDeployStatusByHash(context.Background(), orgId, hash, status, message)
}

// UpsertSamplingProcessor updates the agent config with new sampling processor params
-func UpsertSamplingProcessor(ctx context.Context, version int, config *tsp.Config) error {
+func UpsertSamplingProcessor(ctx context.Context, orgId string, version int, config *tsp.Config) error {
	if !atomic.CompareAndSwapUint32(&m.lock, 0, 1) {
		return fmt.Errorf("agent updater is busy")
	}
@@ -345,6 +348,6 @@ func UpsertSamplingProcessor(ctx context.Context, version int, config *tsp.Confi
		zap.L().Warn("unexpected error while transforming processor config to yaml", zap.Error(yamlErr))
	}

-	_ = m.updateDeployStatus(ctx, ElementTypeSamplingRules, version, string(DeployInitiated), "Deployment started", configHash, string(processorConfYaml))
+	m.updateDeployStatus(ctx, orgId, types.ElementTypeSamplingRules, version, string(types.DeployInitiated), "Deployment started", configHash, string(processorConfYaml))
	return nil
}
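Taken together, the hunks above make every package-level helper in agentConf org-scoped. As a quick orientation aid, here is a minimal sketch of what a caller looks like after this change. It is not part of the diff: the wrapper function is hypothetical, and the import paths are assumed from the package names shown in the hunks.

    package example

    import (
        "context"

        "github.com/SigNoz/signoz/pkg/query-service/agentConf" // assumed path for the agentConf package above
        "github.com/SigNoz/signoz/pkg/query-service/model"
        "github.com/SigNoz/signoz/pkg/types"
    )

    // applyPipelineElements starts a new org-scoped config version and reads it back.
    // Every agentConf helper now takes the orgId right after the context.
    func applyPipelineElements(ctx context.Context, orgId, userId string, elementIds []string) (*types.AgentConfigVersion, *model.ApiError) {
        cfg, apiErr := agentConf.StartNewVersion(ctx, orgId, userId, types.ElementTypeLogPipelines, elementIds)
        if apiErr != nil {
            return nil, apiErr
        }
        // Fetch the stored version back, also scoped to the org.
        return agentConf.GetConfigVersion(ctx, orgId, types.ElementTypeLogPipelines, cfg.Version)
    }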
@@ -1,72 +1,10 @@
package agentConf

-import (
-	"time"
-
-	"github.com/google/uuid"
-)
-
-type ElementTypeDef string
-
-const (
-	ElementTypeSamplingRules ElementTypeDef = "sampling_rules"
-	ElementTypeDropRules     ElementTypeDef = "drop_rules"
-	ElementTypeLogPipelines  ElementTypeDef = "log_pipelines"
-	ElementTypeLbExporter    ElementTypeDef = "lb_exporter"
-)
-
-type DeployStatus string
-
-const (
-	PendingDeploy       DeployStatus = "DIRTY"
-	Deploying           DeployStatus = "DEPLOYING"
-	Deployed            DeployStatus = "DEPLOYED"
-	DeployInitiated     DeployStatus = "IN_PROGRESS"
-	DeployFailed        DeployStatus = "FAILED"
-	DeployStatusUnknown DeployStatus = "UNKNOWN"
-)
-
-type ConfigVersion struct {
-	ID          string         `json:"id" db:"id"`
-	Version     int            `json:"version" db:"version"`
-	ElementType ElementTypeDef `json:"elementType" db:"element_type"`
-	Active      bool           `json:"active" db:"active"`
-	IsValid     bool           `json:"is_valid" db:"is_valid"`
-	Disabled    bool           `json:"disabled" db:"disabled"`
-
-	DeployStatus DeployStatus `json:"deployStatus" db:"deploy_status"`
-	DeployResult string       `json:"deployResult" db:"deploy_result"`
-
-	LastHash string `json:"lastHash" db:"last_hash"`
-	LastConf string `json:"lastConf" db:"last_config"`
-
-	CreatedBy     string    `json:"createdBy" db:"created_by"`
-	CreatedByName string    `json:"createdByName" db:"created_by_name"`
-	CreatedAt     time.Time `json:"createdAt" db:"created_at"`
-}
-
-func NewConfigVersion(typeDef ElementTypeDef) *ConfigVersion {
-	return &ConfigVersion{
-		ID:           uuid.NewString(),
-		ElementType:  typeDef,
-		Active:       false,
-		IsValid:      false,
-		Disabled:     false,
-		DeployStatus: PendingDeploy,
-		LastHash:     "",
-		LastConf:     "{}",
-		// todo: get user id from context?
-		// CreatedBy
-	}
-}
-
-func updateVersion(v int) int {
-	return v + 1
-}
+import "github.com/SigNoz/signoz/pkg/types"

type ConfigElements struct {
	VersionID   string
	Version     int
-	ElementType ElementTypeDef
+	ElementType types.ElementTypeDef
	ElementId   string
}
@@ -174,7 +174,7 @@ func (r *ClickHouseReader) getValuesForLogAttributes(
			from %s.%s
			where tag_key = $%d and (
				string_value != '' or number_value is not null
-			)
+			) and tag_type != 'logfield'
			limit %d
		)`, r.logsDB, r.logsTagAttributeTableV2, idx+1, limit))

(File diff suppressed because it is too large.)

@@ -124,7 +124,7 @@ func (c *Controller) GenerateConnectionUrl(
	}

	// TODO(Raj): parameterized this in follow up changes
-	agentVersion := "0.0.2"
+	agentVersion := "0.0.3"

	connectionUrl := fmt.Sprintf(
		"https://%s.console.aws.amazon.com/cloudformation/home?region=%s#/stacks/quickcreate?",

(File diff suppressed because one or more lines are too long.)
(Binary file not shown: new image, 371 KiB.)
(File diff suppressed because one or more lines are too long: new image, 6.0 KiB.)
(File diff suppressed because it is too large.)
@@ -0,0 +1,3 @@
+### Monitor Managed Streaming Kafka with SigNoz
+
+Collect key MSK metrics and view them with an out-of-the-box dashboard.
@@ -21,6 +21,7 @@ import (
	"github.com/SigNoz/signoz/pkg/alertmanager"
	errorsV2 "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
+	"github.com/SigNoz/signoz/pkg/modules/preference"
	"github.com/SigNoz/signoz/pkg/query-service/app/metricsexplorer"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/valuer"
@@ -44,7 +45,6 @@ import (
	logsv4 "github.com/SigNoz/signoz/pkg/query-service/app/logs/v4"
	"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
	metricsv3 "github.com/SigNoz/signoz/pkg/query-service/app/metrics/v3"
-	"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
	"github.com/SigNoz/signoz/pkg/query-service/app/querier"
	querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
	"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -142,6 +142,8 @@ type APIHandler struct {
	AlertmanagerAPI *alertmanager.API

	Signoz *signoz.SigNoz
+
+	Preference preference.API
}

type APIHandlerOpts struct {
@@ -187,6 +189,8 @@ type APIHandlerOpts struct {
	AlertmanagerAPI *alertmanager.API

	Signoz *signoz.SigNoz
+
+	Preference preference.API
}

// NewAPIHandler returns an APIHandler
@@ -257,6 +261,7 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		SummaryService:  summaryService,
		AlertmanagerAPI: opts.AlertmanagerAPI,
		Signoz:          opts.Signoz,
+		Preference:      opts.Preference,
	}

	logsQueryBuilder := logsv3.PrepareLogsQuery
@@ -1858,8 +1863,15 @@ func (aH *APIHandler) setTTL(w http.ResponseWriter, r *http.Request) {
		return
	}

+	ctx := r.Context()
+	claims, ok := authtypes.ClaimsFromContext(ctx)
+	if !ok {
+		RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
+		return
+	}
+
	// Context is not used here as TTL is long duration DB operation
-	result, apiErr := aH.reader.SetTTL(context.Background(), ttlParams)
+	result, apiErr := aH.reader.SetTTL(context.Background(), claims.OrgID, ttlParams)
	if apiErr != nil {
		if apiErr.Typ == model.ErrorConflict {
			aH.HandleError(w, apiErr.Err, http.StatusConflict)
@@ -1879,7 +1891,14 @@ func (aH *APIHandler) getTTL(w http.ResponseWriter, r *http.Request) {
		return
	}

-	result, apiErr := aH.reader.GetTTL(r.Context(), ttlParams)
+	ctx := r.Context()
+	claims, ok := authtypes.ClaimsFromContext(ctx)
+	if !ok {
+		RespondError(w, &model.ApiError{Err: errors.New("failed to get org id from context"), Typ: model.ErrorInternal}, nil)
+		return
+	}
+
+	result, apiErr := aH.reader.GetTTL(r.Context(), claims.OrgID, ttlParams)
	if apiErr != nil && aH.HandleError(w, apiErr.Err, http.StatusInternalServerError) {
		return
	}
@@ -3408,132 +3427,37 @@ func (aH *APIHandler) getProducerConsumerEval(
func (aH *APIHandler) getUserPreference(
	w http.ResponseWriter, r *http.Request,
) {
-	preferenceId := mux.Vars(r)["preferenceId"]
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-
-	preference, apiErr := preferences.GetUserPreference(
-		r.Context(), preferenceId, claims.OrgID, claims.UserID,
-	)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.GetUserPreference(w, r)
}

func (aH *APIHandler) updateUserPreference(
	w http.ResponseWriter, r *http.Request,
) {
-	preferenceId := mux.Vars(r)["preferenceId"]
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-	req := preferences.UpdatePreference{}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-
-	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-	preference, apiErr := preferences.UpdateUserPreference(r.Context(), preferenceId, req.PreferenceValue, claims.UserID)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.UpdateUserPreference(w, r)
}

func (aH *APIHandler) getAllUserPreferences(
	w http.ResponseWriter, r *http.Request,
) {
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-	preference, apiErr := preferences.GetAllUserPreferences(
-		r.Context(), claims.OrgID, claims.UserID,
-	)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.GetAllUserPreferences(w, r)
}

func (aH *APIHandler) getOrgPreference(
	w http.ResponseWriter, r *http.Request,
) {
-	preferenceId := mux.Vars(r)["preferenceId"]
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-	preference, apiErr := preferences.GetOrgPreference(
-		r.Context(), preferenceId, claims.OrgID,
-	)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.GetOrgPreference(w, r)
}

func (aH *APIHandler) updateOrgPreference(
	w http.ResponseWriter, r *http.Request,
) {
-	preferenceId := mux.Vars(r)["preferenceId"]
-	req := preferences.UpdatePreference{}
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-
-	err := json.NewDecoder(r.Body).Decode(&req)
-
-	if err != nil {
-		RespondError(w, model.BadRequest(err), nil)
-		return
-	}
-	preference, apiErr := preferences.UpdateOrgPreference(r.Context(), preferenceId, req.PreferenceValue, claims.OrgID)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.UpdateOrgPreference(w, r)
}

func (aH *APIHandler) getAllOrgPreferences(
	w http.ResponseWriter, r *http.Request,
) {
-	claims, ok := authtypes.ClaimsFromContext(r.Context())
-	if !ok {
-		render.Error(w, errorsV2.Newf(errorsV2.TypeUnauthenticated, errorsV2.CodeUnauthenticated, "unauthenticated"))
-		return
-	}
-	preference, apiErr := preferences.GetAllOrgPreferences(
-		r.Context(), claims.OrgID,
-	)
-	if apiErr != nil {
-		RespondError(w, apiErr, nil)
-		return
-	}
-
-	aH.Respond(w, preference)
+	aH.Preference.GetAllOrgPreferences(w, r)
}

// RegisterIntegrationRoutes Registers all Integrations
@@ -3812,9 +3736,14 @@ func (aH *APIHandler) InstallIntegration(
		RespondError(w, model.BadRequest(err), nil)
		return
	}
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		RespondError(w, model.UnauthorizedError(errors.New("unauthorized")), nil)
+		return
+	}

	integration, apiErr := aH.IntegrationsController.Install(
-		r.Context(), &req,
+		r.Context(), claims.OrgID, &req,
	)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
@@ -3834,8 +3763,13 @@ func (aH *APIHandler) UninstallIntegration(
		RespondError(w, model.BadRequest(err), nil)
		return
	}
+	claims, ok := authtypes.ClaimsFromContext(r.Context())
+	if !ok {
+		RespondError(w, model.UnauthorizedError(errors.New("unauthorized")), nil)
+		return
+	}

-	apiErr := aH.IntegrationsController.Uninstall(r.Context(), &req)
+	apiErr := aH.IntegrationsController.Uninstall(r.Context(), claims.OrgID, &req)
	if apiErr != nil {
		RespondError(w, apiErr, nil)
		return
@@ -4494,7 +4428,7 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID string) (
) {
	// get latest agent config
	latestVersion := -1
-	lastestConfig, err := agentConf.GetLatestVersion(ctx, logPipelines)
+	lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, logPipelines)
	if err != nil && err.Type() != model.ErrorNotFound {
		return nil, model.WrapApiError(err, "failed to get latest agent config version")
	}
@@ -4503,14 +4437,14 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID string) (
		latestVersion = lastestConfig.Version
	}

-	payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, latestVersion)
+	payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, orgID, latestVersion)
	if err != nil {
		return nil, model.WrapApiError(err, "failed to get pipelines")
	}

	// todo(Nitya): make a new API for history pagination
	limit := 10
-	history, err := agentConf.GetConfigHistory(ctx, logPipelines, limit)
+	history, err := agentConf.GetConfigHistory(ctx, orgID, logPipelines, limit)
	if err != nil {
		return nil, model.WrapApiError(err, "failed to get config history")
	}
@@ -4522,14 +4456,14 @@ func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID string) (
func (aH *APIHandler) listLogsPipelinesByVersion(ctx context.Context, orgID string, version int) (
	*logparsingpipeline.PipelinesResponse, *model.ApiError,
) {
-	payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, version)
+	payload, err := aH.LogsParsingPipelineController.GetPipelinesByVersion(ctx, orgID, version)
	if err != nil {
		return nil, model.WrapApiError(err, "failed to get pipelines by version")
	}

	// todo(Nitya): make a new API for history pagination
	limit := 10
-	history, err := agentConf.GetConfigHistory(ctx, logPipelines, limit)
+	history, err := agentConf.GetConfigHistory(ctx, orgID, logPipelines, limit)
	if err != nil {
		return nil, model.WrapApiError(err, "failed to retrieve agent config history")
	}

@@ -87,7 +87,7 @@ type InstallIntegrationRequest struct {
}

func (c *Controller) Install(
-	ctx context.Context, req *InstallIntegrationRequest,
+	ctx context.Context, orgId string, req *InstallIntegrationRequest,
) (*IntegrationsListItem, *model.ApiError) {
	res, apiErr := c.mgr.InstallIntegration(
		ctx, req.IntegrationId, req.Config,
@@ -104,7 +104,7 @@ type UninstallIntegrationRequest struct {
}

func (c *Controller) Uninstall(
-	ctx context.Context, req *UninstallIntegrationRequest,
+	ctx context.Context, orgId string, req *UninstallIntegrationRequest,
) *model.ApiError {
	if len(req.IntegrationId) < 1 {
		return model.BadRequest(fmt.Errorf(

@@ -12,6 +12,7 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/pipelinetypes"
	"github.com/google/uuid"
@@ -39,10 +40,10 @@ func NewLogParsingPipelinesController(

// PipelinesResponse is used to prepare http response for pipelines config related requests
type PipelinesResponse struct {
-	*agentConf.ConfigVersion
+	*types.AgentConfigVersion

	Pipelines []pipelinetypes.GettablePipeline `json:"pipelines"`
-	History   []agentConf.ConfigVersion        `json:"history"`
+	History   []types.AgentConfigVersion       `json:"history"`
}

// ApplyPipelines stores new or changed pipelines and initiates a new config update
@@ -86,12 +87,12 @@ func (ic *LogParsingPipelineController) ApplyPipelines(
	}

	// prepare config by calling gen func
-	cfg, err := agentConf.StartNewVersion(ctx, claims.UserID, agentConf.ElementTypeLogPipelines, elements)
+	cfg, err := agentConf.StartNewVersion(ctx, claims.OrgID, claims.UserID, types.ElementTypeLogPipelines, elements)
	if err != nil || cfg == nil {
		return nil, err
	}

-	return ic.GetPipelinesByVersion(ctx, cfg.Version)
+	return ic.GetPipelinesByVersion(ctx, claims.OrgID, cfg.Version)
}

func (ic *LogParsingPipelineController) ValidatePipelines(
@@ -138,20 +139,12 @@ func (ic *LogParsingPipelineController) ValidatePipelines(
// Returns effective list of pipelines including user created
// pipelines and pipelines for installed integrations
func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(
-	ctx context.Context, version int,
+	ctx context.Context, orgID string, version int,
) ([]pipelinetypes.GettablePipeline, *model.ApiError) {
	result := []pipelinetypes.GettablePipeline{}

-	// todo(nitya): remove this once we fix agents in multitenancy
-	defaultOrgID, err := ic.GetDefaultOrgID(ctx)
-	if err != nil {
-		return nil, model.WrapApiError(err, "failed to get default org ID")
-	}
-
-	fmt.Println("defaultOrgID", defaultOrgID)
-
	if version >= 0 {
-		savedPipelines, errors := ic.getPipelinesByVersion(ctx, defaultOrgID, version)
+		savedPipelines, errors := ic.getPipelinesByVersion(ctx, orgID, version)
		if errors != nil {
			zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Errors("errors", errors))
			return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
@@ -203,18 +196,18 @@ func (ic *LogParsingPipelineController) getEffectivePipelinesByVersion(

// GetPipelinesByVersion responds with version info and associated pipelines
func (ic *LogParsingPipelineController) GetPipelinesByVersion(
-	ctx context.Context, version int,
+	ctx context.Context, orgId string, version int,
) (*PipelinesResponse, *model.ApiError) {

-	pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, version)
+	pipelines, errors := ic.getEffectivePipelinesByVersion(ctx, orgId, version)
	if errors != nil {
		zap.L().Error("failed to get pipelines for version", zap.Int("version", version), zap.Error(errors))
		return nil, model.InternalError(fmt.Errorf("failed to get pipelines for given version %v", errors))
	}

-	var configVersion *agentConf.ConfigVersion
+	var configVersion *types.AgentConfigVersion
	if version >= 0 {
-		cv, err := agentConf.GetConfigVersion(ctx, agentConf.ElementTypeLogPipelines, version)
+		cv, err := agentConf.GetConfigVersion(ctx, orgId, types.ElementTypeLogPipelines, version)
		if err != nil {
			zap.L().Error("failed to get config for version", zap.Int("version", version), zap.Error(err))
			return nil, model.WrapApiError(err, "failed to get config for given version")
@@ -223,8 +216,8 @@ func (ic *LogParsingPipelineController) GetPipelinesByVersion(
	}

	return &PipelinesResponse{
-		ConfigVersion: configVersion,
-		Pipelines:     pipelines,
+		AgentConfigVersion: configVersion,
+		Pipelines:          pipelines,
	}, nil
}

@@ -263,8 +256,9 @@ func (pc *LogParsingPipelineController) AgentFeatureType() agentConf.AgentFeatur

// Implements agentConf.AgentFeature interface.
func (pc *LogParsingPipelineController) RecommendAgentConfig(
+	orgId string,
	currentConfYaml []byte,
-	configVersion *agentConf.ConfigVersion,
+	configVersion *types.AgentConfigVersion,
) (
	recommendedConfYaml []byte,
	serializedSettingsUsed string,
@@ -276,7 +270,7 @@ func (pc *LogParsingPipelineController) RecommendAgentConfig(
	}

	pipelinesResp, apiErr := pc.GetPipelinesByVersion(
-		context.Background(), pipelinesVersion,
+		context.Background(), orgId, pipelinesVersion,
	)
	if apiErr != nil {
		return nil, "", apiErr

@@ -129,20 +129,6 @@ func (r *Repo) getPipelinesByVersion(
	return gettablePipelines, errors
}

-func (r *Repo) GetDefaultOrgID(ctx context.Context) (string, *model.ApiError) {
-	var orgs []types.Organization
-	err := r.sqlStore.BunDB().NewSelect().
-		Model(&orgs).
-		Scan(ctx)
-	if err != nil {
-		return "", model.InternalError(errors.Wrap(err, "failed to get default org ID"))
-	}
-	if len(orgs) == 0 {
-		return "", model.InternalError(errors.New("no orgs found"))
-	}
-	return orgs[0].ID, nil
-}
-
// GetPipelines returns pipeline and errors (if any)
func (r *Repo) GetPipeline(
	ctx context.Context, orgID string, id string,

@@ -225,21 +225,21 @@ func (receiver *SummaryService) GetMetricsTreemap(ctx context.Context, params *m
	var response metrics_explorer.TreeMap
	switch params.Treemap {
	case metrics_explorer.TimeSeriesTeeMap:
-		cardinality, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
+		ts, apiError := receiver.reader.GetMetricsTimeSeriesPercentage(ctx, params)
		if apiError != nil {
			return nil, apiError
		}
-		if cardinality != nil {
-			response.TimeSeries = *cardinality
+		if ts != nil {
+			response.TimeSeries = *ts
		}
		return &response, nil
	case metrics_explorer.SamplesTreeMap:
-		dataPoints, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
+		samples, apiError := receiver.reader.GetMetricsSamplesPercentage(ctx, params)
		if apiError != nil {
			return nil, apiError
		}
-		if dataPoints != nil {
-			response.Samples = *dataPoints
+		if samples != nil {
+			response.Samples = *samples
		}
		return &response, nil
	default:

@@ -6,6 +6,7 @@ import (

	"github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
	"github.com/SigNoz/signoz/pkg/query-service/utils"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/knadh/koanf"
	"github.com/knadh/koanf/parsers/yaml"
	"github.com/knadh/koanf/providers/rawbytes"
@@ -21,6 +22,9 @@ func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) {

	tb := newTestbed(t)

+	orgID, err := utils.GetTestOrgId(tb.sqlStore)
+	require.Nil(err)
+
	require.Equal(
		0, len(tb.testConfigProvider.ConfigUpdateSubscribers),
		"there should be no agent config subscribers at the start",
@@ -36,6 +40,7 @@ func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) {
	require.False(tb.testConfigProvider.HasRecommendations())
	agent1Conn := &MockOpAmpConnection{}
	agent1Id := "testAgent1"
+	// get orgId from the db
	tb.opampServer.OnMessage(
		agent1Conn,
		&protobufs.AgentToServer{
@@ -43,6 +48,16 @@ func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) {
			EffectiveConfig: &protobufs.EffectiveConfig{
				ConfigMap: initialAgentConf(),
			},
+			AgentDescription: &protobufs.AgentDescription{
+				IdentifyingAttributes: []*protobufs.KeyValue{
+					{
+						Key: "orgId",
+						Value: &protobufs.AnyValue{
+							Value: &protobufs.AnyValue_StringValue{StringValue: orgID},
+						},
+					},
+				},
+			},
		},
	)
	lastAgent1Msg := agent1Conn.LatestMsgFromServer()
@@ -66,6 +81,16 @@ func TestOpAMPServerToAgentCommunicationWithConfigProvider(t *testing.T) {
			EffectiveConfig: &protobufs.EffectiveConfig{
				ConfigMap: initialAgentConf(),
			},
+			AgentDescription: &protobufs.AgentDescription{
+				IdentifyingAttributes: []*protobufs.KeyValue{
+					{
+						Key: "orgId",
+						Value: &protobufs.AnyValue{
+							Value: &protobufs.AnyValue_StringValue{StringValue: orgID},
+						},
+					},
+				},
+			},
		},
	)
	lastAgent2Msg := agent2Conn.LatestMsgFromServer()
@@ -162,22 +187,26 @@ type testbed struct {
	testConfigProvider *MockAgentConfigProvider
	opampServer        *Server
	t                  *testing.T
+	sqlStore           sqlstore.SQLStore
}

func newTestbed(t *testing.T) *testbed {
	testDB := utils.NewQueryServiceDBForTests(t)
-	_, err := model.InitDB(testDB.SQLxDB())
-	if err != nil {
-		t.Fatalf("could not init opamp model: %v", err)
-	}
-
+	model.InitDB(testDB)
	testConfigProvider := NewMockAgentConfigProvider()
	opampServer := InitializeServer(nil, testConfigProvider)

+	// create a test org
+	err := utils.CreateTestOrg(t, testDB)
+	if err != nil {
+		t.Fatalf("could not create test org: %v", err)
+	}
+
	return &testbed{
		testConfigProvider: testConfigProvider,
		opampServer:        opampServer,
		t:                  t,
+		sqlStore:           testDB,
	}
}

@@ -60,13 +60,13 @@ func UpsertControlProcessors(

		agenthash, err := addIngestionControlToAgent(agent, signal, processors, false)
		if err != nil {
-			zap.L().Error("failed to push ingestion rules config to agent", zap.String("agentID", agent.ID), zap.Error(err))
+			zap.L().Error("failed to push ingestion rules config to agent", zap.String("agentID", agent.ID.StringValue()), zap.Error(err))
			continue
		}

		if agenthash != "" {
			// subscribe callback
-			model.ListenToConfigUpdate(agent.ID, agenthash, callback)
+			model.ListenToConfigUpdate(agent.ID.StringValue(), agenthash, callback)
		}

		hash = agenthash
@@ -89,7 +89,7 @@ func addIngestionControlToAgent(agent *model.Agent, signal string, processors ma
	// add ingestion control spec
	err = makeIngestionControlSpec(agentConf, Signal(signal), processors)
	if err != nil {
-		zap.L().Error("failed to prepare ingestion control processors for agent", zap.String("agentID", agent.ID), zap.Error(err))
+		zap.L().Error("failed to prepare ingestion control processors for agent", zap.String("agentID", agent.ID.StringValue()), zap.Error(err))
		return confHash, err
	}

@@ -67,7 +67,7 @@ func (ta *MockAgentConfigProvider) HasRecommendations() bool {
}

// AgentConfigProvider interface
-func (ta *MockAgentConfigProvider) RecommendAgentConfig(baseConfYaml []byte) (
+func (ta *MockAgentConfigProvider) RecommendAgentConfig(orgId string, baseConfYaml []byte) (
	[]byte, string, error,
) {
	if len(ta.ZPagesEndpoint) < 1 {
@@ -92,6 +92,7 @@ func (ta *MockAgentConfigProvider) RecommendAgentConfig(baseConfYaml []byte) (

// AgentConfigProvider interface
func (ta *MockAgentConfigProvider) ReportConfigDeploymentStatus(
+	orgId string,
	agentId string,
	configId string,
	err error,

@@ -7,33 +7,20 @@ import (
	"sync"
	"time"

+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"
	"google.golang.org/protobuf/proto"

	"github.com/open-telemetry/opamp-go/protobufs"
-	"github.com/open-telemetry/opamp-go/server/types"
+	opampTypes "github.com/open-telemetry/opamp-go/server/types"
)

-type AgentStatus int
-
-const (
-	AgentStatusUnknown AgentStatus = iota
-	AgentStatusConnected
-	AgentStatusDisconnected
-)
-
-// set in agent description when agent is capable of supporting
-// lb exporter configuration. values: 1 (true) or 0 (false)
-const lbExporterFlag = "capabilities.lbexporter"
-
type Agent struct {
-	ID              string      `json:"agentId" yaml:"agentId" db:"agent_id"`
-	StartedAt       time.Time   `json:"startedAt" yaml:"startedAt" db:"started_at"`
-	TerminatedAt    time.Time   `json:"terminatedAt" yaml:"terminatedAt" db:"terminated_at"`
-	EffectiveConfig string      `json:"effectiveConfig" yaml:"effectiveConfig" db:"effective_config"`
-	CurrentStatus   AgentStatus `json:"currentStatus" yaml:"currentStatus" db:"current_status"`
-	remoteConfig    *protobufs.AgentRemoteConfig
-	Status          *protobufs.AgentToServer
+	types.StorableAgent
+	remoteConfig *protobufs.AgentRemoteConfig
+	Status       *protobufs.AgentToServer

	// can this agent be load balancer
	CanLB bool
@@ -41,13 +28,18 @@ type Agent struct {
	// is this agent setup as load balancer
	IsLb bool

-	conn      types.Connection
+	conn      opampTypes.Connection
	connMutex sync.Mutex
	mux       sync.RWMutex
+	store     sqlstore.SQLStore
}

-func New(ID string, conn types.Connection) *Agent {
-	return &Agent{ID: ID, StartedAt: time.Now(), CurrentStatus: AgentStatusConnected, conn: conn}
+// set in agent description when agent is capable of supporting
+// lb exporter configuration. values: 1 (true) or 0 (false)
+const lbExporterFlag = "capabilities.lbexporter"
+
+func New(store sqlstore.SQLStore, orgID string, ID string, conn opampTypes.Connection) *Agent {
+	return &Agent{StorableAgent: types.StorableAgent{OrgID: orgID, Identifiable: types.Identifiable{ID: valuer.GenerateUUID()}, StartedAt: time.Now(), CurrentStatus: types.AgentStatusConnected}, conn: conn, store: store}
}

// Upsert inserts or updates the agent in the database.
@@ -55,17 +47,13 @@ func (agent *Agent) Upsert() error {
	agent.mux.Lock()
	defer agent.mux.Unlock()

-	_, err := db.NamedExec(`INSERT OR REPLACE INTO agents (
-		agent_id,
-		started_at,
-		effective_config,
-		current_status
-	) VALUES (
-		:agent_id,
-		:started_at,
-		:effective_config,
-		:current_status
-	)`, agent)
+	_, err := agent.store.BunDB().NewInsert().
+		Model(&agent.StorableAgent).
+		On("CONFLICT (org_id, id) DO UPDATE").
+		Set("started_at = EXCLUDED.started_at").
+		Set("effective_config = EXCLUDED.effective_config").
+		Set("current_status = EXCLUDED.current_status").
+		Exec(context.Background())
	if err != nil {
		return err
	}
@@ -135,11 +123,11 @@ func (agent *Agent) updateAgentDescription(newStatus *protobufs.AgentToServer) (
			// todo: need to address multiple agent scenario here
			// for now, the first response will be sent back to the UI
			if agent.Status.RemoteConfigStatus.Status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_APPLIED {
-				onConfigSuccess(agent.ID, string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash))
+				onConfigSuccess(agent.OrgID, agent.ID.StringValue(), string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash))
			}

			if agent.Status.RemoteConfigStatus.Status == protobufs.RemoteConfigStatuses_RemoteConfigStatuses_FAILED {
-				onConfigFailure(agent.ID, string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash), agent.Status.RemoteConfigStatus.ErrorMessage)
+				onConfigFailure(agent.OrgID, agent.ID.StringValue(), string(agent.Status.RemoteConfigStatus.LastRemoteConfigHash), agent.Status.RemoteConfigStatus.ErrorMessage)
			}
		}
	}
@@ -269,7 +257,7 @@ func (agent *Agent) processStatusUpdate(
		agent.SendToAgent(response)

		ListenToConfigUpdate(
-			agent.ID,
+			agent.ID.StringValue(),
			string(response.RemoteConfig.ConfigHash),
			configProvider.ReportConfigDeploymentStatus,
		)
@@ -277,9 +265,9 @@ func (agent *Agent) processStatusUpdate(
}

func (agent *Agent) updateRemoteConfig(configProvider AgentConfigProvider) bool {
-	recommendedConfig, confId, err := configProvider.RecommendAgentConfig([]byte(agent.EffectiveConfig))
+	recommendedConfig, confId, err := configProvider.RecommendAgentConfig(agent.OrgID, []byte(agent.EffectiveConfig))
	if err != nil {
-		zap.L().Error("could not generate config recommendation for agent", zap.String("agentID", agent.ID), zap.Error(err))
+		zap.L().Error("could not generate config recommendation for agent", zap.String("agentID", agent.ID.StringValue()), zap.Error(err))
		return false
	}

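The new Agent.Upsert above swaps a sqlx `INSERT OR REPLACE` for a bun insert that resolves conflicts on (org_id, id). For readers less familiar with bun, here is the same pattern in isolation; this is a sketch only (the standalone helper and the `bun.IDB` wiring are assumptions for illustration, mirroring the builder calls shown in the diff):

    // upsertAgentRow shows the ON CONFLICT ... DO UPDATE form used by Agent.Upsert.
    // Assumes a bun.IDB handle and a model whose table has org_id and id columns.
    func upsertAgentRow(ctx context.Context, db bun.IDB, row *types.StorableAgent) error {
        _, err := db.NewInsert().
            Model(row).
            On("CONFLICT (org_id, id) DO UPDATE").
            Set("started_at = EXCLUDED.started_at").
            Set("effective_config = EXCLUDED.effective_config").
            Set("current_status = EXCLUDED.current_status").
            Exec(ctx)
        return err
    }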
@@ -1,19 +1,19 @@
package model

import (
	"context"
	"fmt"
	"sync"
	"time"

-	"github.com/jmoiron/sqlx"
+	"github.com/SigNoz/signoz/pkg/sqlstore"
+	signozTypes "github.com/SigNoz/signoz/pkg/types"
	"github.com/open-telemetry/opamp-go/protobufs"
	"github.com/open-telemetry/opamp-go/server/types"
	"github.com/pkg/errors"
	"go.uber.org/zap"
)

-var db *sqlx.DB
-
var AllAgents = Agents{
	agentsById:  map[string]*Agent{},
	connections: map[types.Connection]map[string]bool{},
@@ -23,6 +23,7 @@ type Agents struct {
	mux         sync.RWMutex
	agentsById  map[string]*Agent
	connections map[types.Connection]map[string]bool
+	store       sqlstore.SQLStore
}

func (a *Agents) Count() int {
@@ -30,15 +31,14 @@ func (a *Agents) Count() int {
}

// Initialize the database and create schema if needed
-func InitDB(qsDB *sqlx.DB) (*sqlx.DB, error) {
-	db = qsDB
+func InitDB(sqlStore sqlstore.SQLStore) {

	AllAgents = Agents{
		agentsById:  make(map[string]*Agent),
		connections: make(map[types.Connection]map[string]bool),
		mux:         sync.RWMutex{},
+		store:       sqlStore,
	}
-	return db, nil
}

// RemoveConnection removes the connection all Agent instances associated with the
@@ -49,7 +49,7 @@ func (agents *Agents) RemoveConnection(conn types.Connection) {

	for instanceId := range agents.connections[conn] {
		agent := agents.agentsById[instanceId]
-		agent.CurrentStatus = AgentStatusDisconnected
+		agent.CurrentStatus = signozTypes.AgentStatusDisconnected
		agent.TerminatedAt = time.Now()
		_ = agent.Upsert()
		delete(agents.agentsById, instanceId)
@@ -67,27 +67,43 @@ func (agents *Agents) FindAgent(agentID string) *Agent {
// FindOrCreateAgent returns the Agent instance associated with the given agentID.
// If the Agent instance does not exist, it is created and added to the list of
// Agent instances.
-func (agents *Agents) FindOrCreateAgent(agentID string, conn types.Connection) (*Agent, bool, error) {
+func (agents *Agents) FindOrCreateAgent(agentID string, conn types.Connection, orgId string) (*Agent, bool, error) {
	agents.mux.Lock()
	defer agents.mux.Unlock()
-	var created bool
	agent, ok := agents.agentsById[agentID]
-	var err error
-	if !ok || agent == nil {
-		agent = New(agentID, conn)
-		err = agent.Upsert()
-		if err != nil {
-			return nil, created, err
-		}
-		agents.agentsById[agentID] = agent
-
-		if agents.connections[conn] == nil {
-			agents.connections[conn] = map[string]bool{}
-		}
-		agents.connections[conn][agentID] = true
-		created = true
+	if ok && agent != nil {
+		return agent, false, nil
	}
-	return agent, created, nil
+
+	// This is for single org mode
+	if orgId == "SIGNOZ##DEFAULT##ORG##ID" {
+		err := agents.store.BunDB().NewSelect().
+			Model((*signozTypes.Organization)(nil)).
+			ColumnExpr("id").
+			Limit(1).
+			Scan(context.Background(), &orgId)
+		if err != nil {
+			return nil, false, err
+		}
+	}
+
+	if !ok && orgId == "" {
+		return nil, false, errors.New("cannot create agent without orgId")
+	}
+
+	agent = New(agents.store, orgId, agentID, conn)
+	err := agent.Upsert()
+	if err != nil {
+		return nil, false, err
+	}
+	agents.agentsById[agentID] = agent
+
+	if agents.connections[conn] == nil {
+		agents.connections[conn] = map[string]bool{}
+	}
+	agents.connections[conn][agentID] = true
+	return agent, true, nil
}

func (agents *Agents) GetAllAgents() []*Agent {
@@ -108,18 +124,19 @@ func (agents *Agents) RecommendLatestConfigToAll(
) error {
	for _, agent := range agents.GetAllAgents() {
		newConfig, confId, err := provider.RecommendAgentConfig(
+			agent.OrgID,
			[]byte(agent.EffectiveConfig),
		)
		if err != nil {
			return errors.Wrap(err, fmt.Sprintf(
-				"could not generate conf recommendation for %v", agent.ID,
+				"could not generate conf recommendation for %v", agent.ID.StringValue(),
			))
		}

		// Recommendation is same as current config
		if string(newConfig) == agent.EffectiveConfig {
			zap.L().Info(
-				"Recommended config same as current effective config for agent", zap.String("agentID", agent.ID),
+				"Recommended config same as current effective config for agent", zap.String("agentID", agent.ID.StringValue()),
			)
			return nil
		}
@@ -144,7 +161,7 @@ func (agents *Agents) RecommendLatestConfigToAll(
			RemoteConfig: newRemoteConfig,
		})

		ListenToConfigUpdate(agent.ID, confId, provider.ReportConfigDeploymentStatus)
+		ListenToConfigUpdate(agent.ID.StringValue(), confId, provider.ReportConfigDeploymentStatus)
	}
	return nil
}

@@ -4,7 +4,7 @@ package model
type AgentConfigProvider interface {
	// Generate recommended config for an agent based on its `currentConfYaml`
	// and current state of user facing settings for agent based features.
-	RecommendAgentConfig(currentConfYaml []byte) (
+	RecommendAgentConfig(orgId string, currentConfYaml []byte) (
		recommendedConfYaml []byte,
		// Opaque id of the recommended config, used for reporting deployment status updates
		configId string,
@@ -13,6 +13,7 @@ type AgentConfigProvider interface {

	// Report deployment status for config recommendations generated by RecommendAgentConfig
	ReportConfigDeploymentStatus(
+		orgId string,
		agentId string,
		configId string,
		err error,

|
||||
}
|
||||
}
|
||||
|
||||
type OnChangeCallback func(agentId string, hash string, err error)
|
||||
type OnChangeCallback func(orgId string, agentId string, hash string, err error)
|
||||
|
||||
// responsible for managing subscribers on config change
|
||||
type Coordinator struct {
|
||||
@@ -25,16 +25,16 @@ type Coordinator struct {
|
||||
subscribers map[string][]OnChangeCallback
|
||||
}
|
||||
|
||||
func onConfigSuccess(agentId string, hash string) {
|
||||
notifySubscribers(agentId, hash, nil)
|
||||
func onConfigSuccess(orgId string, agentId string, hash string) {
|
||||
notifySubscribers(orgId, agentId, hash, nil)
|
||||
}
|
||||
|
||||
func onConfigFailure(agentId string, hash string, errorMessage string) {
|
||||
notifySubscribers(agentId, hash, fmt.Errorf(errorMessage))
|
||||
func onConfigFailure(orgId string, agentId string, hash string, errorMessage string) {
|
||||
notifySubscribers(orgId, agentId, hash, fmt.Errorf(errorMessage))
|
||||
}
|
||||
|
||||
// OnSuccess listens to config changes and notifies subscribers
|
||||
func notifySubscribers(agentId string, hash string, err error) {
|
||||
func notifySubscribers(orgId string, agentId string, hash string, err error) {
|
||||
// this method currently does not handle multi-agent scenario.
|
||||
// as soon as a message is delivered, we release all the subscribers
|
||||
// for a given hash
|
||||
@@ -44,7 +44,7 @@ func notifySubscribers(agentId string, hash string, err error) {
|
||||
}
|
||||
|
||||
for _, s := range subs {
|
||||
s(agentId, hash, err)
|
||||
s(orgId, agentId, hash, err)
|
||||
}
|
||||
|
||||
// delete all subscribers for this hash, assume future
|
||||
|
||||
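Every subscriber callback now receives the orgId first. A small sketch of registering such a callback with ListenToConfigUpdate (the subscription helper used elsewhere in this diff); the logging callback itself is hypothetical:

    // watchDeployStatus subscribes a logging callback for one agent/config-hash pair.
    func watchDeployStatus(agentId string, configHash string) {
        model.ListenToConfigUpdate(agentId, configHash, func(orgId string, agentId string, hash string, err error) {
            if err != nil {
                zap.L().Error("config deploy failed", zap.String("orgId", orgId), zap.String("agentId", agentId), zap.Error(err))
                return
            }
            zap.L().Info("config deployed", zap.String("orgId", orgId), zap.String("hash", hash))
        })
    }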
@@ -2,6 +2,7 @@ package opamp
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
model "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/open-telemetry/opamp-go/protobufs"
|
||||
@@ -53,6 +54,7 @@ func (srv *Server) Start(listener string) error {
|
||||
ListenEndpoint: listener,
|
||||
}
|
||||
|
||||
// This will have to send request to all the agents of all tenants
|
||||
unsubscribe := srv.agentConfigProvider.SubscribeToConfigUpdates(func() {
|
||||
err := srv.agents.RecommendLatestConfigToAll(srv.agentConfigProvider)
|
||||
if err != nil {
|
||||
@@ -78,20 +80,46 @@ func (srv *Server) onDisconnect(conn types.Connection) {
|
||||
srv.agents.RemoveConnection(conn)
|
||||
}
|
||||
|
||||
// When the agent sends the message for the first time, then we need to know the orgID
|
||||
// For the subsequent requests, agents don't send the attributes unless something is changed
|
||||
// but we keep them in context mapped which is mapped to the instanceID, so we would know the
|
||||
// orgID from the context
|
||||
func (srv *Server) OnMessage(conn types.Connection, msg *protobufs.AgentToServer) *protobufs.ServerToAgent {
|
||||
agentID := msg.InstanceUid
|
||||
|
||||
agent, created, err := srv.agents.FindOrCreateAgent(agentID, conn)
|
||||
var orgId string
|
||||
if msg.AgentDescription != nil && msg.AgentDescription.IdentifyingAttributes != nil {
|
||||
for _, attr := range msg.AgentDescription.IdentifyingAttributes {
|
||||
if attr.Key == "orgId" {
|
||||
orgId = attr.Value.GetStringValue()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
agent, created, err := srv.agents.FindOrCreateAgent(agentID, conn, orgId)
|
||||
if err != nil {
|
||||
zap.L().Error("Failed to find or create agent", zap.String("agentID", agentID), zap.Error(err))
|
||||
// TODO: handle error
|
||||
|
||||
// Return error response according to OpAMP protocol
|
||||
return &protobufs.ServerToAgent{
|
||||
InstanceUid: agentID,
|
||||
ErrorResponse: &protobufs.ServerErrorResponse{
|
||||
Type: protobufs.ServerErrorResponseType_ServerErrorResponseType_Unavailable,
|
||||
Details: &protobufs.ServerErrorResponse_RetryInfo{
|
||||
RetryInfo: &protobufs.RetryInfo{
|
||||
RetryAfterNanoseconds: uint64(5 * time.Second), // minimum recommended retry interval
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if created {
|
||||
agent.CanLB = model.ExtractLbFlag(msg.AgentDescription)
|
||||
zap.L().Debug(
|
||||
"New agent added", zap.Bool("canLb", agent.CanLB),
|
||||
zap.String("ID", agent.ID),
|
||||
zap.String("ID", agent.ID.StringValue()),
|
||||
zap.Any("status", agent.CurrentStatus),
|
||||
)
|
||||
}
|
||||
|
||||
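For the orgId lookup in OnMessage to work, the collector has to advertise its org in the identifying attributes of its first AgentToServer message. A short sketch of that message, mirroring the structure used by the integration tests in this change; the ids are placeholders.

// Sketch: the first AgentToServer message an agent would send so that
// OnMessage can resolve its org from the identifying attributes.
func exampleFirstAgentMessage(orgID string, instanceUID string) *protobufs.AgentToServer {
	return &protobufs.AgentToServer{
		InstanceUid: instanceUID,
		AgentDescription: &protobufs.AgentDescription{
			IdentifyingAttributes: []*protobufs.KeyValue{
				{
					Key: "orgId",
					Value: &protobufs.AnyValue{
						Value: &protobufs.AnyValue_StringValue{StringValue: orgID},
					},
				},
			},
		},
	}
}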
@@ -21,6 +21,7 @@ import (
|
||||
"github.com/gorilla/mux"
|
||||
promModel "github.com/prometheus/common/model"
|
||||
"go.uber.org/multierr"
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/metrics"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -34,6 +35,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||
querytemplate "github.com/SigNoz/signoz/pkg/query-service/utils/queryTemplate"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
chVariables "github.com/SigNoz/signoz/pkg/variables/clickhouse"
|
||||
)
|
||||
|
||||
var allowedFunctions = []string{"count", "ratePerSec", "sum", "avg", "min", "max", "p50", "p90", "p95", "p99"}
|
||||
@@ -841,6 +843,29 @@ func validateExpressions(expressions []string, funcs map[string]govaluate.Expres
|
||||
return errs
|
||||
}
|
||||
|
||||
// chTransformQuery transforms the clickhouse query with the given variables
|
||||
// it is used to check what the query would be if variables are selected as __all__.
// for now, this is just a pass-through, but in the future, it will be used to
// rewrite dashboard variables in the query
|
||||
// TODO(srikanthccv): version based query replacement
|
||||
func chTransformQuery(query string, variables map[string]interface{}) {
|
||||
varsForTransform := make([]chVariables.VariableValue, 0, len(variables))
|
||||
for name := range variables {
|
||||
varsForTransform = append(varsForTransform, chVariables.VariableValue{
|
||||
Name: name,
|
||||
Values: []string{"__all__"},
|
||||
IsSelectAll: true,
|
||||
FieldType: "scalar",
|
||||
})
|
||||
}
|
||||
transformer := chVariables.NewQueryTransformer(query, varsForTransform)
|
||||
transformedQuery, err := transformer.Transform()
|
||||
if err != nil {
|
||||
zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
|
||||
}
|
||||
zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery), zap.String("originalQuery", query))
|
||||
}
|
||||
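A small sketch of what chTransformQuery builds for one dashboard variable, using only the chVariables types and calls shown above; the query text and variable name are made up for illustration.

// Sketch: force the __all__ selection for a single variable named "service"
// and log what the fully expanded query would look like.
vars := []chVariables.VariableValue{
	{
		Name:        "service",
		Values:      []string{"__all__"},
		IsSelectAll: true,
		FieldType:   "scalar",
	},
}
transformer := chVariables.NewQueryTransformer(
	"SELECT count() FROM signoz_traces.distributed_signoz_index_v2 WHERE serviceName IN {{service}}", // made-up query
	vars,
)
transformedQuery, err := transformer.Transform()
if err != nil {
	zap.L().Warn("failed to transform clickhouse query", zap.Error(err))
}
zap.L().Info("transformed clickhouse query", zap.String("transformedQuery", transformedQuery))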
|
||||
func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiError) {
|
||||
|
||||
var queryRangeParams *v3.QueryRangeParamsV3
|
||||
@@ -979,6 +1004,7 @@ func ParseQueryRangeParams(r *http.Request) (*v3.QueryRangeParamsV3, *model.ApiE
|
||||
continue
|
||||
}
|
||||
|
||||
chTransformQuery(chQuery.Query, queryRangeParams.Variables)
|
||||
for name, value := range queryRangeParams.Variables {
|
||||
chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
|
||||
chQuery.Query = strings.Replace(chQuery.Query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)
|
||||
|
||||
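The loop above substitutes both {{name}} and [[name]] placeholders with the selected variable value. A tiny illustrative example (the query and value are made up):

// Given Variables = map[string]interface{}{"service": "frontend"}, both
// placeholder styles resolve to the same substituted value.
query := "SELECT count() FROM spans WHERE serviceName = '{{service}}' OR serviceName = '[[service]]'"
for name, value := range map[string]interface{}{"service": "frontend"} {
	query = strings.Replace(query, fmt.Sprintf("{{%s}}", name), fmt.Sprint(value), -1)
	query = strings.Replace(query, fmt.Sprintf("[[%s]]", name), fmt.Sprint(value), -1)
}
// query is now:
// SELECT count() FROM spans WHERE serviceName = 'frontend' OR serviceName = 'frontend'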
@@ -1,84 +0,0 @@
|
||||
package preferences
|
||||
|
||||
var preferenceMap = map[string]Preference{
|
||||
"ORG_ONBOARDING": {
|
||||
Key: "ORG_ONBOARDING",
|
||||
Name: "Organisation Onboarding",
|
||||
Description: "Organisation Onboarding",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"org"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_DO_LATER": {
|
||||
Key: "WELCOME_CHECKLIST_DO_LATER",
|
||||
Name: "Welcome Checklist Do Later",
|
||||
Description: "Welcome Checklist Do Later",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SEND_LOGS_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SEND_LOGS_SKIPPED",
|
||||
Name: "Welcome Checklist Send Logs Skipped",
|
||||
Description: "Welcome Checklist Send Logs Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SEND_TRACES_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SEND_TRACES_SKIPPED",
|
||||
Name: "Welcome Checklist Send Traces Skipped",
|
||||
Description: "Welcome Checklist Send Traces Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED",
|
||||
Name: "Welcome Checklist Send Infra Metrics Skipped",
|
||||
Description: "Welcome Checklist Send Infra Metrics Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED",
|
||||
Name: "Welcome Checklist Setup Dashboards Skipped",
|
||||
Description: "Welcome Checklist Setup Dashboards Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED",
|
||||
Name: "Welcome Checklist Setup Alerts Skipped",
|
||||
Description: "Welcome Checklist Setup Alerts Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
"WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED": {
|
||||
Key: "WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED",
|
||||
Name: "Welcome Checklist Setup Saved View Skipped",
|
||||
Description: "Welcome Checklist Setup Saved View Skipped",
|
||||
ValueType: "boolean",
|
||||
DefaultValue: false,
|
||||
AllowedValues: []interface{}{true, false},
|
||||
IsDiscreteValues: true,
|
||||
AllowedScopes: []string{"user"},
|
||||
},
|
||||
}
|
||||
@@ -1,500 +0,0 @@
|
||||
package preferences
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/jmoiron/sqlx"
|
||||
)
|
||||
|
||||
type Range struct {
|
||||
Min int64 `json:"min"`
|
||||
Max int64 `json:"max"`
|
||||
}
|
||||
|
||||
type Preference struct {
|
||||
Key string `json:"key"`
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
ValueType string `json:"valueType"`
|
||||
DefaultValue interface{} `json:"defaultValue"`
|
||||
AllowedValues []interface{} `json:"allowedValues"`
|
||||
IsDiscreteValues bool `json:"isDiscreteValues"`
|
||||
Range Range `json:"range"`
|
||||
AllowedScopes []string `json:"allowedScopes"`
|
||||
}
|
||||
|
||||
func (p *Preference) ErrorValueTypeMismatch() *model.ApiError {
|
||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not of expected type: %s", p.ValueType)}
|
||||
}
|
||||
|
||||
const (
|
||||
PreferenceValueTypeInteger string = "integer"
|
||||
PreferenceValueTypeFloat string = "float"
|
||||
PreferenceValueTypeString string = "string"
|
||||
PreferenceValueTypeBoolean string = "boolean"
|
||||
)
|
||||
|
||||
const (
|
||||
OrgAllowedScope string = "org"
|
||||
UserAllowedScope string = "user"
|
||||
)
|
||||
|
||||
func (p *Preference) checkIfInAllowedValues(preferenceValue interface{}) (bool, *model.ApiError) {
|
||||
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeInteger:
|
||||
_, ok := preferenceValue.(int64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeFloat:
|
||||
_, ok := preferenceValue.(float64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeString:
|
||||
_, ok := preferenceValue.(string)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeBoolean:
|
||||
_, ok := preferenceValue.(bool)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
}
|
||||
isInAllowedValues := false
|
||||
for _, value := range p.AllowedValues {
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeInteger:
|
||||
allowedValue, ok := value.(int64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeFloat:
|
||||
allowedValue, ok := value.(float64)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeString:
|
||||
allowedValue, ok := value.(string)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
case PreferenceValueTypeBoolean:
|
||||
allowedValue, ok := value.(bool)
|
||||
if !ok {
|
||||
return false, p.ErrorValueTypeMismatch()
|
||||
}
|
||||
|
||||
if allowedValue == preferenceValue {
|
||||
isInAllowedValues = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return isInAllowedValues, nil
|
||||
}
|
||||
|
||||
func (p *Preference) IsValidValue(preferenceValue interface{}) *model.ApiError {
|
||||
typeSafeValue := preferenceValue
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeInteger:
|
||||
val, ok := preferenceValue.(int64)
|
||||
if !ok {
|
||||
floatVal, ok := preferenceValue.(float64)
|
||||
if !ok || floatVal != float64(int64(floatVal)) {
|
||||
return p.ErrorValueTypeMismatch()
|
||||
}
|
||||
val = int64(floatVal)
|
||||
typeSafeValue = val
|
||||
}
|
||||
if !p.IsDiscreteValues {
|
||||
if val < p.Range.Min || val > p.Range.Max {
|
||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the range specified, min: %v , max:%v", p.Range.Min, p.Range.Max)}
|
||||
}
|
||||
}
|
||||
case PreferenceValueTypeString:
|
||||
_, ok := preferenceValue.(string)
|
||||
if !ok {
|
||||
return p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeFloat:
|
||||
_, ok := preferenceValue.(float64)
|
||||
if !ok {
|
||||
return p.ErrorValueTypeMismatch()
|
||||
}
|
||||
case PreferenceValueTypeBoolean:
|
||||
_, ok := preferenceValue.(bool)
|
||||
if !ok {
|
||||
return p.ErrorValueTypeMismatch()
|
||||
}
|
||||
}
|
||||
|
||||
// check the validity of the value being part of allowed values or the range specified if any
|
||||
if p.IsDiscreteValues {
|
||||
if p.AllowedValues != nil {
|
||||
isInAllowedValues, valueMisMatchErr := p.checkIfInAllowedValues(typeSafeValue)
|
||||
|
||||
if valueMisMatchErr != nil {
|
||||
return valueMisMatchErr
|
||||
}
|
||||
if !isInAllowedValues {
|
||||
return &model.ApiError{Typ: model.ErrorBadData, Err: fmt.Errorf("the preference value is not in the list of allowedValues: %v", p.AllowedValues)}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
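A short sketch of how this validation behaved for a non-discrete integer preference in the removed package, exercising the float-to-int coercion and the range check above. The preference key and range are made up for illustration.

// Hypothetical non-discrete integer preference with a range of 1..100.
p := Preference{
	Key:              "MAX_PINNED_DASHBOARDS", // made-up key
	ValueType:        PreferenceValueTypeInteger,
	DefaultValue:     int64(5),
	IsDiscreteValues: false,
	Range:            Range{Min: 1, Max: 100},
}

// JSON numbers arrive as float64; 25.0 is coerced to int64(25) and accepted.
if apiErr := p.IsValidValue(float64(25)); apiErr != nil {
	fmt.Println("unexpected:", apiErr.Err)
}

// 250 is outside the declared range and is rejected.
if apiErr := p.IsValidValue(float64(250)); apiErr != nil {
	fmt.Println("rejected:", apiErr.Err)
}

// 2.5 is not a whole number, so it fails the integer type check.
if apiErr := p.IsValidValue(2.5); apiErr != nil {
	fmt.Println("rejected:", apiErr.Err)
}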
|
||||
func (p *Preference) IsEnabledForScope(scope string) bool {
|
||||
isPreferenceEnabledForGivenScope := false
|
||||
if p.AllowedScopes != nil {
|
||||
for _, allowedScope := range p.AllowedScopes {
|
||||
if allowedScope == strings.ToLower(scope) {
|
||||
isPreferenceEnabledForGivenScope = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return isPreferenceEnabledForGivenScope
|
||||
}
|
||||
|
||||
func (p *Preference) SanitizeValue(preferenceValue interface{}) interface{} {
|
||||
switch p.ValueType {
|
||||
case PreferenceValueTypeBoolean:
|
||||
if preferenceValue == "1" || preferenceValue == true {
|
||||
return true
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
default:
|
||||
return preferenceValue
|
||||
}
|
||||
}
|
||||
|
||||
type AllPreferences struct {
|
||||
Preference
|
||||
Value interface{} `json:"value"`
|
||||
}
|
||||
|
||||
type PreferenceKV struct {
|
||||
PreferenceId string `json:"preference_id" db:"preference_id"`
|
||||
PreferenceValue interface{} `json:"preference_value" db:"preference_value"`
|
||||
}
|
||||
|
||||
type UpdatePreference struct {
|
||||
PreferenceValue interface{} `json:"preference_value"`
|
||||
}
|
||||
|
||||
var db *sqlx.DB
|
||||
|
||||
func InitDB(inputDB *sqlx.DB) error {
|
||||
db = inputDB
|
||||
return nil
|
||||
}
|
||||
|
||||
// org preference functions
|
||||
func GetOrgPreference(ctx context.Context, preferenceId string, orgId string) (*PreferenceKV, *model.ApiError) {
|
||||
// check if the preference key exists or not
|
||||
preference, seen := preferenceMap[preferenceId]
|
||||
if !seen {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
||||
}
|
||||
|
||||
// check if the preference is enabled for org scope or not
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
|
||||
}
|
||||
|
||||
// fetch the value from the database
|
||||
var orgPreference PreferenceKV
|
||||
query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`
|
||||
err := db.Get(&orgPreference, query, preferenceId, orgId)
|
||||
|
||||
// if the value doesn't exist in db then return the default value
|
||||
if err != nil {
|
||||
if err == sql.ErrNoRows {
|
||||
return &PreferenceKV{
|
||||
PreferenceId: preferenceId,
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}, nil
|
||||
}
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in fetching the org preference: %s", err.Error())}
|
||||
|
||||
}
|
||||
|
||||
// else return the value fetched from the org_preference table
|
||||
return &PreferenceKV{
|
||||
PreferenceId: preferenceId,
|
||||
PreferenceValue: preference.SanitizeValue(orgPreference.PreferenceValue),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func UpdateOrgPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, orgId string) (*PreferenceKV, *model.ApiError) {
|
||||
// check if the preference key exists or not
|
||||
preference, seen := preferenceMap[preferenceId]
|
||||
if !seen {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
||||
}
|
||||
|
||||
// check if the preference is enabled at org scope or not
|
||||
isPreferenceEnabled := preference.IsEnabledForScope(OrgAllowedScope)
|
||||
if !isPreferenceEnabled {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at org scope: %s", preferenceId)}
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// update the values in the org_preference table and return the key and the value
|
||||
query := `INSERT INTO org_preference(preference_id,preference_value,org_id) VALUES($1,$2,$3)
|
||||
ON CONFLICT(preference_id,org_id) DO
|
||||
UPDATE SET preference_value= $2 WHERE preference_id=$1 AND org_id=$3;`
|
||||
|
||||
_, dberr := db.Exec(query, preferenceId, preferenceValue, orgId)
|
||||
|
||||
if dberr != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberr.Error())}
|
||||
}
|
||||
|
||||
return &PreferenceKV{
|
||||
PreferenceId: preferenceId,
|
||||
PreferenceValue: preferenceValue,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func GetAllOrgPreferences(ctx context.Context, orgId string) (*[]AllPreferences, *model.ApiError) {
|
||||
// filter out all the org enabled preferences from the preference variable
|
||||
allOrgPreferences := []AllPreferences{}
|
||||
|
||||
// fetch all the org preference values stored in org_preference table
|
||||
orgPreferenceValues := []PreferenceKV{}
|
||||
|
||||
query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
|
||||
err := db.Select(&orgPreferenceValues, query, orgId)
|
||||
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
|
||||
}
|
||||
|
||||
// create a map of key vs values from the above response
|
||||
preferenceValueMap := map[string]interface{}{}
|
||||
|
||||
for _, preferenceValue := range orgPreferenceValues {
|
||||
preferenceValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
// update the filtered list wherever a value is present in the map
|
||||
for _, preference := range preferenceMap {
|
||||
isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
||||
if isEnabledForOrgScope {
|
||||
preferenceWithValue := AllPreferences{}
|
||||
preferenceWithValue.Key = preference.Key
|
||||
preferenceWithValue.Name = preference.Name
|
||||
preferenceWithValue.Description = preference.Description
|
||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||
preferenceWithValue.Range = preference.Range
|
||||
preferenceWithValue.ValueType = preference.ValueType
|
||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||
value, seen := preferenceValueMap[preference.Key]
|
||||
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
} else {
|
||||
preferenceWithValue.Value = preference.DefaultValue
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allOrgPreferences = append(allOrgPreferences, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return &allOrgPreferences, nil
|
||||
}
|
||||
|
||||
// user preference functions
|
||||
func GetUserPreference(ctx context.Context, preferenceId string, orgId string, userId string) (*PreferenceKV, *model.ApiError) {
|
||||
// check if the preference key exists
|
||||
preference, seen := preferenceMap[preferenceId]
|
||||
if !seen {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
||||
}
|
||||
|
||||
preferenceValue := PreferenceKV{
|
||||
PreferenceId: preferenceId,
|
||||
PreferenceValue: preference.DefaultValue,
|
||||
}
|
||||
|
||||
// check if the preference is enabled at user scope
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
|
||||
}
|
||||
|
||||
isPreferenceEnabledAtOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
||||
// get the value from the org scope if enabled at org scope
|
||||
if isPreferenceEnabledAtOrgScope {
|
||||
orgPreference := PreferenceKV{}
|
||||
|
||||
query := `SELECT preference_id , preference_value FROM org_preference WHERE preference_id=$1 AND org_id=$2;`
|
||||
|
||||
err := db.Get(&orgPreference, query, preferenceId, orgId)
|
||||
|
||||
// if there is an error in getting values and it's not an empty-rows error, return from here
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting org preference values: %s", err.Error())}
|
||||
}
|
||||
|
||||
// if there is no error update the preference value with value from org preference
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = orgPreference.PreferenceValue
|
||||
}
|
||||
}
|
||||
|
||||
// get the value from the user_preference table, if exists return this value else the one calculated in the above step
|
||||
userPreference := PreferenceKV{}
|
||||
|
||||
query := `SELECT preference_id, preference_value FROM user_preference WHERE preference_id=$1 AND user_id=$2;`
|
||||
err := db.Get(&userPreference, query, preferenceId, userId)
|
||||
|
||||
if err != nil && err != sql.ErrNoRows {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting user preference values: %s", err.Error())}
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
preferenceValue.PreferenceValue = userPreference.PreferenceValue
|
||||
}
|
||||
|
||||
return &PreferenceKV{
|
||||
PreferenceId: preferenceValue.PreferenceId,
|
||||
PreferenceValue: preference.SanitizeValue(preferenceValue.PreferenceValue),
|
||||
}, nil
|
||||
}
|
||||
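GetUserPreference resolves a value in three layers: the hard-coded default, then any org-level override, then any user-level override. A compact sketch of that precedence factored out for clarity; this helper does not exist in the codebase and is only meant to summarise the logic above.

// Hypothetical helper mirroring GetUserPreference's resolution order:
// default value < org_preference row < user_preference row.
func resolvePreferenceValue(defaultValue interface{}, orgValue interface{}, orgSet bool, userValue interface{}, userSet bool) interface{} {
	value := defaultValue
	if orgSet {
		value = orgValue
	}
	if userSet {
		value = userValue
	}
	return value
}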
|
||||
func UpdateUserPreference(ctx context.Context, preferenceId string, preferenceValue interface{}, userId string) (*PreferenceKV, *model.ApiError) {
|
||||
// check if the preference id is valid
|
||||
preference, seen := preferenceMap[preferenceId]
|
||||
if !seen {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("no such preferenceId exists: %s", preferenceId)}
|
||||
}
|
||||
|
||||
// check if the preference is enabled at user scope
|
||||
isPreferenceEnabledAtUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
||||
if !isPreferenceEnabledAtUserScope {
|
||||
return nil, &model.ApiError{Typ: model.ErrorNotFound, Err: fmt.Errorf("preference is not enabled at user scope: %s", preferenceId)}
|
||||
}
|
||||
|
||||
err := preference.IsValidValue(preferenceValue)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// update the user preference values
|
||||
query := `INSERT INTO user_preference(preference_id,preference_value,user_id) VALUES($1,$2,$3)
|
||||
ON CONFLICT(preference_id,user_id) DO
|
||||
UPDATE SET preference_value= $2 WHERE preference_id=$1 AND user_id=$3;`
|
||||
|
||||
_, dberrr := db.Exec(query, preferenceId, preferenceValue, userId)
|
||||
|
||||
if dberrr != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in setting the preference value: %s", dberrr.Error())}
|
||||
}
|
||||
|
||||
return &PreferenceKV{
|
||||
PreferenceId: preferenceId,
|
||||
PreferenceValue: preferenceValue,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func GetAllUserPreferences(ctx context.Context, orgId string, userId string) (*[]AllPreferences, *model.ApiError) {
|
||||
allUserPreferences := []AllPreferences{}
|
||||
|
||||
// fetch all the org preference values stored in org_preference table
|
||||
orgPreferenceValues := []PreferenceKV{}
|
||||
|
||||
query := `SELECT preference_id,preference_value FROM org_preference WHERE org_id=$1;`
|
||||
err := db.Select(&orgPreferenceValues, query, orgId)
|
||||
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all org preference values: %s", err)}
|
||||
}
|
||||
|
||||
// create a map of key vs values from the above response
|
||||
preferenceOrgValueMap := map[string]interface{}{}
|
||||
|
||||
for _, preferenceValue := range orgPreferenceValues {
|
||||
preferenceOrgValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
// fetch all the user preference values stored in user_preference table
|
||||
userPreferenceValues := []PreferenceKV{}
|
||||
|
||||
query = `SELECT preference_id,preference_value FROM user_preference WHERE user_id=$1;`
|
||||
err = db.Select(&userPreferenceValues, query, userId)
|
||||
|
||||
if err != nil {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: fmt.Errorf("error in getting all user preference values: %s", err)}
|
||||
}
|
||||
|
||||
// create a map of key vs values from the above response
|
||||
preferenceUserValueMap := map[string]interface{}{}
|
||||
|
||||
for _, preferenceValue := range userPreferenceValues {
|
||||
preferenceUserValueMap[preferenceValue.PreferenceId] = preferenceValue.PreferenceValue
|
||||
}
|
||||
|
||||
// update the filtered list wherever a value is present in the map
|
||||
for _, preference := range preferenceMap {
|
||||
isEnabledForUserScope := preference.IsEnabledForScope(UserAllowedScope)
|
||||
|
||||
if isEnabledForUserScope {
|
||||
preferenceWithValue := AllPreferences{}
|
||||
preferenceWithValue.Key = preference.Key
|
||||
preferenceWithValue.Name = preference.Name
|
||||
preferenceWithValue.Description = preference.Description
|
||||
preferenceWithValue.AllowedScopes = preference.AllowedScopes
|
||||
preferenceWithValue.AllowedValues = preference.AllowedValues
|
||||
preferenceWithValue.DefaultValue = preference.DefaultValue
|
||||
preferenceWithValue.Range = preference.Range
|
||||
preferenceWithValue.ValueType = preference.ValueType
|
||||
preferenceWithValue.IsDiscreteValues = preference.IsDiscreteValues
|
||||
preferenceWithValue.Value = preference.DefaultValue
|
||||
|
||||
isEnabledForOrgScope := preference.IsEnabledForScope(OrgAllowedScope)
|
||||
if isEnabledForOrgScope {
|
||||
value, seen := preferenceOrgValueMap[preference.Key]
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
}
|
||||
}
|
||||
|
||||
value, seen := preferenceUserValueMap[preference.Key]
|
||||
|
||||
if seen {
|
||||
preferenceWithValue.Value = value
|
||||
}
|
||||
|
||||
preferenceWithValue.Value = preference.SanitizeValue(preferenceWithValue.Value)
|
||||
allUserPreferences = append(allUserPreferences, preferenceWithValue)
|
||||
}
|
||||
}
|
||||
return &allUserPreferences, nil
|
||||
}
|
||||
@@ -14,6 +14,8 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
"github.com/SigNoz/signoz/pkg/modules/preference"
|
||||
preferencecore "github.com/SigNoz/signoz/pkg/modules/preference/core"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
@@ -22,11 +24,11 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/preferences"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/preferencetypes"
|
||||
"github.com/SigNoz/signoz/pkg/web"
|
||||
"github.com/rs/cors"
|
||||
"github.com/soheilhy/cmux"
|
||||
@@ -97,10 +99,6 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := preferences.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := dashboards.InitDB(serverOptions.SigNoz.SQLStore); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -120,7 +118,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
}
|
||||
|
||||
clickhouseReader := clickhouseReader.NewReader(
|
||||
serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
serverOptions.SigNoz.SQLStore,
|
||||
serverOptions.SigNoz.TelemetryStore.ClickHouseDB(),
|
||||
serverOptions.PromConfigPath,
|
||||
fm,
|
||||
@@ -151,6 +149,8 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
c = cache.NewCache(cacheOpts)
|
||||
}
|
||||
|
||||
preference := preference.NewAPI(preferencecore.NewPreference(preferencecore.NewStore(serverOptions.SigNoz.SQLStore), preferencetypes.NewDefaultPreferenceMap()))
|
||||
|
||||
<-readerReady
|
||||
rm, err := makeRulesManager(
|
||||
serverOptions.RuleRepoURL,
|
||||
@@ -207,6 +207,7 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
JWT: serverOptions.Jwt,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(serverOptions.SigNoz.Alertmanager),
|
||||
Signoz: serverOptions.SigNoz,
|
||||
Preference: preference,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -235,13 +236,10 @@ func NewServer(serverOptions *ServerOptions) (*Server, error) {
|
||||
|
||||
s.privateHTTP = privateServer
|
||||
|
||||
_, err = opAmpModel.InitDB(serverOptions.SigNoz.SQLStore.SQLxDB())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
opAmpModel.InitDB(serverOptions.SigNoz.SQLStore)
|
||||
|
||||
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
|
||||
DB: serverOptions.SigNoz.SQLStore.SQLxDB(),
|
||||
Store: serverOptions.SigNoz.SQLStore,
|
||||
AgentFeatures: []agentConf.AgentFeature{
|
||||
logParsingPipelineController,
|
||||
},
|
||||
|
||||
@@ -89,13 +89,13 @@ func (aH *APIHandler) GetTreeMap(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
params, apiError := explorer.ParseTreeMapMetricsParams(r)
|
||||
if apiError != nil {
|
||||
zap.L().Error("error parsing heatmap metric params", zap.Error(apiError.Err))
|
||||
zap.L().Error("error parsing tree map metric params", zap.Error(apiError.Err))
|
||||
RespondError(w, apiError, nil)
|
||||
return
|
||||
}
|
||||
result, apiError := aH.SummaryService.GetMetricsTreemap(ctx, params)
|
||||
if apiError != nil {
|
||||
zap.L().Error("error getting heatmap data", zap.Error(apiError.Err))
|
||||
zap.L().Error("error getting tree map data", zap.Error(apiError.Err))
|
||||
RespondError(w, apiError, nil)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -329,6 +329,9 @@ func CreateResetPasswordToken(ctx context.Context, userId string) (*types.ResetP
|
||||
}
|
||||
|
||||
req := &types.ResetPasswordRequest{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: valuer.GenerateUUID(),
|
||||
},
|
||||
UserID: userId,
|
||||
Token: token,
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
@@ -48,6 +49,7 @@ func (mds *ModelDaoSqlite) GetApdexSettings(ctx context.Context, orgID string, s
|
||||
func (mds *ModelDaoSqlite) SetApdexSettings(ctx context.Context, orgID string, apdexSettings *types.ApdexSettings) *model.ApiError {
|
||||
// Set the org_id from the parameter since it's required for the foreign key constraint
|
||||
apdexSettings.OrgID = orgID
|
||||
apdexSettings.Identifiable.ID = valuer.GenerateUUID()
|
||||
|
||||
_, err := mds.bundb.NewInsert().
|
||||
Model(apdexSettings).
|
||||
|
||||
@@ -23,7 +23,7 @@ type Reader interface {
|
||||
GetServicesList(ctx context.Context) (*[]string, error)
|
||||
GetDependencyGraph(ctx context.Context, query *model.GetServicesParams) (*[]model.ServiceMapDependencyResponseItem, error)
|
||||
|
||||
GetTTL(ctx context.Context, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
|
||||
GetTTL(ctx context.Context, orgID string, ttlParams *model.GetTTLParams) (*model.GetTTLResponseItem, *model.ApiError)
|
||||
|
||||
// GetDisks returns a list of disks configured in the underlying DB. It is supported by
|
||||
// clickhouse only.
|
||||
@@ -45,7 +45,7 @@ type Reader interface {
|
||||
GetFlamegraphSpansForTrace(ctx context.Context, traceID string, req *model.GetFlamegraphSpansForTraceParams) (*model.GetFlamegraphSpansForTraceResponse, *model.ApiError)
|
||||
|
||||
// Setter Interfaces
|
||||
SetTTL(ctx context.Context, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
|
||||
SetTTL(ctx context.Context, orgID string, ttlParams *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError)
|
||||
|
||||
FetchTemporality(ctx context.Context, metricNames []string) (map[string]map[v3.Temporality]bool, error)
|
||||
GetMetricAggregateAttributes(ctx context.Context, req *v3.AggregateAttributeRequest, skipDotNames bool, skipSignozMetrics bool) (*v3.AggregateAttributeResponse, error)
|
||||
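Every Reader call that touches TTL now carries the org explicitly. A sketch of the updated call shape; the Reader interface name and how orgID is resolved (for example from auth claims) are assumptions, the telemetry job below still passes an empty string.

// Sketch: an org-scoped TTL lookup with the new signature.
func logTraceTTLForOrg(ctx context.Context, reader interfaces.Reader, orgID string) {
	traceTTL, apiErr := reader.GetTTL(ctx, orgID, &model.GetTTLParams{Type: constants.TraceTTL})
	if apiErr != nil {
		zap.L().Error("failed to fetch trace TTL", zap.String("orgID", orgID), zap.Error(apiErr.Err))
		return
	}
	zap.L().Info("trace TTL", zap.Any("ttl", traceTTL))
}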
|
||||
@@ -9,7 +9,7 @@ type SummaryListMetricsRequest struct {
|
||||
Limit int `json:"limit"`
|
||||
OrderBy v3.OrderBy `json:"orderBy"`
|
||||
Start int64 `json:"start"`
|
||||
EndD int64 `json:"end"`
|
||||
End int64 `json:"end"`
|
||||
Filters v3.FilterSet `json:"filters"`
|
||||
}
|
||||
|
||||
@@ -24,7 +24,7 @@ type TreeMapMetricsRequest struct {
|
||||
Limit int `json:"limit"`
|
||||
Treemap TreeMapType `json:"treemap"`
|
||||
Start int64 `json:"start"`
|
||||
EndD int64 `json:"end"`
|
||||
End int64 `json:"end"`
|
||||
Filters v3.FilterSet `json:"filters"`
|
||||
}
|
||||
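The rename from EndD to End keeps the wire key "end" unchanged; only the Go field name was off. A small sketch of building and serialising a request with the corrected field (timestamps are placeholder millisecond epochs; Treemap and Filters are left at their zero values for brevity):

req := TreeMapMetricsRequest{
	Limit: 10,
	Start: 1700000000000, // placeholder start (ms)
	End:   1700003600000, // renamed field; still marshals as "end"
}
body, err := json.Marshal(req)
if err != nil {
	panic(err)
}
fmt.Println(string(body)) // payload carries the "limit", "start" and "end" keys as before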
|
||||
|
||||
@@ -317,9 +317,10 @@ func createTelemetry() {
|
||||
|
||||
getLogsInfoInLastHeartBeatInterval, _ := telemetry.reader.GetLogsInfoInLastHeartBeatInterval(ctx, HEART_BEAT_DURATION)
|
||||
|
||||
traceTTL, _ := telemetry.reader.GetTTL(ctx, &model.GetTTLParams{Type: constants.TraceTTL})
|
||||
metricsTTL, _ := telemetry.reader.GetTTL(ctx, &model.GetTTLParams{Type: constants.MetricsTTL})
|
||||
logsTTL, _ := telemetry.reader.GetTTL(ctx, &model.GetTTLParams{Type: constants.LogsTTL})
|
||||
// TODO update this post bootstrap decision
|
||||
traceTTL, _ := telemetry.reader.GetTTL(ctx, "", &model.GetTTLParams{Type: constants.TraceTTL})
|
||||
metricsTTL, _ := telemetry.reader.GetTTL(ctx, "", &model.GetTTLParams{Type: constants.MetricsTTL})
|
||||
logsTTL, _ := telemetry.reader.GetTTL(ctx, "", &model.GetTTLParams{Type: constants.LogsTTL})
|
||||
|
||||
userCount, _ := telemetry.userCountCallback(ctx)
|
||||
|
||||
|
||||
@@ -293,7 +293,7 @@ func NewFilterSuggestionsTestBed(t *testing.T) *FilterSuggestionsTestBed {
|
||||
testDB := utils.NewQueryServiceDBForTests(t)
|
||||
|
||||
fm := featureManager.StartManager()
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB.SQLxDB(), fm)
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB, fm)
|
||||
mockClickhouse.MatchExpectationsInOrder(false)
|
||||
|
||||
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/logparsingpipeline"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp"
|
||||
opampModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
@@ -37,6 +37,9 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
testbed := NewLogPipelinesTestBed(t, nil)
|
||||
require := require.New(t)
|
||||
|
||||
orgID, err := utils.GetTestOrgId(testbed.sqlStore)
|
||||
require.Nil(err)
|
||||
|
||||
getPipelinesResp := testbed.GetPipelinesFromQS()
|
||||
require.Equal(
|
||||
0, len(getPipelinesResp.Pipelines),
|
||||
@@ -107,7 +110,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
t, postablePipelines, createPipelinesResp,
|
||||
)
|
||||
testbed.assertPipelinesSentToOpampClient(createPipelinesResp.Pipelines)
|
||||
testbed.assertNewAgentGetsPipelinesOnConnection(createPipelinesResp.Pipelines)
|
||||
testbed.assertNewAgentGetsPipelinesOnConnection(orgID, createPipelinesResp.Pipelines)
|
||||
|
||||
// Should be able to get the configured pipelines.
|
||||
getPipelinesResp = testbed.GetPipelinesFromQS()
|
||||
@@ -121,7 +124,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
"pipelines config history should not be empty after 1st configuration",
|
||||
)
|
||||
require.Equal(
|
||||
agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
|
||||
types.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
|
||||
"pipelines deployment should be in progress after 1st configuration",
|
||||
)
|
||||
|
||||
@@ -133,7 +136,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
t, postablePipelines, getPipelinesResp,
|
||||
)
|
||||
require.Equal(
|
||||
agentConf.Deployed,
|
||||
types.Deployed,
|
||||
getPipelinesResp.History[0].DeployStatus,
|
||||
"pipeline deployment should be complete after acknowledgment from opamp client",
|
||||
)
|
||||
@@ -145,7 +148,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
t, postablePipelines, updatePipelinesResp,
|
||||
)
|
||||
testbed.assertPipelinesSentToOpampClient(updatePipelinesResp.Pipelines)
|
||||
testbed.assertNewAgentGetsPipelinesOnConnection(updatePipelinesResp.Pipelines)
|
||||
testbed.assertNewAgentGetsPipelinesOnConnection(orgID, updatePipelinesResp.Pipelines)
|
||||
|
||||
getPipelinesResp = testbed.GetPipelinesFromQS()
|
||||
require.Equal(
|
||||
@@ -153,7 +156,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
"there should be 2 history entries after posting pipelines config for the 2nd time",
|
||||
)
|
||||
require.Equal(
|
||||
agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
|
||||
types.DeployInitiated, getPipelinesResp.History[0].DeployStatus,
|
||||
"deployment should be in progress for latest pipeline config",
|
||||
)
|
||||
|
||||
@@ -165,7 +168,7 @@ func TestLogPipelinesLifecycle(t *testing.T) {
|
||||
t, postablePipelines, getPipelinesResp,
|
||||
)
|
||||
require.Equal(
|
||||
agentConf.Deployed,
|
||||
types.Deployed,
|
||||
getPipelinesResp.History[0].DeployStatus,
|
||||
"deployment for latest pipeline config should be complete after acknowledgment from opamp client",
|
||||
)
|
||||
@@ -219,7 +222,7 @@ func TestLogPipelinesHistory(t *testing.T) {
|
||||
testbed.PostPipelinesToQS(postablePipelines)
|
||||
getPipelinesResp = testbed.GetPipelinesFromQS()
|
||||
require.Equal(1, len(getPipelinesResp.History))
|
||||
require.Equal(agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus)
|
||||
require.Equal(types.DeployInitiated, getPipelinesResp.History[0].DeployStatus)
|
||||
|
||||
postablePipelines.Pipelines[0].Config = append(
|
||||
postablePipelines.Pipelines[0].Config,
|
||||
@@ -238,8 +241,8 @@ func TestLogPipelinesHistory(t *testing.T) {
|
||||
getPipelinesResp = testbed.GetPipelinesFromQS()
|
||||
|
||||
require.Equal(2, len(getPipelinesResp.History))
|
||||
require.Equal(agentConf.DeployInitiated, getPipelinesResp.History[0].DeployStatus)
|
||||
require.Equal(agentConf.DeployStatusUnknown, getPipelinesResp.History[1].DeployStatus)
|
||||
require.Equal(types.DeployInitiated, getPipelinesResp.History[0].DeployStatus)
|
||||
require.Equal(types.DeployStatusUnknown, getPipelinesResp.History[1].DeployStatus)
|
||||
}
|
||||
|
||||
func TestLogPipelinesValidation(t *testing.T) {
|
||||
@@ -447,24 +450,22 @@ type LogPipelinesTestBed struct {
|
||||
agentConfMgr *agentConf.Manager
|
||||
opampServer *opamp.Server
|
||||
opampClientConn *opamp.MockOpAmpConnection
|
||||
sqlStore sqlstore.SQLStore
|
||||
}
|
||||
|
||||
// testDB can be injected for sharing a DB across multiple integration testbeds.
|
||||
func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipelinesTestBed {
|
||||
if sqlStore == nil {
|
||||
sqlStore = utils.NewQueryServiceDBForTests(t)
|
||||
func NewTestbedWithoutOpamp(t *testing.T, store sqlstore.SQLStore) *LogPipelinesTestBed {
|
||||
if store == nil {
|
||||
store = utils.NewQueryServiceDBForTests(t)
|
||||
}
|
||||
|
||||
// create test org
|
||||
// utils.CreateTestOrg(t, sqlStore)
|
||||
|
||||
ic, err := integrations.NewController(sqlStore)
|
||||
ic, err := integrations.NewController(store)
|
||||
if err != nil {
|
||||
t.Fatalf("could not create integrations controller: %v", err)
|
||||
}
|
||||
|
||||
controller, err := logparsingpipeline.NewLogParsingPipelinesController(
|
||||
sqlStore, ic.GetPipelinesForInstalledIntegrations,
|
||||
store, ic.GetPipelinesForInstalledIntegrations,
|
||||
)
|
||||
if err != nil {
|
||||
t.Fatalf("could not create a logparsingpipelines controller: %v", err)
|
||||
@@ -485,11 +486,11 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli
|
||||
}
|
||||
|
||||
// Mock an available opamp agent
|
||||
testDB, err := opampModel.InitDB(sqlStore.SQLxDB())
|
||||
// testDB, err := opampModel.InitDB(sqlStore.SQLxDB())
|
||||
require.Nil(t, err, "failed to init opamp model")
|
||||
|
||||
agentConfMgr, err := agentConf.Initiate(&agentConf.ManagerOptions{
|
||||
DB: testDB,
|
||||
Store: store,
|
||||
AgentFeatures: []agentConf.AgentFeature{
|
||||
apiHandler.LogsParsingPipelineController,
|
||||
}})
|
||||
@@ -500,14 +501,20 @@ func NewTestbedWithoutOpamp(t *testing.T, sqlStore sqlstore.SQLStore) *LogPipeli
|
||||
testUser: user,
|
||||
apiHandler: apiHandler,
|
||||
agentConfMgr: agentConfMgr,
|
||||
sqlStore: store,
|
||||
}
|
||||
}
|
||||
|
||||
func NewLogPipelinesTestBed(t *testing.T, testDB sqlstore.SQLStore) *LogPipelinesTestBed {
|
||||
testbed := NewTestbedWithoutOpamp(t, testDB)
|
||||
|
||||
orgID, err := utils.GetTestOrgId(testbed.sqlStore)
|
||||
require.Nil(t, err)
|
||||
|
||||
model.InitDB(testbed.sqlStore)
|
||||
|
||||
opampServer := opamp.InitializeServer(nil, testbed.agentConfMgr)
|
||||
err := opampServer.Start(opamp.GetAvailableLocalAddress())
|
||||
err = opampServer.Start(opamp.GetAvailableLocalAddress())
|
||||
require.Nil(t, err, "failed to start opamp server")
|
||||
|
||||
t.Cleanup(func() {
|
||||
@@ -522,6 +529,16 @@ func NewLogPipelinesTestBed(t *testing.T, testDB sqlstore.SQLStore) *LogPipeline
|
||||
EffectiveConfig: &protobufs.EffectiveConfig{
|
||||
ConfigMap: newInitialAgentConfigMap(),
|
||||
},
|
||||
AgentDescription: &protobufs.AgentDescription{
|
||||
IdentifyingAttributes: []*protobufs.KeyValue{
|
||||
{
|
||||
Key: "orgId",
|
||||
Value: &protobufs.AnyValue{
|
||||
Value: &protobufs.AnyValue_StringValue{StringValue: orgID},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
@@ -728,6 +745,7 @@ func (tb *LogPipelinesTestBed) simulateOpampClientAcknowledgementForLatestConfig
|
||||
}
|
||||
|
||||
func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection(
|
||||
orgID string,
|
||||
pipelines []pipelinetypes.GettablePipeline,
|
||||
) {
|
||||
newAgentConn := &opamp.MockOpAmpConnection{}
|
||||
@@ -738,6 +756,16 @@ func (tb *LogPipelinesTestBed) assertNewAgentGetsPipelinesOnConnection(
|
||||
EffectiveConfig: &protobufs.EffectiveConfig{
|
||||
ConfigMap: newInitialAgentConfigMap(),
|
||||
},
|
||||
AgentDescription: &protobufs.AgentDescription{
|
||||
IdentifyingAttributes: []*protobufs.KeyValue{
|
||||
{
|
||||
Key: "orgId",
|
||||
Value: &protobufs.AnyValue{
|
||||
Value: &protobufs.AnyValue_StringValue{StringValue: orgID},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
latestMsgFromServer := newAgentConn.LatestMsgFromServer()
|
||||
|
||||
@@ -355,7 +355,7 @@ func NewCloudIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *CloudI
|
||||
}
|
||||
|
||||
fm := featureManager.StartManager()
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB.SQLxDB(), fm)
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB, fm)
|
||||
mockClickhouse.MatchExpectationsInOrder(false)
|
||||
|
||||
apiHandler, err := app.NewAPIHandler(app.APIHandlerOpts{
|
||||
|
||||
@@ -32,6 +32,9 @@ func TestSignozIntegrationLifeCycle(t *testing.T) {
|
||||
require := require.New(t)
|
||||
testbed := NewIntegrationsTestBed(t, nil)
|
||||
|
||||
merr := utils.CreateTestOrg(t, testbed.store)
|
||||
require.NoError(merr)
|
||||
|
||||
installedResp := testbed.GetInstalledIntegrationsFromQS()
|
||||
require.Equal(
|
||||
len(installedResp.Integrations), 0,
|
||||
@@ -115,6 +118,11 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
require := require.New(t)
|
||||
|
||||
testDB := utils.NewQueryServiceDBForTests(t)
|
||||
utils.CreateTestOrg(t, testDB)
|
||||
|
||||
orgID, err := utils.GetTestOrgId(testDB)
|
||||
require.Nil(err)
|
||||
|
||||
integrationsTB := NewIntegrationsTestBed(t, testDB)
|
||||
pipelinesTB := NewLogPipelinesTestBed(t, testDB)
|
||||
|
||||
@@ -172,7 +180,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(lastPipeline))
|
||||
|
||||
pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(orgID, getPipelinesResp.Pipelines)
|
||||
|
||||
// After saving a user created pipeline, pipelines response should include
|
||||
// both user created pipelines and pipelines for installed integrations.
|
||||
@@ -217,7 +225,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
getPipelinesResp = pipelinesTB.GetPipelinesFromQS()
|
||||
require.Equal(1+len(testIntegrationPipelines), len(getPipelinesResp.Pipelines))
|
||||
pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(orgID, getPipelinesResp.Pipelines)
|
||||
|
||||
// Reordering integration pipelines should be possible.
|
||||
postable := postableFromPipelines(getPipelinesResp.Pipelines)
|
||||
@@ -234,7 +242,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
require.Equal(testIntegration.Id, *integrations.IntegrationIdForPipeline(firstPipeline))
|
||||
|
||||
pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(orgID, getPipelinesResp.Pipelines)
|
||||
|
||||
// enabling/disabling integration pipelines should be possible.
|
||||
require.True(firstPipeline.Enabled)
|
||||
@@ -252,7 +260,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
require.False(firstPipeline.Enabled)
|
||||
|
||||
pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(orgID, getPipelinesResp.Pipelines)
|
||||
|
||||
// should not be able to edit integrations pipeline.
|
||||
require.Greater(len(postable.Pipelines[0].Config), 0)
|
||||
@@ -291,7 +299,7 @@ func TestLogPipelinesForInstalledSignozIntegrations(t *testing.T) {
|
||||
"Pipelines for uninstalled integrations should get removed from pipelines list",
|
||||
)
|
||||
pipelinesTB.assertPipelinesSentToOpampClient(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(getPipelinesResp.Pipelines)
|
||||
pipelinesTB.assertNewAgentGetsPipelinesOnConnection(orgID, getPipelinesResp.Pipelines)
|
||||
}
|
||||
|
||||
func TestDashboardsForInstalledIntegrationDashboards(t *testing.T) {
|
||||
@@ -370,6 +378,7 @@ type IntegrationsTestBed struct {
|
||||
testUser *types.User
|
||||
qsHttpHandler http.Handler
|
||||
mockClickhouse mockhouse.ClickConnMockCommon
|
||||
store sqlstore.SQLStore
|
||||
}
|
||||
|
||||
func (tb *IntegrationsTestBed) GetAvailableIntegrationsFromQS() *integrations.IntegrationsListResponse {
|
||||
@@ -557,7 +566,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
|
||||
}
|
||||
|
||||
fm := featureManager.StartManager()
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB.SQLxDB(), fm)
|
||||
reader, mockClickhouse := NewMockClickhouseReader(t, testDB, fm)
|
||||
mockClickhouse.MatchExpectationsInOrder(false)
|
||||
|
||||
cloudIntegrationsController, err := cloudintegrations.NewController(testDB)
|
||||
@@ -593,6 +602,7 @@ func NewIntegrationsTestBed(t *testing.T, testDB sqlstore.SQLStore) *Integration
|
||||
testUser: user,
|
||||
qsHttpHandler: router,
|
||||
mockClickhouse: mockClickhouse,
|
||||
store: testDB,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -20,10 +20,10 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/dao"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/google/uuid"
|
||||
"github.com/jmoiron/sqlx"
|
||||
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/entry"
|
||||
mockhouse "github.com/srikanthccv/ClickHouse-go-mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
@@ -33,7 +33,7 @@ import (
|
||||
var jwt = authtypes.NewJWT("secret", 1*time.Hour, 2*time.Hour)
|
||||
|
||||
func NewMockClickhouseReader(
|
||||
t *testing.T, testDB *sqlx.DB, featureFlags interfaces.FeatureLookup,
|
||||
t *testing.T, testDB sqlstore.SQLStore, featureFlags interfaces.FeatureLookup,
|
||||
) (
|
||||
*clickhouseReader.ClickHouseReader, mockhouse.ClickConnMockCommon,
|
||||
) {
|
||||
|
||||
@@ -13,6 +13,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigrator"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/google/uuid"
|
||||
_ "github.com/mattn/go-sqlite3"
|
||||
)
|
||||
|
||||
@@ -51,6 +53,7 @@ func NewTestSqliteDB(t *testing.T) (sqlStore sqlstore.SQLStore, testDBFilePath s
|
||||
sqlmigration.NewUpdateDashboardAndSavedViewsFactory(sqlStore),
|
||||
sqlmigration.NewUpdatePatAndOrgDomainsFactory(sqlStore),
|
||||
sqlmigration.NewUpdatePipelines(sqlStore),
|
||||
sqlmigration.NewUpdateAgentsFactory(sqlStore),
|
||||
),
|
||||
)
|
||||
if err != nil {
|
||||
@@ -76,3 +79,28 @@ func NewQueryServiceDBForTests(t *testing.T) sqlstore.SQLStore {
|
||||
|
||||
return sqlStore
|
||||
}
|
||||
|
||||
func CreateTestOrg(t *testing.T, store sqlstore.SQLStore) error {
|
||||
org := &types.Organization{
|
||||
ID: uuid.NewString(),
|
||||
Name: "testOrg",
|
||||
}
|
||||
_, err := store.BunDB().NewInsert().Model(org).Exec(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetTestOrgId(store sqlstore.SQLStore) (string, error) {
|
||||
var orgID string
|
||||
err := store.BunDB().NewSelect().
|
||||
Model(&types.Organization{}).
|
||||
Column("id").
|
||||
Limit(1).
|
||||
Scan(context.Background(), &orgID)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return orgID, nil
|
||||
}
|
||||
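These helpers are what the updated integration tests lean on: seed a single org up front, then fetch its id wherever an org-scoped API is exercised. A condensed sketch of that pattern, mirroring the log pipeline tests above; the test name is illustrative.

func TestSomethingOrgScoped(t *testing.T) {
	store := utils.NewQueryServiceDBForTests(t)

	// Seed exactly one organization so org-backfilling migrations and
	// org-scoped lookups have something to resolve.
	require.NoError(t, utils.CreateTestOrg(t, store))

	orgID, err := utils.GetTestOrgId(store)
	require.Nil(t, err)
	require.NotEmpty(t, orgID)

	// orgID can now be passed to org-aware helpers, e.g.
	// testbed.assertNewAgentGetsPipelinesOnConnection(orgID, pipelines).
}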
|
||||
@@ -10,7 +10,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlmigration"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
@@ -39,7 +38,6 @@ func NewSQLStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[sql
|
||||
hook := sqlstorehook.NewLoggingFactory()
|
||||
return factory.MustNewNamedMap(
|
||||
sqlitesqlstore.NewFactory(hook),
|
||||
postgressqlstore.NewFactory(hook),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -64,6 +62,11 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM
|
||||
sqlmigration.NewUpdatePipelines(sqlstore),
|
||||
sqlmigration.NewDropLicensesSitesFactory(sqlstore),
|
||||
sqlmigration.NewUpdateInvitesFactory(sqlstore),
|
||||
sqlmigration.NewUpdateAgentsFactory(sqlstore),
|
||||
sqlmigration.NewUpdateAlertmanagerFactory(sqlstore),
|
||||
sqlmigration.NewUpdatePreferencesFactory(sqlstore),
|
||||
sqlmigration.NewUpdateApdexTtlFactory(sqlstore),
|
||||
sqlmigration.NewUpdateResetPasswordFactory(sqlstore),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager/alertmanagerserver"
|
||||
@@ -50,12 +49,15 @@ func (migration *addAlertmanager) Up(ctx context.Context, db *bun.DB) error {
|
||||
|
||||
defer tx.Rollback() //nolint:errcheck
|
||||
|
||||
if _, err := tx.
|
||||
NewDropColumn().
|
||||
Table("notification_channels").
|
||||
ColumnExpr("deleted").
|
||||
Exec(ctx); err != nil {
|
||||
if !strings.Contains(err.Error(), "no such column") {
|
||||
// check if column exists
|
||||
if exists, err := migration.store.Dialect().ColumnExists(ctx, tx, "notification_channels", "deleted"); err != nil {
|
||||
return err
|
||||
} else if exists {
|
||||
if _, err := tx.
|
||||
NewDropColumn().
|
||||
Table("notification_channels").
|
||||
ColumnExpr("deleted").
|
||||
Exec(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
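The change above replaces "drop the column and swallow the 'no such column' error" with an explicit existence check, which behaves the same on SQLite and Postgres. The pattern generalises; a hedged sketch of it as a standalone helper (the Dialect().ColumnExists and NewDropColumn calls are the ones used above, the wrapper function itself is hypothetical):

// dropColumnIfExists sketches the check-then-drop pattern used in the
// migration above.
func dropColumnIfExists(ctx context.Context, store sqlstore.SQLStore, tx bun.Tx, table string, column string) error {
	exists, err := store.Dialect().ColumnExists(ctx, tx, table, column)
	if err != nil {
		return err
	}
	if !exists {
		return nil // column already gone; keeps the migration idempotent
	}
	_, err = tx.NewDropColumn().Table(table).ColumnExpr(column).Exec(ctx)
	return err
}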
|
||||
@@ -115,7 +115,7 @@ func updateOrgId(ctx context.Context, tx bun.Tx, table string) error {
|
||||
}
|
||||
|
||||
// copy data from org_domains to org_domains_new
|
||||
if _, err := tx.ExecContext(ctx, `INSERT INTO org_domains_new (id, org_id, name, created_at, updated_at, data) SELECT id, org_id, name, created_at, updated_at, data FROM org_domains`); err != nil {
|
||||
if _, err := tx.ExecContext(ctx, `INSERT INTO org_domains_new (id, org_id, name, data) SELECT id, org_id, name, data FROM org_domains`); err != nil {
|
||||
return err
|
||||
}
|
||||
// delete old table
|
||||
|
||||
@@ -75,7 +75,7 @@ func (migration *updateInvites) Up(ctx context.Context, db *bun.DB) error {
|
||||
err = migration.
|
||||
store.
|
||||
Dialect().
|
||||
RenameTableAndModifyModel(ctx, tx, new(existingInvite), new(newInvite), func(ctx context.Context) error {
|
||||
RenameTableAndModifyModel(ctx, tx, new(existingInvite), new(newInvite), OrgReference, func(ctx context.Context) error {
|
||||
existingInvites := make([]*existingInvite, 0)
|
||||
err = tx.
|
||||
NewSelect().
|
||||
|
||||
pkg/sqlmigration/020_update_agents.go (new file, 95 lines)
@@ -0,0 +1,95 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updateAgents struct {
    store sqlstore.SQLStore
}

func NewUpdateAgentsFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("update_agents"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
        return newUpdateAgents(ctx, ps, c, sqlstore)
    })
}

func newUpdateAgents(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
    return &updateAgents{
        store: store,
    }, nil
}

func (migration *updateAgents) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updateAgents) Up(ctx context.Context, db *bun.DB) error {
    // begin transaction
    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }
    defer tx.Rollback()

    // get all org ids
    var orgIDs []string
    if err := migration.store.BunDB().NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs); err != nil {
        return err
    }

    // add an org_id column to the agent tables
    for _, table := range []string{"agents", "agent_config_versions", "agent_config_elements"} {
        if exists, err := migration.store.Dialect().ColumnExists(ctx, tx, table, "org_id"); err != nil {
            return err
        } else if !exists {
            if _, err := tx.NewAddColumn().Table(table).ColumnExpr("org_id TEXT REFERENCES organizations(id) ON DELETE CASCADE").Exec(ctx); err != nil {
                return err
            }

            // if there is exactly one org ID, set it on all existing rows
            if len(orgIDs) == 1 {
                orgID := orgIDs[0]
                if _, err := tx.NewUpdate().Table(table).Set("org_id = ?", orgID).Where("org_id IS NULL").Exec(ctx); err != nil {
                    return err
                }
            }
        }
    }

    // add a unique constraint on (org_id, agent_id) to the agents table
    if exists, err := migration.store.Dialect().IndexExists(ctx, tx, "agents", "idx_agents_org_id_agent_id"); err != nil {
        return err
    } else if !exists {
        if _, err := tx.NewCreateIndex().Table("agents").Index("idx_agents_org_id_agent_id").Column("org_id", "agent_id").Unique().Exec(ctx); err != nil {
            return err
        }
    }

    // rename agent_id to id
    _, err = migration.store.Dialect().RenameColumn(ctx, tx, "agents", "agent_id", "id")
    if err != nil {
        return err
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    return nil
}

func (migration *updateAgents) Down(ctx context.Context, db *bun.DB) error {
    return nil
}
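The Up migration above follows a pattern that recurs in the later migrations: add an org_id column if it is missing, backfill it when the deployment has exactly one organization, then enforce uniqueness. Below is a minimal, generic sketch of the backfill step using only database/sql; the table and column names come from the migration, while the package name, function name, and error-handling style are illustrative assumptions, not SigNoz code.

package agentsmigration

import (
    "context"
    "database/sql"
    "fmt"
)

// backfillOrgID is an illustrative stand-in for the backfill step above:
// when exactly one organization exists, stamp its id on every row that has
// no org_id yet. It is not the repository implementation.
func backfillOrgID(ctx context.Context, tx *sql.Tx, table string, orgIDs []string) error {
    if len(orgIDs) != 1 {
        // With zero or multiple orgs, the migration above leaves org_id NULL.
        return nil
    }
    query := fmt.Sprintf("UPDATE %s SET org_id = ? WHERE org_id IS NULL", table)
    if _, err := tx.ExecContext(ctx, query, orgIDs[0]); err != nil {
        return fmt.Errorf("backfill org_id on %s: %w", table, err)
    }
    return nil
}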
pkg/sqlmigration/021_update_alertmanager.go (new file, 277 lines)
@@ -0,0 +1,277 @@
package sqlmigration

import (
    "context"
    "database/sql"
    "time"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updateAlertmanager struct {
    store sqlstore.SQLStore
}

type existingChannel struct {
    bun.BaseModel `bun:"table:notification_channels"`
    ID int `json:"id" bun:"id,pk,autoincrement"`
    Name string `json:"name" bun:"name"`
    Type string `json:"type" bun:"type"`
    Data string `json:"data" bun:"data"`
    CreatedAt time.Time `json:"created_at" bun:"created_at"`
    UpdatedAt time.Time `json:"updated_at" bun:"updated_at"`
    OrgID string `json:"org_id" bun:"org_id"`
}

type newChannel struct {
    bun.BaseModel `bun:"table:notification_channel"`
    types.Identifiable
    types.TimeAuditable
    Name string `json:"name" bun:"name"`
    Type string `json:"type" bun:"type"`
    Data string `json:"data" bun:"data"`
    OrgID string `json:"org_id" bun:"org_id"`
}

type existingAlertmanagerConfig struct {
    bun.BaseModel `bun:"table:alertmanager_config"`
    ID uint64 `bun:"id,pk,autoincrement"`
    Config string `bun:"config,notnull,type:text"`
    Hash string `bun:"hash,notnull,type:text"`
    CreatedAt time.Time `bun:"created_at,notnull"`
    UpdatedAt time.Time `bun:"updated_at,notnull"`
    OrgID string `bun:"org_id,notnull,unique"`
}

type newAlertmanagerConfig struct {
    bun.BaseModel `bun:"table:alertmanager_config_new"`
    types.Identifiable
    types.TimeAuditable
    Config string `bun:"config,notnull,type:text"`
    Hash string `bun:"hash,notnull,type:text"`
    OrgID string `bun:"org_id,notnull,unique"`
}

type existingAlertmanagerState struct {
    bun.BaseModel `bun:"table:alertmanager_state"`
    ID uint64 `bun:"id,pk,autoincrement"`
    Silences string `bun:"silences,nullzero,type:text"`
    NFLog string `bun:"nflog,nullzero,type:text"`
    CreatedAt time.Time `bun:"created_at,notnull"`
    UpdatedAt time.Time `bun:"updated_at,notnull"`
    OrgID string `bun:"org_id,notnull,unique"`
}

type newAlertmanagerState struct {
    bun.BaseModel `bun:"table:alertmanager_state_new"`
    types.Identifiable
    types.TimeAuditable
    Silences string `bun:"silences,nullzero,type:text"`
    NFLog string `bun:"nflog,nullzero,type:text"`
    OrgID string `bun:"org_id,notnull,unique"`
}

func NewUpdateAlertmanagerFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
    return factory.
        NewProviderFactory(
            factory.MustNewName("update_alertmanager"),
            func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
                return newUpdateAlertmanager(ctx, ps, c, sqlstore)
            })
}

func newUpdateAlertmanager(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
    return &updateAlertmanager{store: store}, nil
}

func (migration *updateAlertmanager) Register(migrations *migrate.Migrations) error {
    if err := migrations.
        Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updateAlertmanager) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.
        BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer tx.Rollback()

    err = migration.
        store.
        Dialect().
        RenameTableAndModifyModel(ctx, tx, new(existingChannel), new(newChannel), OrgReference, func(ctx context.Context) error {
            existingChannels := make([]*existingChannel, 0)
            err = tx.
                NewSelect().
                Model(&existingChannels).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingChannels) > 0 {
                newChannels := migration.
                    CopyOldChannelToNewChannel(existingChannels)
                _, err = tx.
                    NewInsert().
                    Model(&newChannels).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }
            return nil
        })
    if err != nil {
        return err
    }

    err = migration.
        store.
        Dialect().
        UpdatePrimaryKey(ctx, tx, new(existingAlertmanagerConfig), new(newAlertmanagerConfig), OrgReference, func(ctx context.Context) error {
            existingAlertmanagerConfigs := make([]*existingAlertmanagerConfig, 0)
            err = tx.
                NewSelect().
                Model(&existingAlertmanagerConfigs).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingAlertmanagerConfigs) > 0 {
                newAlertmanagerConfigs := migration.
                    CopyOldConfigToNewConfig(existingAlertmanagerConfigs)
                _, err = tx.
                    NewInsert().
                    Model(&newAlertmanagerConfigs).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }
            return nil
        })
    if err != nil {
        return err
    }

    err = migration.
        store.
        Dialect().
        UpdatePrimaryKey(ctx, tx, new(existingAlertmanagerState), new(newAlertmanagerState), OrgReference, func(ctx context.Context) error {
            existingAlertmanagerStates := make([]*existingAlertmanagerState, 0)
            err = tx.
                NewSelect().
                Model(&existingAlertmanagerStates).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingAlertmanagerStates) > 0 {
                newAlertmanagerStates := migration.
                    CopyOldStateToNewState(existingAlertmanagerStates)
                _, err = tx.
                    NewInsert().
                    Model(&newAlertmanagerStates).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }
            return nil
        })
    if err != nil {
        return err
    }

    err = tx.Commit()
    if err != nil {
        return err
    }

    return nil
}

func (migration *updateAlertmanager) Down(context.Context, *bun.DB) error {
    return nil
}

func (migration *updateAlertmanager) CopyOldChannelToNewChannel(existingChannels []*existingChannel) []*newChannel {
    newChannels := make([]*newChannel, 0)
    for _, channel := range existingChannels {
        newChannels = append(newChannels, &newChannel{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            TimeAuditable: types.TimeAuditable{
                CreatedAt: channel.CreatedAt,
                UpdatedAt: channel.UpdatedAt,
            },
            Name: channel.Name,
            Type: channel.Type,
            Data: channel.Data,
            OrgID: channel.OrgID,
        })
    }

    return newChannels
}

func (migration *updateAlertmanager) CopyOldConfigToNewConfig(existingAlertmanagerConfigs []*existingAlertmanagerConfig) []*newAlertmanagerConfig {
    newAlertmanagerConfigs := make([]*newAlertmanagerConfig, 0)
    for _, config := range existingAlertmanagerConfigs {
        newAlertmanagerConfigs = append(newAlertmanagerConfigs, &newAlertmanagerConfig{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            TimeAuditable: types.TimeAuditable{
                CreatedAt: config.CreatedAt,
                UpdatedAt: config.UpdatedAt,
            },
            Config: config.Config,
            Hash: config.Hash,
            OrgID: config.OrgID,
        })
    }

    return newAlertmanagerConfigs
}

func (migration *updateAlertmanager) CopyOldStateToNewState(existingAlertmanagerStates []*existingAlertmanagerState) []*newAlertmanagerState {
    newAlertmanagerStates := make([]*newAlertmanagerState, 0)
    for _, state := range existingAlertmanagerStates {
        newAlertmanagerStates = append(newAlertmanagerStates, &newAlertmanagerState{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            TimeAuditable: types.TimeAuditable{
                CreatedAt: state.CreatedAt,
                UpdatedAt: state.UpdatedAt,
            },
            Silences: state.Silences,
            NFLog: state.NFLog,
            OrgID: state.OrgID,
        })
    }

    return newAlertmanagerStates
}
pkg/sqlmigration/022_update_preferences.go (new file, 202 lines)
@@ -0,0 +1,202 @@
package sqlmigration

import (
    "context"
    "database/sql"
    "fmt"
    "reflect"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updatePreferences struct {
    store sqlstore.SQLStore
}

type existingOrgPreference struct {
    bun.BaseModel `bun:"table:org_preference"`
    PreferenceID string `bun:"preference_id,pk,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    OrgID string `bun:"org_id,pk,type:text,notnull"`
}

type newOrgPreference struct {
    bun.BaseModel `bun:"table:org_preference_new"`
    types.Identifiable
    PreferenceID string `bun:"preference_id,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    OrgID string `bun:"org_id,type:text,notnull"`
}

type existingUserPreference struct {
    bun.BaseModel `bun:"table:user_preference"`
    PreferenceID string `bun:"preference_id,type:text,pk"`
    PreferenceValue string `bun:"preference_value,type:text"`
    UserID string `bun:"user_id,type:text,pk"`
}

type newUserPreference struct {
    bun.BaseModel `bun:"table:user_preference_new"`
    types.Identifiable
    PreferenceID string `bun:"preference_id,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    UserID string `bun:"user_id,type:text,notnull"`
}

func NewUpdatePreferencesFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
    return factory.
        NewProviderFactory(
            factory.MustNewName("update_preferences"),
            func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
                return newUpdatePreferences(ctx, ps, c, sqlstore)
            })
}

func newUpdatePreferences(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
    return &updatePreferences{store: store}, nil
}

func (migration *updatePreferences) Register(migrations *migrate.Migrations) error {
    if err := migrations.
        Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updatePreferences) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.
        BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer tx.Rollback()

    err = migration.
        store.
        Dialect().
        AddPrimaryKey(ctx, tx, new(existingOrgPreference), new(newOrgPreference), OrgReference, func(ctx context.Context) error {
            existingOrgPreferences := make([]*existingOrgPreference, 0)
            err = tx.
                NewSelect().
                Model(&existingOrgPreferences).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingOrgPreferences) > 0 {
                newOrgPreferences := migration.
                    CopyOldOrgPreferencesToNewOrgPreferences(existingOrgPreferences)
                _, err = tx.
                    NewInsert().
                    Model(&newOrgPreferences).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }

            tableName := tx.Dialect().Tables().Get(reflect.TypeOf(new(existingOrgPreference))).Name
            _, err = tx.
                ExecContext(ctx, fmt.Sprintf("CREATE UNIQUE INDEX IF NOT EXISTS %s_unique_idx ON %s (preference_id, org_id)", tableName, fmt.Sprintf("%s_new", tableName)))
            if err != nil {
                return err
            }

            return nil
        })
    if err != nil {
        return err
    }

    err = migration.
        store.
        Dialect().
        AddPrimaryKey(ctx, tx, new(existingUserPreference), new(newUserPreference), UserReference, func(ctx context.Context) error {
            existingUserPreferences := make([]*existingUserPreference, 0)
            err = tx.
                NewSelect().
                Model(&existingUserPreferences).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingUserPreferences) > 0 {
                newUserPreferences := migration.
                    CopyOldUserPreferencesToNewUserPreferences(existingUserPreferences)
                _, err = tx.
                    NewInsert().
                    Model(&newUserPreferences).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }

            tableName := tx.Dialect().Tables().Get(reflect.TypeOf(new(existingUserPreference))).Name
            _, err = tx.
                ExecContext(ctx, fmt.Sprintf("CREATE UNIQUE INDEX IF NOT EXISTS %s_unique_idx ON %s (preference_id, user_id)", tableName, fmt.Sprintf("%s_new", tableName)))
            if err != nil {
                return err
            }

            return nil
        })
    if err != nil {
        return err
    }

    err = tx.Commit()
    if err != nil {
        return err
    }

    return nil
}

func (migration *updatePreferences) Down(context.Context, *bun.DB) error {
    return nil
}

func (migration *updatePreferences) CopyOldOrgPreferencesToNewOrgPreferences(existingOrgPreferences []*existingOrgPreference) []*newOrgPreference {
    newOrgPreferences := make([]*newOrgPreference, 0)
    for _, preference := range existingOrgPreferences {
        newOrgPreferences = append(newOrgPreferences, &newOrgPreference{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            PreferenceID: preference.PreferenceID,
            PreferenceValue: preference.PreferenceValue,
            OrgID: preference.OrgID,
        })
    }
    return newOrgPreferences
}

func (migration *updatePreferences) CopyOldUserPreferencesToNewUserPreferences(existingUserPreferences []*existingUserPreference) []*newUserPreference {
    newUserPreferences := make([]*newUserPreference, 0)
    for _, preference := range existingUserPreferences {
        newUserPreferences = append(newUserPreferences, &newUserPreference{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            PreferenceID: preference.PreferenceID,
            PreferenceValue: preference.PreferenceValue,
            UserID: preference.UserID,
        })
    }
    return newUserPreferences
}
pkg/sqlmigration/023_update_apdex_ttl.go (new file, 232 lines)
@@ -0,0 +1,232 @@
package sqlmigration

import (
    "context"
    "database/sql"
    "fmt"
    "reflect"
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updateApdexTtl struct {
    store sqlstore.SQLStore
}

type existingApdexSettings struct {
    bun.BaseModel `bun:"table:apdex_settings"`
    OrgID string `bun:"org_id,pk,type:text" json:"orgId"`
    ServiceName string `bun:"service_name,pk,type:text" json:"serviceName"`
    Threshold float64 `bun:"threshold,type:float,notnull" json:"threshold"`
    ExcludeStatusCodes string `bun:"exclude_status_codes,type:text,notnull" json:"excludeStatusCodes"`
}

type newApdexSettings struct {
    bun.BaseModel `bun:"table:apdex_setting"`
    types.Identifiable
    OrgID string `bun:"org_id,type:text" json:"orgId"`
    ServiceName string `bun:"service_name,type:text" json:"serviceName"`
    Threshold float64 `bun:"threshold,type:float,notnull" json:"threshold"`
    ExcludeStatusCodes string `bun:"exclude_status_codes,type:text,notnull" json:"excludeStatusCodes"`
}

type existingTTLStatus struct {
    bun.BaseModel `bun:"table:ttl_status"`
    ID int `bun:"id,pk,autoincrement"`
    TransactionID string `bun:"transaction_id,type:text,notnull"`
    CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
    UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
    TableName string `bun:"table_name,type:text,notnull"`
    TTL int `bun:"ttl,notnull,default:0"`
    ColdStorageTTL int `bun:"cold_storage_ttl,notnull,default:0"`
    Status string `bun:"status,type:text,notnull"`
}

type newTTLStatus struct {
    bun.BaseModel `bun:"table:ttl_setting"`
    types.Identifiable
    types.TimeAuditable
    TransactionID string `bun:"transaction_id,type:text,notnull"`
    TableName string `bun:"table_name,type:text,notnull"`
    TTL int `bun:"ttl,notnull,default:0"`
    ColdStorageTTL int `bun:"cold_storage_ttl,notnull,default:0"`
    Status string `bun:"status,type:text,notnull"`
    OrgID string `json:"-" bun:"org_id,notnull"`
}

func NewUpdateApdexTtlFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
    return factory.
        NewProviderFactory(
            factory.MustNewName("update_apdex_ttl"),
            func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
                return newUpdateApdexTtl(ctx, ps, c, sqlstore)
            })
}

func newUpdateApdexTtl(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
    return &updateApdexTtl{store: store}, nil
}

func (migration *updateApdexTtl) Register(migrations *migrate.Migrations) error {
    if err := migrations.
        Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updateApdexTtl) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.
        BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer tx.Rollback()

    err = migration.
        store.
        Dialect().
        RenameTableAndModifyModel(ctx, tx, new(existingApdexSettings), new(newApdexSettings), OrgReference, func(ctx context.Context) error {
            existingApdexSettings := make([]*existingApdexSettings, 0)
            err = tx.
                NewSelect().
                Model(&existingApdexSettings).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            if err == nil && len(existingApdexSettings) > 0 {
                newSettings := migration.
                    CopyExistingApdexSettingsToNewApdexSettings(existingApdexSettings)
                _, err = tx.
                    NewInsert().
                    Model(&newSettings).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }

            tableName := tx.Dialect().Tables().Get(reflect.TypeOf(new(newApdexSettings))).Name
            _, err = tx.
                ExecContext(ctx, fmt.Sprintf("CREATE UNIQUE INDEX IF NOT EXISTS %s_unique_idx ON %s (service_name, org_id)", tableName, tableName))
            if err != nil {
                return err
            }
            return nil
        })
    if err != nil {
        return err
    }

    err = migration.
        store.
        Dialect().
        RenameTableAndModifyModel(ctx, tx, new(existingTTLStatus), new(newTTLStatus), OrgReference, func(ctx context.Context) error {
            existingTTLStatus := make([]*existingTTLStatus, 0)
            err = tx.
                NewSelect().
                Model(&existingTTLStatus).
                Scan(ctx)
            if err != nil {
                if err != sql.ErrNoRows {
                    return err
                }
            }

            var orgIDs []string
            if err := migration.
                store.
                BunDB().
                NewSelect().
                Model((*types.Organization)(nil)).
                Column("id").
                Scan(ctx, &orgIDs); err != nil {
                return err
            }

            if len(orgIDs) > 1 {
                return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot have more than one org id")
            }

            if err == nil && len(existingTTLStatus) > 0 {
                newTTLStatus := migration.
                    CopyExistingTTLStatusToNewTTLStatus(existingTTLStatus, orgIDs[0])
                _, err = tx.
                    NewInsert().
                    Model(&newTTLStatus).
                    Exec(ctx)
                if err != nil {
                    return err
                }
            }
            return nil
        })
    if err != nil {
        return err
    }

    err = tx.Commit()
    if err != nil {
        return err
    }

    return nil
}

func (migration *updateApdexTtl) Down(context.Context, *bun.DB) error {
    return nil
}

func (migration *updateApdexTtl) CopyExistingApdexSettingsToNewApdexSettings(existingApdexSettings []*existingApdexSettings) []*newApdexSettings {
    newSettings := make([]*newApdexSettings, 0)
    for _, apdexSetting := range existingApdexSettings {
        newSettings = append(newSettings, &newApdexSettings{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            ServiceName: apdexSetting.ServiceName,
            Threshold: apdexSetting.Threshold,
            ExcludeStatusCodes: apdexSetting.ExcludeStatusCodes,
            OrgID: apdexSetting.OrgID,
        })
    }

    return newSettings
}

func (migration *updateApdexTtl) CopyExistingTTLStatusToNewTTLStatus(existingTTLStatus []*existingTTLStatus, orgID string) []*newTTLStatus {
    newTTLStatuses := make([]*newTTLStatus, 0)

    for _, ttl := range existingTTLStatus {
        newTTLStatuses = append(newTTLStatuses, &newTTLStatus{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            TimeAuditable: types.TimeAuditable{
                CreatedAt: ttl.CreatedAt,
                UpdatedAt: ttl.UpdatedAt,
            },
            TransactionID: ttl.TransactionID,
            TTL: ttl.TTL,
            TableName: ttl.TableName,
            ColdStorageTTL: ttl.ColdStorageTTL,
            Status: ttl.Status,
            OrgID: orgID,
        })
    }

    return newTTLStatuses
}
pkg/sqlmigration/024_update_reset_password.go (new file, 118 lines)
@@ -0,0 +1,118 @@
package sqlmigration

import (
    "context"
    "database/sql"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updateResetPassword struct {
    store sqlstore.SQLStore
}

type existingResetPasswordRequest struct {
    bun.BaseModel `bun:"table:reset_password_request"`
    ID int `bun:"id,pk,autoincrement" json:"id"`
    Token string `bun:"token,type:text,notnull" json:"token"`
    UserID string `bun:"user_id,type:text,notnull" json:"userId"`
}

type newResetPasswordRequest struct {
    bun.BaseModel `bun:"table:reset_password_request_new"`
    types.Identifiable
    Token string `bun:"token,type:text,notnull" json:"token"`
    UserID string `bun:"user_id,type:text,notnull" json:"userId"`
}

func NewUpdateResetPasswordFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[SQLMigration, Config] {
    return factory.
        NewProviderFactory(
            factory.MustNewName("update_reset_password"),
            func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
                return newUpdateResetPassword(ctx, ps, c, sqlstore)
            })
}

func newUpdateResetPassword(_ context.Context, _ factory.ProviderSettings, _ Config, store sqlstore.SQLStore) (SQLMigration, error) {
    return &updateResetPassword{store: store}, nil
}

func (migration *updateResetPassword) Register(migrations *migrate.Migrations) error {
    if err := migrations.
        Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updateResetPassword) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.
        BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer tx.Rollback()

    err = migration.store.Dialect().UpdatePrimaryKey(ctx, tx, new(existingResetPasswordRequest), new(newResetPasswordRequest), UserReference, func(ctx context.Context) error {
        existingResetPasswordRequests := make([]*existingResetPasswordRequest, 0)
        err = tx.
            NewSelect().
            Model(&existingResetPasswordRequests).
            Scan(ctx)
        if err != nil {
            if err != sql.ErrNoRows {
                return err
            }
        }

        if err == nil && len(existingResetPasswordRequests) > 0 {
            newResetPasswordRequests := migration.
                CopyExistingResetPasswordRequestsToNewResetPasswordRequests(existingResetPasswordRequests)
            _, err = tx.
                NewInsert().
                Model(&newResetPasswordRequests).
                Exec(ctx)
            if err != nil {
                return err
            }
        }
        return nil
    })

    if err != nil {
        return err
    }

    err = tx.Commit()
    if err != nil {
        return err
    }

    return nil
}

func (migration *updateResetPassword) Down(context.Context, *bun.DB) error {
    return nil
}

func (migration *updateResetPassword) CopyExistingResetPasswordRequestsToNewResetPasswordRequests(existingPasswordRequests []*existingResetPasswordRequest) []*newResetPasswordRequest {
    newResetPasswordRequests := make([]*newResetPasswordRequest, 0)
    for _, request := range existingPasswordRequests {
        newResetPasswordRequests = append(newResetPasswordRequests, &newResetPasswordRequest{
            Identifiable: types.Identifiable{
                ID: valuer.GenerateUUID(),
            },
            Token: request.Token,
            UserID: request.UserID,
        })
    }
    return newResetPasswordRequests
}
@@ -25,6 +25,11 @@ var (
    ErrNoExecute = errors.New("no execute")
)

var (
    OrgReference  = "org"
    UserReference = "user"
)

func New(
    ctx context.Context,
    settings factory.ProviderSettings,
@@ -2,11 +2,29 @@ package sqlitesqlstore

import (
    "context"
    "fmt"
    "reflect"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/uptrace/bun"
)

var (
    Identity = "id"
    Integer  = "INTEGER"
    Text     = "TEXT"
)

var (
    Org  = "org"
    User = "user"
)

var (
    OrgReference  = `("org_id") REFERENCES "organizations" ("id")`
    UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
)

type dialect struct {
}
@@ -120,6 +138,22 @@ func (dialect *dialect) ColumnExists(ctx context.Context, bun bun.IDB, table str
    return count > 0, nil
}

func (dialect *dialect) IndexExists(ctx context.Context, bun bun.IDB, table string, index string) (bool, error) {
    var count int
    err := bun.NewSelect().
        ColumnExpr("COUNT(*)").
        TableExpr("sqlite_master").
        Where("type = ?", "index").
        Where("name = ?", index).
        Scan(ctx, &count)

    if err != nil {
        return false, err
    }

    return count > 0, nil
}

func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table string, oldColumnName string, newColumnName string) (bool, error) {
    oldColumnExists, err := dialect.ColumnExists(ctx, bun, table, oldColumnName)
    if err != nil {
@@ -165,7 +199,10 @@ func (dialect *dialect) TableExists(ctx context.Context, bun bun.IDB, table inte
    return true, nil
}

-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
+func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
+   if reference == "" {
+       return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
+   }
    exists, err := dialect.TableExists(ctx, bun, newModel)
    if err != nil {
        return err
@@ -174,11 +211,18 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I
        return nil
    }

+   fkReference := ""
+   if reference == Org {
+       fkReference = OrgReference
+   } else if reference == User {
+       fkReference = UserReference
+   }

    _, err = bun.
        NewCreateTable().
        IfNotExists().
        Model(newModel).
-       ForeignKey(`("org_id") REFERENCES "organizations" ("id")`).
+       ForeignKey(fkReference).
        Exec(ctx)

    if err != nil {
@@ -201,3 +245,115 @@ func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.I

    return nil
}

func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
    if reference == "" {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
    }
    oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
    newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

    columnType, err := dialect.GetColumnType(ctx, bun, oldTableName, Identity)
    if err != nil {
        return err
    }
    if columnType == Text {
        return nil
    }

    fkReference := ""
    if reference == Org {
        fkReference = OrgReference
    } else if reference == User {
        fkReference = UserReference
    }

    _, err = bun.
        NewCreateTable().
        IfNotExists().
        Model(newModel).
        ForeignKey(fkReference).
        Exec(ctx)

    if err != nil {
        return err
    }

    err = cb(ctx)
    if err != nil {
        return err
    }

    _, err = bun.
        NewDropTable().
        IfExists().
        Model(oldModel).
        Exec(ctx)
    if err != nil {
        return err
    }

    _, err = bun.
        ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
    if err != nil {
        return err
    }

    return nil
}

func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
    if reference == "" {
        return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot run migration without reference")
    }
    oldTableName := bun.Dialect().Tables().Get(reflect.TypeOf(oldModel)).Name
    newTableName := bun.Dialect().Tables().Get(reflect.TypeOf(newModel)).Name

    identityExists, err := dialect.ColumnExists(ctx, bun, oldTableName, Identity)
    if err != nil {
        return err
    }
    if identityExists {
        return nil
    }

    fkReference := ""
    if reference == Org {
        fkReference = OrgReference
    } else if reference == User {
        fkReference = UserReference
    }

    _, err = bun.
        NewCreateTable().
        IfNotExists().
        Model(newModel).
        ForeignKey(fkReference).
        Exec(ctx)

    if err != nil {
        return err
    }

    err = cb(ctx)
    if err != nil {
        return err
    }

    _, err = bun.
        NewDropTable().
        IfExists().
        Model(oldModel).
        Exec(ctx)
    if err != nil {
        return err
    }

    _, err = bun.
        ExecContext(ctx, fmt.Sprintf("ALTER TABLE %s RENAME TO %s", newTableName, oldTableName))
    if err != nil {
        return err
    }

    return nil
}
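Both helpers resolve the caller-supplied reference name to one of the two foreign-key clauses declared at the top of the dialect file before they recreate the table. The stand-alone sketch below isolates that resolution step; the constant values mirror the diff, while the function name and error handling are illustrative assumptions rather than the repository code.

package main

import (
    "errors"
    "fmt"
)

// Constants mirror the ones introduced in the sqlite dialect above.
const (
    Org  = "org"
    User = "user"

    OrgReference  = `("org_id") REFERENCES "organizations" ("id")`
    UserReference = `("user_id") REFERENCES "users" ("id") ON DELETE CASCADE ON UPDATE CASCADE`
)

// foreignKeyFor maps a reference name to the FOREIGN KEY clause attached when
// the replacement table is created; an empty reference aborts the migration.
func foreignKeyFor(reference string) (string, error) {
    switch reference {
    case Org:
        return OrgReference, nil
    case User:
        return UserReference, nil
    case "":
        return "", errors.New("cannot run migration without reference")
    default:
        return "", fmt.Errorf("unknown reference %q", reference)
    }
}

func main() {
    fk, err := foreignKeyFor(User)
    fmt.Println(fk, err)
}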
@@ -42,5 +42,8 @@ type SQLDialect interface {
    GetColumnType(context.Context, bun.IDB, string, string) (string, error)
    ColumnExists(context.Context, bun.IDB, string, string) (bool, error)
    RenameColumn(context.Context, bun.IDB, string, string, string) (bool, error)
-   RenameTableAndModifyModel(context.Context, bun.IDB, interface{}, interface{}, func(context.Context) error) error
+   RenameTableAndModifyModel(context.Context, bun.IDB, interface{}, interface{}, string, func(context.Context) error) error
    UpdatePrimaryKey(context.Context, bun.IDB, interface{}, interface{}, string, func(context.Context) error) error
    AddPrimaryKey(context.Context, bun.IDB, interface{}, interface{}, string, func(context.Context) error) error
    IndexExists(context.Context, bun.IDB, string, string) (bool, error)
}
@@ -29,6 +29,18 @@ func (dialect *dialect) RenameColumn(ctx context.Context, bun bun.IDB, table str
    return true, nil
}

-func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, cb func(context.Context) error) error {
+func (dialect *dialect) RenameTableAndModifyModel(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
    return nil
}

func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
    return nil
}

func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel interface{}, newModel interface{}, reference string, cb func(context.Context) error) error {
    return nil
}

func (dialect *dialect) IndexExists(ctx context.Context, bun bun.IDB, table string, index string) (bool, error) {
    return false, nil
}
@@ -3,42 +3,94 @@ package types
import (
    "time"

    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
)

-type Agent struct {
-   bun.BaseModel `bun:"table:agents"`
-   AgentID string `bun:"agent_id,pk,type:text"`
-   StartedAt time.Time `bun:"started_at,type:datetime,notnull"`
-   TerminatedAt time.Time `bun:"terminated_at,type:datetime"`
-   CurrentStatus string `bun:"current_status,type:text,notnull"`
-   EffectiveConfig string `bun:"effective_config,type:text,notnull"`
+type AgentStatus int
+
+const (
+   AgentStatusUnknown AgentStatus = iota
+   AgentStatusConnected
+   AgentStatusDisconnected
+)
+
+type StorableAgent struct {
+   bun.BaseModel `bun:"table:agents"`
+
+   Identifiable
+   OrgID string `json:"orgId" yaml:"orgId" bun:"org_id,type:text"`
+   StartedAt time.Time `json:"startedAt" yaml:"startedAt" bun:"started_at,type:datetime,notnull"`
+   TerminatedAt time.Time `json:"terminatedAt" yaml:"terminatedAt" bun:"terminated_at,type:datetime"`
+   CurrentStatus AgentStatus `json:"currentStatus" yaml:"currentStatus" bun:"current_status,type:text,notnull"`
+   EffectiveConfig string `bun:"effective_config,type:text,notnull"`
}

-type AgentConfigVersion struct {
-   bun.BaseModel `bun:"table:agent_config_versions"`
+type ElementTypeDef string

-   ID string `bun:"id,pk,type:text"`
-   CreatedBy string `bun:"created_by,type:text"`
-   CreatedAt time.Time `bun:"created_at,default:CURRENT_TIMESTAMP"`
-   UpdatedBy string `bun:"updated_by,type:text"`
-   UpdatedAt time.Time `bun:"updated_at,default:CURRENT_TIMESTAMP"`
-   Version int `bun:"version,default:1,unique:element_version_idx"`
-   Active int `bun:"active"`
-   IsValid int `bun:"is_valid"`
-   Disabled int `bun:"disabled"`
-   ElementType string `bun:"element_type,notnull,type:varchar(120),unique:element_version_idx"`
-   DeployStatus string `bun:"deploy_status,notnull,type:varchar(80),default:'DIRTY'"`
-   DeploySequence int `bun:"deploy_sequence"`
-   DeployResult string `bun:"deploy_result,type:text"`
-   LastHash string `bun:"last_hash,type:text"`
-   LastConfig string `bun:"last_config,type:text"`
+const (
+   ElementTypeSamplingRules ElementTypeDef = "sampling_rules"
+   ElementTypeDropRules ElementTypeDef = "drop_rules"
+   ElementTypeLogPipelines ElementTypeDef = "log_pipelines"
+   ElementTypeLbExporter ElementTypeDef = "lb_exporter"
+)
+
+type DeployStatus string
+
+const (
+   PendingDeploy DeployStatus = "DIRTY"
+   Deploying DeployStatus = "DEPLOYING"
+   Deployed DeployStatus = "DEPLOYED"
+   DeployInitiated DeployStatus = "IN_PROGRESS"
+   DeployFailed DeployStatus = "FAILED"
+   DeployStatusUnknown DeployStatus = "UNKNOWN"
+)
+
+type AgentConfigVersion struct {
+   bun.BaseModel `bun:"table:agent_config_versions,alias:acv"`
+
+   TimeAuditable
+   UserAuditable
+
+   CreatedByName string `json:"createdByName" bun:"created_by_name,scanonly"`
+
+   Identifiable
+   OrgID string `json:"orgId" bun:"org_id,type:text"`
+   Version int `json:"version" bun:"version,default:1,unique:element_version_idx"`
+   Active bool `json:"active" bun:"active"`
+   IsValid bool `json:"is_valid" bun:"is_valid"`
+   Disabled bool `json:"disabled" bun:"disabled"`
+   ElementType ElementTypeDef `json:"elementType" bun:"element_type,notnull,type:varchar(120),unique:element_version_idx"`
+   DeployStatus DeployStatus `json:"deployStatus" bun:"deploy_status,notnull,type:varchar(80),default:'DIRTY'"`
+   DeploySequence int `json:"deploySequence" bun:"deploy_sequence"`
+   DeployResult string `json:"deployResult" bun:"deploy_result,type:text"`
+   LastHash string `json:"lastHash" bun:"last_hash,type:text"`
+   LastConfig string `json:"lastConfig" bun:"last_config,type:text"`
+}
+
+func NewAgentConfigVersion(orgId string, typeDef ElementTypeDef) *AgentConfigVersion {
+   return &AgentConfigVersion{
+       OrgID: orgId,
+       Identifiable: Identifiable{ID: valuer.GenerateUUID()},
+       ElementType: typeDef,
+       Active: false,
+       IsValid: false,
+       Disabled: false,
+       DeployStatus: PendingDeploy,
+       LastHash: "",
+       LastConfig: "{}",
+   }
+}
+
+func UpdateVersion(v int) int {
+   return v + 1
+}

type AgentConfigElement struct {
    bun.BaseModel `bun:"table:agent_config_elements"`

-   ID string `bun:"id,pk,type:text"`
+   Identifiable
+   OrgID string `bun:"org_id,type:text"`
    CreatedBy string `bun:"created_by,type:text"`
    CreatedAt time.Time `bun:"created_at,default:CURRENT_TIMESTAMP"`
    UpdatedBy string `bun:"updated_by,type:text"`
@@ -7,6 +7,8 @@ import (
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/prometheus/alertmanager/config"
    "github.com/uptrace/bun"
)
@@ -27,15 +29,14 @@ type GettableChannels = []*Channel

// Channel represents a single receiver of the alertmanager config.
type Channel struct {
-   bun.BaseModel `bun:"table:notification_channels"`
+   bun.BaseModel `bun:"table:notification_channel"`

-   ID int `json:"id" bun:"id,pk,autoincrement"`
-   Name string `json:"name" bun:"name"`
-   Type string `json:"type" bun:"type"`
-   Data string `json:"data" bun:"data"`
-   CreatedAt time.Time `json:"created_at" bun:"created_at"`
-   UpdatedAt time.Time `json:"updated_at" bun:"updated_at"`
-   OrgID string `json:"org_id" bun:"org_id"`
+   types.Identifiable
+   types.TimeAuditable
+   Name string `json:"name" bun:"name"`
+   Type string `json:"type" bun:"type"`
+   Data string `json:"data" bun:"data"`
+   OrgID string `json:"org_id" bun:"org_id"`
}

// NewChannelFromReceiver creates a new Channel from a Receiver.
@@ -47,10 +48,15 @@ func NewChannelFromReceiver(receiver config.Receiver, orgID string) *Channel {

    // Initialize channel with common fields
    channel := Channel{
-       Name: receiver.Name,
-       CreatedAt: time.Now(),
-       UpdatedAt: time.Now(),
-       OrgID: orgID,
+       Identifiable: types.Identifiable{
+           ID: valuer.GenerateUUID(),
+       },
+       TimeAuditable: types.TimeAuditable{
+           CreatedAt: time.Now(),
+           UpdatedAt: time.Now(),
+       },
+       Name: receiver.Name,
+       OrgID: orgID,
    }

    // Use reflection to examine receiver struct fields
@@ -120,14 +126,14 @@ func NewConfigFromChannels(globalConfig GlobalConfig, routeConfig RouteConfig, c
    return cfg, nil
}

-func GetChannelByID(channels Channels, id int) (int, *Channel, error) {
+func GetChannelByID(channels Channels, id valuer.UUID) (int, *Channel, error) {
    for i, channel := range channels {
        if channel.ID == id {
            return i, channel, nil
        }
    }

-   return 0, nil, errors.Newf(errors.TypeNotFound, ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %d", id)
+   return 0, nil, errors.Newf(errors.TypeNotFound, ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %s", id.StringValue())
}

func GetChannelByName(channels Channels, name string) (int, *Channel, error) {
@@ -143,7 +149,7 @@ func GetChannelByName(channels Channels, name string) (int, *Channel, error) {
func (c *Channel) Update(receiver Receiver) error {
    channel := NewChannelFromReceiver(receiver, c.OrgID)
    if channel == nil {
-       return errors.Newf(errors.TypeInvalidInput, ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %d", c.ID)
+       return errors.Newf(errors.TypeInvalidInput, ErrCodeAlertmanagerChannelNotFound, "cannot find channel with id %s", c.ID.StringValue())
    }

    if c.Name != channel.Name {
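With the move from autoincrement integers to UUID identifiers, channel lookups now compare opaque string-valued ids instead of ints. The stand-alone sketch below illustrates the shape of that lookup; the Channel type is a trimmed stand-in and the id value is a placeholder, not repository data.

package main

import "fmt"

// Channel is a trimmed stand-in for the alertmanager channel type above.
type Channel struct {
    ID   string // UUID rendered as a string
    Name string
}

// getChannelByID mirrors the lookup above: linear scan, match on the UUID.
func getChannelByID(channels []*Channel, id string) (*Channel, error) {
    for _, c := range channels {
        if c.ID == id {
            return c, nil
        }
    }
    return nil, fmt.Errorf("cannot find channel with id %s", id)
}

func main() {
    channels := []*Channel{{ID: "0195fd1e-0000-7000-8000-000000000000", Name: "slack-alerts"}}
    c, err := getChannelByID(channels, channels[0].ID)
    fmt.Println(c.Name, err)
}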
@@ -10,6 +10,8 @@ import (

    "dario.cat/mergo"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/prometheus/alertmanager/config"
    commoncfg "github.com/prometheus/common/config"
    "github.com/prometheus/common/model"
@@ -41,12 +43,11 @@ type RouteConfig struct {
type StoreableConfig struct {
    bun.BaseModel `bun:"table:alertmanager_config"`

-   ID uint64 `bun:"id,pk,autoincrement"`
-   Config string `bun:"config"`
-   Hash string `bun:"hash"`
-   CreatedAt time.Time `bun:"created_at"`
-   UpdatedAt time.Time `bun:"updated_at"`
-   OrgID string `bun:"org_id"`
+   types.Identifiable
+   types.TimeAuditable
+   Config string `bun:"config"`
+   Hash string `bun:"hash"`
+   OrgID string `bun:"org_id"`
}

// Config is the type for the entire alertmanager configuration
@@ -63,11 +64,16 @@ func NewConfig(c *config.Config, orgID string) *Config {
    return &Config{
        alertmanagerConfig: c,
        storeableConfig: &StoreableConfig{
-           Config: raw,
-           Hash: fmt.Sprintf("%x", newConfigHash(raw)),
-           CreatedAt: time.Now(),
-           UpdatedAt: time.Now(),
-           OrgID: orgID,
+           Identifiable: types.Identifiable{
+               ID: valuer.GenerateUUID(),
+           },
+           TimeAuditable: types.TimeAuditable{
+               CreatedAt: time.Now(),
+               UpdatedAt: time.Now(),
+           },
+           Config: raw,
+           Hash: fmt.Sprintf("%x", newConfigHash(raw)),
+           OrgID: orgID,
        },
    }
}
@@ -370,13 +376,13 @@ type ConfigStore interface {
    CreateChannel(context.Context, *Channel, ...StoreOption) error

    // GetChannelByID returns the channel for the given id.
-   GetChannelByID(context.Context, string, int) (*Channel, error)
+   GetChannelByID(context.Context, string, valuer.UUID) (*Channel, error)

    // UpdateChannel updates a channel.
    UpdateChannel(context.Context, string, *Channel, ...StoreOption) error

    // DeleteChannelByID deletes a channel.
-   DeleteChannelByID(context.Context, string, int, ...StoreOption) error
+   DeleteChannelByID(context.Context, string, valuer.UUID, ...StoreOption) error

    // ListChannels returns the list of channels.
    ListChannels(context.Context, string) ([]*Channel, error)
@@ -6,6 +6,8 @@ import (
    "time"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/prometheus/alertmanager/cluster"
    "github.com/uptrace/bun"
)
@@ -28,19 +30,23 @@ var (
type StoreableState struct {
    bun.BaseModel `bun:"table:alertmanager_state"`

-   ID uint64 `bun:"id,pk,autoincrement"`
-   Silences string `bun:"silences,nullzero"`
-   NFLog string `bun:"nflog,nullzero"`
-   CreatedAt time.Time `bun:"created_at"`
-   UpdatedAt time.Time `bun:"updated_at"`
-   OrgID string `bun:"org_id"`
+   types.Identifiable
+   types.TimeAuditable
+   Silences string `bun:"silences,nullzero"`
+   NFLog string `bun:"nflog,nullzero"`
+   OrgID string `bun:"org_id"`
}

func NewStoreableState(orgID string) *StoreableState {
    return &StoreableState{
-       OrgID: orgID,
-       CreatedAt: time.Now(),
-       UpdatedAt: time.Now(),
+       Identifiable: types.Identifiable{
+           ID: valuer.GenerateUUID(),
+       },
+       TimeAuditable: types.TimeAuditable{
+           CreatedAt: time.Now(),
+           UpdatedAt: time.Now(),
+       },
+       OrgID: orgID,
    }
}
@@ -94,15 +94,14 @@ type PlannedMaintenance struct {
    UpdatedBy string `bun:"updated_by,type:text,notnull"`
}

-type TTLStatus struct {
-   bun.BaseModel `bun:"table:ttl_status"`
-
-   ID int `bun:"id,pk,autoincrement"`
-   TransactionID string `bun:"transaction_id,type:text,notnull"`
-   CreatedAt time.Time `bun:"created_at,type:datetime,notnull"`
-   UpdatedAt time.Time `bun:"updated_at,type:datetime,notnull"`
-   TableName string `bun:"table_name,type:text,notnull"`
-   TTL int `bun:"ttl,notnull,default:0"`
-   ColdStorageTTL int `bun:"cold_storage_ttl,notnull,default:0"`
-   Status string `bun:"status,type:text,notnull"`
+type TTLSetting struct {
+   bun.BaseModel `bun:"table:ttl_setting"`
+   Identifiable
+   TimeAuditable
+   TransactionID string `bun:"transaction_id,type:text,notnull"`
+   TableName string `bun:"table_name,type:text,notnull"`
+   TTL int `bun:"ttl,notnull,default:0"`
+   ColdStorageTTL int `bun:"cold_storage_ttl,notnull,default:0"`
+   Status string `bun:"status,type:text,notnull"`
+   OrgID string `json:"-" bun:"org_id,notnull"`
}
@@ -7,7 +7,6 @@ import (
// TODO: check constraints are not working
type Organization struct {
    bun.BaseModel `bun:"table:organizations"`

    TimeAuditable
    ID string `bun:"id,pk,type:text" json:"id"`
    Name string `bun:"name,type:text,notnull" json:"name"`
@@ -16,8 +15,10 @@ type Organization struct {
}

type ApdexSettings struct {
-   OrgID string `bun:"org_id,pk,type:text" json:"orgId"`
-   ServiceName string `bun:"service_name,pk,type:text" json:"serviceName"`
+   bun.BaseModel `bun:"table:apdex_setting"`
+   Identifiable
+   OrgID string `bun:"org_id,type:text" json:"orgId"`
+   ServiceName string `bun:"service_name,type:text" json:"serviceName"`
    Threshold float64 `bun:"threshold,type:float,notnull" json:"threshold"`
    ExcludeStatusCodes string `bun:"exclude_status_codes,type:text,notnull" json:"excludeStatusCodes"`
}
@@ -1,21 +0,0 @@ (file deleted)
package types

import "github.com/uptrace/bun"

// on_delete:CASCADE,on_update:CASCADE not working
type UserPreference struct {
    bun.BaseModel `bun:"table:user_preference"`

    PreferenceID string `bun:"preference_id,type:text,pk"`
    PreferenceValue string `bun:"preference_value,type:text"`
    UserID string `bun:"user_id,type:text,pk"`
}

// on_delete:CASCADE,on_update:CASCADE not working
type OrgPreference struct {
    bun.BaseModel `bun:"table:org_preference"`

    PreferenceID string `bun:"preference_id,pk,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    OrgID string `bun:"org_id,pk,type:text,notnull"`
}
pkg/types/preferencetypes/preference.go (new file, 290 lines)
@@ -0,0 +1,290 @@
package preferencetypes

import (
    "context"
    "fmt"
    "strings"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/uptrace/bun"
)

type GettablePreference struct {
    PreferenceID string `json:"preference_id" db:"preference_id"`
    PreferenceValue interface{} `json:"preference_value" db:"preference_value"`
}

type UpdatablePreference struct {
    PreferenceValue interface{} `json:"preference_value" db:"preference_value"`
}

type StorableOrgPreference struct {
    bun.BaseModel `bun:"table:org_preference"`
    types.Identifiable
    PreferenceID string `bun:"preference_id,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    OrgID string `bun:"org_id,type:text,notnull"`
}

type StorableUserPreference struct {
    bun.BaseModel `bun:"table:user_preference"`
    types.Identifiable
    PreferenceID string `bun:"preference_id,type:text,notnull"`
    PreferenceValue string `bun:"preference_value,type:text,notnull"`
    UserID string `bun:"user_id,type:text,notnull"`
}

type Preference struct {
    Key string `json:"key"`
    Name string `json:"name"`
    Description string `json:"description"`
    ValueType string `json:"valueType"`
    DefaultValue interface{} `json:"defaultValue"`
    AllowedValues []interface{} `json:"allowedValues"`
    IsDiscreteValues bool `json:"isDiscreteValues"`
    Range Range `json:"range"`
    AllowedScopes []string `json:"allowedScopes"`
}

func NewDefaultPreferenceMap() map[string]Preference {
    return map[string]Preference{
        "ORG_ONBOARDING": {
            Key: "ORG_ONBOARDING",
            Name: "Organisation Onboarding",
            Description: "Organisation Onboarding",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"org"},
        },
        "WELCOME_CHECKLIST_DO_LATER": {
            Key: "WELCOME_CHECKLIST_DO_LATER",
            Name: "Welcome Checklist Do Later",
            Description: "Welcome Checklist Do Later",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SEND_LOGS_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SEND_LOGS_SKIPPED",
            Name: "Welcome Checklist Send Logs Skipped",
            Description: "Welcome Checklist Send Logs Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SEND_TRACES_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SEND_TRACES_SKIPPED",
            Name: "Welcome Checklist Send Traces Skipped",
            Description: "Welcome Checklist Send Traces Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SEND_INFRA_METRICS_SKIPPED",
            Name: "Welcome Checklist Send Infra Metrics Skipped",
            Description: "Welcome Checklist Send Infra Metrics Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SETUP_DASHBOARDS_SKIPPED",
            Name: "Welcome Checklist Setup Dashboards Skipped",
            Description: "Welcome Checklist Setup Dashboards Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SETUP_ALERTS_SKIPPED",
            Name: "Welcome Checklist Setup Alerts Skipped",
            Description: "Welcome Checklist Setup Alerts Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
        "WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED": {
            Key: "WELCOME_CHECKLIST_SETUP_SAVED_VIEW_SKIPPED",
            Name: "Welcome Checklist Setup Saved View Skipped",
            Description: "Welcome Checklist Setup Saved View Skipped",
            ValueType: "boolean",
            DefaultValue: false,
            AllowedValues: []interface{}{true, false},
            IsDiscreteValues: true,
            AllowedScopes: []string{"user"},
        },
    }
}
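Every default above is a boolean with an explicit allowed scope (org or user), so a caller typically looks the key up in the map and checks the scope before persisting a value. The sketch below shows that check against a trimmed copy of the Preference shape; it is illustrative only and not the module's API.

package main

import "fmt"

// Preference is a trimmed copy of the struct above, kept only for this example.
type Preference struct {
    Key           string
    ValueType     string
    DefaultValue  interface{}
    AllowedScopes []string
}

// hasScope reports whether the preference may be set at the given scope.
func hasScope(p Preference, scope string) bool {
    for _, s := range p.AllowedScopes {
        if s == scope {
            return true
        }
    }
    return false
}

func main() {
    p := Preference{
        Key:           "ORG_ONBOARDING",
        ValueType:     "boolean",
        DefaultValue:  false,
        AllowedScopes: []string{"org"},
    }
    fmt.Println(hasScope(p, "org"), hasScope(p, "user")) // true false
}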
|
||||
func (p *Preference) ErrorValueTypeMismatch() error {
    return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not of expected type: %s", p.ValueType))
}

func (p *Preference) checkIfInAllowedValues(preferenceValue interface{}) (bool, error) {

    switch p.ValueType {
    case PreferenceValueTypeInteger:
        _, ok := preferenceValue.(int64)
        if !ok {
            return false, p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeFloat:
        _, ok := preferenceValue.(float64)
        if !ok {
            return false, p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeString:
        _, ok := preferenceValue.(string)
        if !ok {
            return false, p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeBoolean:
        _, ok := preferenceValue.(bool)
        if !ok {
            return false, p.ErrorValueTypeMismatch()
        }
    }
    isInAllowedValues := false
    for _, value := range p.AllowedValues {
        switch p.ValueType {
        case PreferenceValueTypeInteger:
            allowedValue, ok := value.(int64)
            if !ok {
                return false, p.ErrorValueTypeMismatch()
            }

            if allowedValue == preferenceValue {
                isInAllowedValues = true
            }
        case PreferenceValueTypeFloat:
            allowedValue, ok := value.(float64)
            if !ok {
                return false, p.ErrorValueTypeMismatch()
            }

            if allowedValue == preferenceValue {
                isInAllowedValues = true
            }
        case PreferenceValueTypeString:
            allowedValue, ok := value.(string)
            if !ok {
                return false, p.ErrorValueTypeMismatch()
            }

            if allowedValue == preferenceValue {
                isInAllowedValues = true
            }
        case PreferenceValueTypeBoolean:
            allowedValue, ok := value.(bool)
            if !ok {
                return false, p.ErrorValueTypeMismatch()
            }

            if allowedValue == preferenceValue {
                isInAllowedValues = true
            }
        }
    }
    return isInAllowedValues, nil
}

func (p *Preference) IsValidValue(preferenceValue interface{}) error {
    typeSafeValue := preferenceValue
    switch p.ValueType {
    case PreferenceValueTypeInteger:
        val, ok := preferenceValue.(int64)
        if !ok {
            floatVal, ok := preferenceValue.(float64)
            if !ok || floatVal != float64(int64(floatVal)) {
                return p.ErrorValueTypeMismatch()
            }
            val = int64(floatVal)
            typeSafeValue = val
        }
        if !p.IsDiscreteValues {
            if val < p.Range.Min || val > p.Range.Max {
                return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not in the range specified, min: %v , max:%v", p.Range.Min, p.Range.Max))
            }
        }
    case PreferenceValueTypeString:
        _, ok := preferenceValue.(string)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeFloat:
        _, ok := preferenceValue.(float64)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    case PreferenceValueTypeBoolean:
        _, ok := preferenceValue.(bool)
        if !ok {
            return p.ErrorValueTypeMismatch()
        }
    }

    // check the validity of the value being part of allowed values or the range specified if any
    if p.IsDiscreteValues {
        if p.AllowedValues != nil {
            isInAllowedValues, valueMisMatchErr := p.checkIfInAllowedValues(typeSafeValue)

            if valueMisMatchErr != nil {
                return valueMisMatchErr
            }
            if !isInAllowedValues {
                return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("the preference value is not in the list of allowedValues: %v", p.AllowedValues))
            }
        }
    }
    return nil
}

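// Sketch, not part of this change: IsValidValue first checks the Go type against
// ValueType (coercing whole float64 values to int64, since JSON numbers decode as
// float64), then checks AllowedValues for discrete preferences or Range otherwise.
func exampleValidate() error {
    pref := NewDefaultPreferenceMap()["ORG_ONBOARDING"]
    if err := pref.IsValidValue(true); err != nil {
        // not reached: true is a bool and is listed in AllowedValues
        return err
    }
    // rejected with ErrorValueTypeMismatch: a string is not a boolean
    return pref.IsValidValue("yes")
}
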
func (p *Preference) IsEnabledForScope(scope string) bool {
    isPreferenceEnabledForGivenScope := false
    if p.AllowedScopes != nil {
        for _, allowedScope := range p.AllowedScopes {
            if allowedScope == strings.ToLower(scope) {
                isPreferenceEnabledForGivenScope = true
            }
        }
    }
    return isPreferenceEnabledForGivenScope
}

func (p *Preference) SanitizeValue(preferenceValue interface{}) interface{} {
    switch p.ValueType {
    case PreferenceValueTypeBoolean:
        if preferenceValue == "1" || preferenceValue == true || preferenceValue == "true" {
            return true
        } else {
            return false
        }
    default:
        return preferenceValue
    }
}

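// Sketch, not part of this change: a typical caller gates on scope first and then
// normalises whatever raw value came back from storage, where booleans may have
// been persisted as the strings "1" or "true".
func exampleScopeAndSanitize(raw interface{}) (interface{}, bool) {
    pref := NewDefaultPreferenceMap()["WELCOME_CHECKLIST_DO_LATER"]
    if !pref.IsEnabledForScope("user") {
        return nil, false
    }
    // "1", "true" and true all normalise to true; everything else becomes false
    return pref.SanitizeValue(raw), true
}
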
type PreferenceStore interface {
    GetOrgPreference(context.Context, string, string) (*StorableOrgPreference, error)
    GetAllOrgPreferences(context.Context, string) ([]*StorableOrgPreference, error)
    UpsertOrgPreference(context.Context, *StorableOrgPreference) error
    GetUserPreference(context.Context, string, string) (*StorableUserPreference, error)
    GetAllUserPreferences(context.Context, string) ([]*StorableUserPreference, error)
    UpsertUserPreference(context.Context, *StorableUserPreference) error
}
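
// Sketch, not part of this change, of a read-through lookup built on this
// interface. The (orgID, preferenceKey) argument order is an assumption, as is
// treating a nil result as "nothing persisted"; the value field on
// StorableOrgPreference is defined outside this diff, so the final step that
// would call SanitizeValue on the stored value is left as a comment.
func exampleOrgPreferenceOrDefault(ctx context.Context, store PreferenceStore, orgID string, key string) (interface{}, error) {
    def, ok := NewDefaultPreferenceMap()[key]
    if !ok || !def.IsEnabledForScope("org") {
        return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, fmt.Sprintf("unknown or non-org preference: %s", key))
    }
    storable, err := store.GetOrgPreference(ctx, orgID, key)
    if err != nil {
        return nil, err
    }
    if storable == nil {
        return def.DefaultValue, nil
    }
    // placeholder: a real resolver would return def.SanitizeValue(<stored value>)
    return def.DefaultValue, nil
}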