Compare commits

..

6 Commits

9 changed files with 1250 additions and 1593 deletions

View File

@@ -12,6 +12,8 @@ export interface MockUPlotInstance {
export interface MockUPlotPaths {
spline: jest.Mock;
bars: jest.Mock;
linear: jest.Mock;
stepped: jest.Mock;
}
// Create mock instance methods
@@ -23,10 +25,20 @@ const createMockUPlotInstance = (): MockUPlotInstance => ({
setSeries: jest.fn(),
});
// Create mock paths
const mockPaths: MockUPlotPaths = {
spline: jest.fn(),
bars: jest.fn(),
// Path builder: (self, seriesIdx, idx0, idx1) => paths or null
const createMockPathBuilder = (): jest.Mock =>
jest.fn(() => ({
stroke: jest.fn(),
fill: jest.fn(),
clip: jest.fn(),
}));
// Create mock paths - linear, spline, and stepped are needed by UPlotSeriesBuilder.getPathBuilder
const mockPaths = {
spline: jest.fn(() => createMockPathBuilder()),
bars: jest.fn(() => createMockPathBuilder()),
linear: jest.fn(() => createMockPathBuilder()),
stepped: jest.fn((opts?: { align?: number }) => createMockPathBuilder()),
};
// Mock static methods

View File

@@ -0,0 +1,356 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { uPlotXAxisValuesFormat } from 'lib/uPlotLib/utils/constants';
import getGridColor from 'lib/uPlotLib/utils/getGridColor';
import type uPlot from 'uplot';
import type { AxisProps } from '../types';
import { UPlotAxisBuilder } from '../UPlotAxisBuilder';
jest.mock('components/Graph/yAxisConfig', () => ({
getToolTipValue: jest.fn(),
}));
const createAxisProps = (overrides: Partial<AxisProps> = {}): AxisProps => ({
scaleKey: 'x',
label: 'Time',
isDarkMode: false,
show: true,
...overrides,
});
describe('UPlotAxisBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
it('builds basic axis config with defaults', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
label: 'Time',
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('x');
expect(config.label).toBe('Time');
expect(config.show).toBe(true);
expect(config.side).toBe(2);
expect(config.gap).toBe(5);
// Default grid and ticks are created
expect(config.grid).toEqual({
stroke: getGridColor(false),
width: 0.2,
show: true,
});
expect(config.ticks).toEqual({
width: 0.3,
show: true,
});
});
it('merges custom grid config over defaults and respects isDarkMode and isLogScale', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
isDarkMode: true,
isLogScale: true,
grid: {
width: 1,
},
}),
);
const config = builder.getConfig();
expect(config.grid).toEqual({
// stroke falls back to theme-based default when not provided
stroke: getGridColor(true),
// provided width overrides default
width: 1,
// show falls back to default when not provided
show: true,
});
});
it('uses provided ticks config when present and falls back to defaults otherwise', () => {
const customTicks = { width: 1, show: false };
const withTicks = new UPlotAxisBuilder(
createAxisProps({
ticks: customTicks,
}),
);
const withoutTicks = new UPlotAxisBuilder(createAxisProps());
expect(withTicks.getConfig().ticks).toBe(customTicks);
expect(withoutTicks.getConfig().ticks).toEqual({
width: 0.3,
show: true,
});
});
it('uses time-based X-axis values formatter for time-series like panels', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.TIME_SERIES,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(uPlotXAxisValuesFormat);
});
it('does not attach X-axis datetime formatter when panel type is not supported', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.LIST, // not in PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT
}),
);
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('builds Y-axis values formatter that delegates to getToolTipValue', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
yAxisUnit: 'ms',
decimalPrecision: 3,
}),
);
const config = yBuilder.getConfig();
expect(typeof config.values).toBe('function');
(getToolTipValue as jest.Mock).mockImplementation(
(value: string, unit?: string, precision?: unknown) =>
`formatted:${value}:${unit}:${precision}`,
);
// Simulate uPlot calling the values formatter
const valuesFn = (config.values as unknown) as (
self: uPlot,
vals: unknown[],
) => string[];
const result = valuesFn({} as uPlot, [1, null, 2, Number.NaN]);
expect(getToolTipValue).toHaveBeenCalledTimes(2);
expect(getToolTipValue).toHaveBeenNthCalledWith(1, '1', 'ms', 3);
expect(getToolTipValue).toHaveBeenNthCalledWith(2, '2', 'ms', 3);
// Null/NaN values should map to empty strings
expect(result).toEqual(['formatted:1:ms:3', '', 'formatted:2:ms:3', '']);
});
it('adds dynamic size calculator only for Y-axis when size is not provided', () => {
const yBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
}),
);
const xBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
}),
);
const yConfig = yBuilder.getConfig();
const xConfig = xBuilder.getConfig();
expect(typeof yConfig.size).toBe('function');
expect(xConfig.size).toBeUndefined();
});
it('uses explicit size function when provided', () => {
const sizeFn: uPlot.Axis.Size = jest.fn(() => 100) as uPlot.Axis.Size;
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
size: sizeFn,
}),
);
const config = builder.getConfig();
expect(config.size).toBe(sizeFn);
});
it('builds stroke color based on stroke and isDarkMode', () => {
const explicitStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: '#ff0000',
}),
);
const darkStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: true,
}),
);
const lightStroke = new UPlotAxisBuilder(
createAxisProps({
stroke: undefined,
isDarkMode: false,
}),
);
expect(explicitStroke.getConfig().stroke).toBe('#ff0000');
expect(darkStroke.getConfig().stroke).toBe('white');
expect(lightStroke.getConfig().stroke).toBe('black');
});
it('uses explicit values formatter when provided', () => {
const customValues: uPlot.Axis.Values = jest.fn(() => ['a', 'b', 'c']);
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'y',
values: customValues,
}),
);
const config = builder.getConfig();
expect(config.values).toBe(customValues);
});
it('returns undefined values for scaleKey neither x nor y', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'custom' }));
const config = builder.getConfig();
expect(config.values).toBeUndefined();
});
it('omits stroke when stroke and isDarkMode are both undefined', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'custom',
stroke: undefined,
isDarkMode: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBeUndefined();
});
it('includes space in config when provided', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', space: 50 }),
);
const config = builder.getConfig();
expect(config.space).toBe(50);
});
it('includes PANEL_TYPES.BAR and PANEL_TYPES.PIE in X-axis datetime formatter', () => {
const barBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.BAR,
}),
);
const pieBuilder = new UPlotAxisBuilder(
createAxisProps({
scaleKey: 'x',
panelType: PANEL_TYPES.PIE,
}),
);
expect(barBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
expect(pieBuilder.getConfig().values).toBe(uPlotXAxisValuesFormat);
});
it('invokes Y-axis size calculator and delegates to getExistingAxisSize when cycleNum > 1', () => {
const builder = new UPlotAxisBuilder(createAxisProps({ scaleKey: 'y' }));
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
_size: 80,
ticks: { size: 10 },
font: ['12px sans-serif'],
};
const mockSelf = ({
axes: [mockAxis],
ctx: { measureText: jest.fn(() => ({ width: 60 })), font: '' },
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(
mockSelf,
['100', '200'],
0,
2, // cycleNum > 1
);
expect(result).toBe(80);
});
it('invokes Y-axis size calculator and computes from text width when cycleNum <= 1', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', gap: 8 }),
);
const config = builder.getConfig();
const sizeFn = config.size;
expect(typeof sizeFn).toBe('function');
const mockAxis = {
ticks: { size: 12 },
font: ['12px sans-serif'],
};
const measureText = jest.fn(() => ({ width: 48 }));
const mockSelf = ({
axes: [mockAxis],
ctx: {
measureText,
get font() {
return '';
},
set font(_v: string) {
/* noop */
},
},
} as unknown) as uPlot;
const result = (sizeFn as (
s: uPlot,
v: string[],
a: number,
c: number,
) => number)(mockSelf, ['10', '2000ms'], 0, 0);
expect(measureText).toHaveBeenCalledWith('2000ms');
expect(result).toBeGreaterThanOrEqual(12 + 8);
});
it('merge updates axis props', () => {
const builder = new UPlotAxisBuilder(
createAxisProps({ scaleKey: 'y', label: 'Original' }),
);
builder.merge({ label: 'Merged', yAxisUnit: 'bytes' });
const config = builder.getConfig();
expect(config.label).toBe('Merged');
expect(config.values).toBeDefined();
});
});

View File

@@ -0,0 +1,331 @@
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import { DrawStyle, SelectionPreferencesSource } from '../types';
import { UPlotConfigBuilder } from '../UPlotConfigBuilder';
// Mock only the real boundary that hits localStorage
jest.mock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
() => ({
getStoredSeriesVisibility: jest.fn(),
}),
);
const getStoredSeriesVisibilityMock = jest.requireMock(
'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils',
) as {
getStoredSeriesVisibility: jest.Mock;
};
describe('UPlotConfigBuilder', () => {
beforeEach(() => {
jest.clearAllMocks();
});
const createSeriesProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
...overrides,
});
it('returns correct save selection preference flag from constructor args', () => {
const builder = new UPlotConfigBuilder({
shouldSaveSelectionPreference: true,
});
expect(builder.getShouldSaveSelectionPreference()).toBe(true);
});
it('returns widgetId from constructor args', () => {
const builder = new UPlotConfigBuilder({ widgetId: 'widget-123' });
expect(builder.getWidgetId()).toBe('widget-123');
});
it('sets tzDate from constructor and includes it in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder({ tzDate });
const config = builder.getConfig();
expect(config.tzDate).toBe(tzDate);
});
it('does not call onDragSelect for click without drag (width === 0)', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const uplotInstance = ({
select: { left: 10, width: 0 },
posToVal: jest.fn(),
} as unknown) as uPlot;
// Simulate uPlot calling the hook
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
if (!setSelectHook) {
throw new Error('Expected setSelect hook to be registered');
}
setSelectHook(uplotInstance);
expect(onDragSelect).not.toHaveBeenCalled();
});
it('calls onDragSelect with start and end times in milliseconds for a drag selection', () => {
const onDragSelect = jest.fn();
const builder = new UPlotConfigBuilder({ onDragSelect });
const config = builder.getConfig();
const setSelectHooks = config.hooks?.setSelect ?? [];
expect(setSelectHooks.length).toBe(1);
const posToVal = jest
.fn()
// left position
.mockReturnValueOnce(100)
// left + width
.mockReturnValueOnce(110);
const uplotInstance = ({
select: { left: 50, width: 20 },
posToVal,
} as unknown) as uPlot;
const setSelectHook = setSelectHooks[0];
expect(setSelectHook).toBeDefined();
if (!setSelectHook) {
throw new Error('Expected setSelect hook to be registered');
}
setSelectHook(uplotInstance);
expect(onDragSelect).toHaveBeenCalledTimes(1);
// 100 and 110 seconds converted to milliseconds
expect(onDragSelect).toHaveBeenCalledWith(100_000, 110_000);
});
it('adds and removes hooks via addHook, and exposes them through getConfig', () => {
const builder = new UPlotConfigBuilder();
const drawHook = jest.fn();
const remove = builder.addHook('draw', drawHook as uPlot.Hooks.Defs['draw']);
let config = builder.getConfig();
expect(config.hooks?.draw).toContain(drawHook);
// Remove and ensure it no longer appears in config
remove();
config = builder.getConfig();
expect(config.hooks?.draw ?? []).not.toContain(drawHook);
});
it('adds axes, scales, and series and wires them into the final config', () => {
const builder = new UPlotConfigBuilder();
// Add axis and scale
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addScale({ scaleKey: 'y' });
// Add two series; legend indices start from 1 (index 0 is the timestamp series)
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const config = builder.getConfig();
// Axes
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].scale).toBe('y');
// Scales are returned as an object keyed by scaleKey
expect(config.scales).toBeDefined();
expect(Object.keys(config.scales ?? {})).toContain('y');
// Series: base timestamp + 2 data series
expect(config.series).toHaveLength(3);
// Base series (index 0) has a value formatter that returns empty string
const baseSeries = config.series?.[0] as { value?: () => string };
expect(typeof baseSeries?.value).toBe('function');
expect(baseSeries?.value?.()).toBe('');
// Legend items align with series and carry label and color from series config
const legendItems = builder.getLegendItems();
expect(Object.keys(legendItems)).toEqual(['1', '2']);
expect(legendItems[1].seriesIndex).toBe(1);
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[2].label).toBe('Errors');
});
it('merges axis when addAxis is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addAxis({ scaleKey: 'y', label: 'Requests' });
builder.addAxis({ scaleKey: 'y', label: 'Updated Label', show: false });
const config = builder.getConfig();
expect(config.axes).toHaveLength(1);
expect(config.axes?.[0].label).toBe('Updated Label');
expect(config.axes?.[0].show).toBe(false);
});
it('merges scale when addScale is called twice with same scaleKey', () => {
const builder = new UPlotConfigBuilder();
builder.addScale({ scaleKey: 'y', min: 0 });
builder.addScale({ scaleKey: 'y', max: 100 });
const config = builder.getConfig();
// Only one scale entry for 'y' (merge path used, no duplicate added)
expect(config.scales).toBeDefined();
expect(Object.keys(config.scales ?? {})).toEqual(['y']);
expect(config.scales?.y?.range).toBeDefined();
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
const visibilityMap = new Map<string, boolean>([
['Requests', true],
['Errors', false],
]);
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue(
visibilityMap,
);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
const legendItems = builder.getLegendItems();
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.show).toBe(false);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.IN_MEMORY,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.getLegendItems();
builder.getConfig();
expect(
getStoredSeriesVisibilityMock.getStoredSeriesVisibility,
).not.toHaveBeenCalled();
});
it('adds thresholds only once per scale key', () => {
const builder = new UPlotConfigBuilder();
const thresholdsOptions = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 100 }],
};
builder.addThresholds(thresholdsOptions);
builder.addThresholds(thresholdsOptions);
const config = builder.getConfig();
const drawHooks = config.hooks?.draw ?? [];
// Only a single draw hook should be registered for the same scaleKey
expect(drawHooks.length).toBe(1);
});
it('merges cursor configuration with defaults instead of replacing them', () => {
const builder = new UPlotConfigBuilder();
builder.setCursor({
drag: { setScale: false },
});
const config = builder.getConfig();
expect(config.cursor?.drag?.setScale).toBe(false);
// Points configuration from DEFAULT_CURSOR_CONFIG should still be present
expect(config.cursor?.points).toBeDefined();
});
it('adds plugins and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const plugin: uPlot.Plugin = {
opts: (): void => {},
hooks: {},
};
builder.addPlugin(plugin);
const config = builder.getConfig();
expect(config.plugins).toContain(plugin);
});
it('sets bands and includes them in config', () => {
const builder = new UPlotConfigBuilder();
const bands: uPlot.Band[] = [{ series: [1, 2], fill: (): string => '#000' }];
builder.setBands(bands);
const config = builder.getConfig();
expect(config.bands).toEqual(bands);
});
it('sets padding, legend, focus, select, tzDate and includes them in config', () => {
const tzDate = (ts: number): Date => new Date(ts);
const builder = new UPlotConfigBuilder();
builder.setPadding([10, 20, 30, 40]);
builder.setLegend({ show: true, live: true });
builder.setFocus({ alpha: 0.5 });
builder.setSelect({ left: 0, width: 0, top: 0, height: 0 });
builder.setTzDate(tzDate);
const config = builder.getConfig();
expect(config.padding).toEqual([10, 20, 30, 40]);
expect(config.legend).toEqual({ show: true, live: true });
expect(config.focus).toEqual({ alpha: 0.5 });
expect(config.select).toEqual({ left: 0, width: 0, top: 0, height: 0 });
expect(config.tzDate).toBe(tzDate);
});
it('does not include plugins when none added', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.plugins).toBeUndefined();
});
it('does not include bands when empty', () => {
const builder = new UPlotConfigBuilder();
const config = builder.getConfig();
expect(config.bands).toBeUndefined();
});
});

View File

@@ -0,0 +1,235 @@
import type uPlot from 'uplot';
import * as scaleUtils from '../../utils/scale';
import type { ScaleProps } from '../types';
import { DistributionType } from '../types';
import { UPlotScaleBuilder } from '../UPlotScaleBuilder';
const createScaleProps = (overrides: Partial<ScaleProps> = {}): ScaleProps => ({
scaleKey: 'y',
time: false,
auto: undefined,
min: undefined,
max: undefined,
softMin: undefined,
softMax: undefined,
distribution: DistributionType.Linear,
...overrides,
});
describe('UPlotScaleBuilder', () => {
const getFallbackMinMaxSpy = jest.spyOn(
scaleUtils,
'getFallbackMinMaxTimeStamp',
);
beforeEach(() => {
jest.clearAllMocks();
});
it('initializes softMin/softMax correctly when both are 0 (treated as unset)', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 0,
softMax: 0,
}),
);
// Non-time scale, so the config path goes through the thresholds pipeline; we just care that
// adjustSoftLimitsWithThresholds receives null soft limits instead of 0/0.
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(null, null, undefined, undefined);
});
it('handles time scales using explicit min/max and rounds max down to the previous minute', () => {
const min = 1_700_000_000; // seconds
const max = 1_700_000_600; // seconds
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min,
max,
}),
);
const config = builder.getConfig();
const xScale = config.x;
expect(xScale.time).toBe(true);
expect(xScale.auto).toBe(false);
expect(Array.isArray(xScale.range)).toBe(true);
const [resolvedMin, resolvedMax] = xScale.range as [number, number];
// min is passed through
expect(resolvedMin).toBe(min);
// max is coerced to "endTime - 1 minute" and rounded down to minute precision
const oneMinuteAgoTimestamp = (max - 60) * 1000;
const currentDate = new Date(oneMinuteAgoTimestamp);
currentDate.setSeconds(0);
currentDate.setMilliseconds(0);
const expectedMax = Math.floor(currentDate.getTime() / 1000);
expect(resolvedMax).toBe(expectedMax);
});
it('falls back to getFallbackMinMaxTimeStamp when time scale has no min/max', () => {
getFallbackMinMaxSpy.mockReturnValue({
fallbackMin: 100,
fallbackMax: 200,
});
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'x',
time: true,
min: undefined,
max: undefined,
}),
);
const config = builder.getConfig();
const [resolvedMin, resolvedMax] = config.x.range as [number, number];
expect(getFallbackMinMaxSpy).toHaveBeenCalled();
expect(resolvedMin).toBe(100);
// max is aligned to "fallbackMax - 60 seconds" minute boundary
expect(resolvedMax).toBeLessThanOrEqual(200);
expect(resolvedMax).toBeGreaterThan(100);
});
it('pipes limits through soft-limit adjustment and log-scale normalization before range config', () => {
const adjustSpy = jest.spyOn(scaleUtils, 'adjustSoftLimitsWithThresholds');
const normalizeSpy = jest.spyOn(scaleUtils, 'normalizeLogScaleLimits');
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const thresholds = {
scaleKey: 'y',
thresholds: [{ thresholdValue: 10 }],
yAxisUnit: 'ms',
};
const builder = new UPlotScaleBuilder(
createScaleProps({
softMin: 1,
softMax: 5,
min: 0,
max: 100,
distribution: DistributionType.Logarithmic,
thresholds,
logBase: 2,
padMinBy: 0.1,
padMaxBy: 0.2,
}),
);
builder.getConfig();
expect(adjustSpy).toHaveBeenCalledWith(1, 5, thresholds.thresholds, 'ms');
expect(normalizeSpy).toHaveBeenCalledWith({
distr: DistributionType.Logarithmic,
logBase: 2,
limits: {
min: 0,
max: 100,
softMin: expect.anything(),
softMax: expect.anything(),
},
});
expect(getRangeConfigSpy).toHaveBeenCalled();
});
it('computes distribution config for non-time scales and wires range function when range is not provided', () => {
const createRangeFnSpy = jest.spyOn(scaleUtils, 'createRangeFunction');
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
time: false,
distribution: DistributionType.Linear,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(createRangeFnSpy).toHaveBeenCalled();
// range should be a function when not provided explicitly
expect(typeof yScale.range).toBe('function');
// distribution config should be applied
expect(yScale.distr).toBeDefined();
expect(yScale.log).toBeDefined();
});
it('respects explicit range function when provided on props', () => {
const explicitRange: uPlot.Scale.Range = jest.fn(() => [
0,
10,
]) as uPlot.Scale.Range;
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
range: explicitRange,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(yScale.range).toBe(explicitRange);
});
it('derives auto flag when not explicitly provided, based on hasFixedRange and time', () => {
const getRangeConfigSpy = jest.spyOn(scaleUtils, 'getRangeConfig');
const builder = new UPlotScaleBuilder(
createScaleProps({
min: 0,
max: 100,
time: false,
}),
);
const config = builder.getConfig();
const yScale = config.y;
expect(getRangeConfigSpy).toHaveBeenCalled();
// For non-time scale with fixed min/max, hasFixedRange is true → auto should remain false
expect(yScale.auto).toBe(false);
});
it('merge updates internal min/max/soft limits while preserving other props', () => {
const builder = new UPlotScaleBuilder(
createScaleProps({
scaleKey: 'y',
min: 0,
max: 10,
softMin: 1,
softMax: 9,
time: false,
}),
);
builder.merge({
min: 2,
softMax: undefined,
});
const config = builder.getConfig();
const yScale = config.y;
// We can't read private fields directly, but we can assert that rangeConfig
// has been recomputed using the merged values by checking that createRangeFunction
// is still called without throwing and returns a working range function.
expect(typeof yScale.range).toBe('function');
});
});

View File

@@ -0,0 +1,309 @@
import { themeColors } from 'constants/theme';
import uPlot from 'uplot';
import type { SeriesProps } from '../types';
import {
DrawStyle,
LineInterpolation,
LineStyle,
VisibilityMode,
} from '../types';
import { UPlotSeriesBuilder } from '../UPlotSeriesBuilder';
const createBaseProps = (
overrides: Partial<SeriesProps> = {},
): SeriesProps => ({
scaleKey: 'y',
label: 'Requests',
colorMapping: {},
drawStyle: DrawStyle.Line,
isDarkMode: false,
...overrides,
});
describe('UPlotSeriesBuilder', () => {
it('maps basic props into uPlot series config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Latency',
spanGaps: true,
show: false,
}),
);
const config = builder.getConfig();
expect(config.scale).toBe('y');
expect(config.label).toBe('Latency');
expect(config.spanGaps).toBe(true);
expect(config.show).toBe(false);
expect(config.pxAlign).toBe(true);
expect(typeof config.value).toBe('function');
});
it('uses explicit lineColor when provided, regardless of mapping', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineColor: '#ff00ff',
colorMapping: { Requests: '#00ff00' },
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#ff00ff');
});
it('falls back to theme colors when no label is provided', () => {
const darkBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: true,
lineColor: undefined,
}),
);
const lightBuilder = new UPlotSeriesBuilder(
createBaseProps({
label: undefined,
isDarkMode: false,
lineColor: undefined,
}),
);
const darkConfig = darkBuilder.getConfig();
const lightConfig = lightBuilder.getConfig();
expect(darkConfig.stroke).toBe(themeColors.white);
expect(lightConfig.stroke).toBe(themeColors.black);
});
it('uses colorMapping when available and no explicit lineColor is provided', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'Requests',
colorMapping: { Requests: '#123456' },
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBe('#123456');
});
it('passes through a custom pathBuilder when provided', () => {
const customPaths = (jest.fn() as unknown) as uPlot.Series.PathBuilder;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pathBuilder: customPaths,
}),
);
const config = builder.getConfig();
expect(config.paths).toBe(customPaths);
});
it('disables line paths when drawStyle is Points, but still renders points', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Points,
pointSize: 4,
lineWidth: 2,
lineColor: '#aa00aa',
}),
);
const config = builder.getConfig();
expect(typeof config.paths).toBe('function');
expect(config.paths && config.paths({} as uPlot, 1, 0, 10)).toBeNull();
expect(config.points).toBeDefined();
expect(config.points?.stroke).toBe('#aa00aa');
expect(config.points?.fill).toBe('#aa00aa');
expect(config.points?.show).toBe(true);
expect(config.points?.size).toBe(4);
});
it('derives point size based on lineWidth and pointSize', () => {
const smallPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 4,
pointSize: 2,
}),
);
const largePointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
lineWidth: 2,
pointSize: 4,
}),
);
const smallConfig = smallPointsBuilder.getConfig();
const largeConfig = largePointsBuilder.getConfig();
expect(smallConfig.points?.size).toBeUndefined();
expect(largeConfig.points?.size).toBe(4);
});
it('uses pointsBuilder when provided instead of default visibility logic', () => {
const pointsBuilder: uPlot.Series.Points.Show = jest.fn(
() => true,
) as uPlot.Series.Points.Show;
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsBuilder,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.show).toBe(pointsBuilder);
});
it('respects VisibilityMode for point visibility when no custom pointsBuilder is given', () => {
const neverPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Never,
}),
);
const alwaysPointsBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
showPoints: VisibilityMode.Always,
}),
);
const neverConfig = neverPointsBuilder.getConfig();
const alwaysConfig = alwaysPointsBuilder.getConfig();
expect(neverConfig.points?.show).toBe(false);
expect(alwaysConfig.points?.show).toBe(true);
});
it('applies LineStyle.Dashed and lineCap to line config', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
lineStyle: LineStyle.Dashed,
lineCap: 'round' as CanvasLineCap,
}),
);
const config = builder.getConfig();
expect(config.dash).toEqual([10, 10]);
expect(config.cap).toBe('round');
});
it('builds default paths for Line drawStyle and invokes the path builder', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Linear,
}),
);
const config = builder.getConfig();
expect(typeof config.paths).toBe('function');
const result = config.paths?.({} as uPlot, 1, 0, 10);
expect(result).toBeDefined();
});
it('uses StepBefore and StepAfter interpolation for line paths', () => {
const stepBeforeBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepBefore,
}),
);
const stepAfterBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.StepAfter,
}),
);
const stepBeforeConfig = stepBeforeBuilder.getConfig();
const stepAfterConfig = stepAfterBuilder.getConfig();
expect(typeof stepBeforeConfig.paths).toBe('function');
expect(typeof stepAfterConfig.paths).toBe('function');
expect(stepBeforeConfig.paths?.({} as uPlot, 1, 0, 5)).toBeDefined();
expect(stepAfterConfig.paths?.({} as uPlot, 1, 0, 5)).toBeDefined();
});
it('defaults to spline interpolation when lineInterpolation is Spline or undefined', () => {
const splineBuilder = new UPlotSeriesBuilder(
createBaseProps({
drawStyle: DrawStyle.Line,
lineInterpolation: LineInterpolation.Spline,
}),
);
const defaultBuilder = new UPlotSeriesBuilder(
createBaseProps({ drawStyle: DrawStyle.Line }),
);
const splineConfig = splineBuilder.getConfig();
const defaultConfig = defaultBuilder.getConfig();
expect(typeof splineConfig.paths).toBe('function');
expect(typeof defaultConfig.paths).toBe('function');
});
it('coerces non-boolean spanGaps to false', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({ spanGaps: undefined }),
);
const config = builder.getConfig();
expect(config.spanGaps).toBe(false);
});
it('preserves spanGaps true when provided as boolean', () => {
const builder = new UPlotSeriesBuilder(createBaseProps({ spanGaps: true }));
const config = builder.getConfig();
expect(config.spanGaps).toBe(true);
});
it('uses generateColor when label has no colorMapping and no lineColor', () => {
const builder = new UPlotSeriesBuilder(
createBaseProps({
label: 'CustomSeries',
colorMapping: {},
lineColor: undefined,
}),
);
const config = builder.getConfig();
expect(config.stroke).toBeDefined();
expect(typeof config.stroke).toBe('string');
expect((config.stroke as string).length).toBeGreaterThan(0);
});
it('passes through pointsFilter when provided', () => {
const pointsFilter: uPlot.Series.Points.Filter = jest.fn(
(_self, _seriesIdx, _show) => null,
);
const builder = new UPlotSeriesBuilder(
createBaseProps({
pointsFilter,
drawStyle: DrawStyle.Line,
}),
);
const config = builder.getConfig();
expect(config.points?.filter).toBe(pointsFilter);
});
});

View File

@@ -151,8 +151,7 @@ func (c *AccountConfig) Value() (driver.Value, error) {
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
return serialized, nil
}
type AgentReport struct {
@@ -187,8 +186,7 @@ func (r *AgentReport) Value() (driver.Value, error) {
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
return serialized, nil
}
type CloudIntegrationService struct {
@@ -242,6 +240,5 @@ func (c *CloudServiceConfig) Value() (driver.Value, error) {
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
return serialized, nil
}
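
For reference, a minimal, self-contained sketch of the driver.Valuer pattern these three hunks adjust, returning the marshalled JSON bytes directly as restored by this change; ExampleConfig and its field are hypothetical stand-ins, not types from this repository:

package example

import (
	"database/sql/driver"
	"encoding/json"
	"fmt"
)

// ExampleConfig is a hypothetical struct standing in for the real config types.
type ExampleConfig struct {
	Regions []string `json:"regions"`
}

// Value implements driver.Valuer by handing the marshalled JSON bytes straight
// to database/sql, mirroring the `return serialized, nil` form used above.
func (c ExampleConfig) Value() (driver.Value, error) {
	serialized, err := json.Marshal(c)
	if err != nil {
		return nil, fmt.Errorf("couldn't serialize example config to JSON: %w", err)
	}
	return serialized, nil
}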

View File

@@ -1,206 +0,0 @@
from http import HTTPStatus
from typing import Callable
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
try:
with postgres.conn.connect() as conn:
conn.execute(text("TRUNCATE TABLE cloud_integration CASCADE"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
logger.info(f"Cleanup skipped: {str(e)[:100]}")
def test_generate_connection_url(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test to generate connection URL for AWS CloudFormation stack deployment."""
# Clean up any corrupted data from previous test runs
cleanup_cloud_accounts(postgres)
# Get authentication token for admin user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# Mock the deployment info query (for license validation)
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
# Prepare request payload
request_payload = {
"account_config": {"regions": ["us-east-1", "us-west-2"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
# Assert successful response
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}: {response.text}"
# Parse response JSON
response_data = response.json()
# Assert response structure contains expected data
assert "data" in response_data, "Response should contain 'data' field"
# Assert required fields in the response data
expected_fields = ["account_id", "connection_url"]
for field in expected_fields:
assert (
field in response_data["data"]
), f"Response data should contain '{field}' field"
data = response_data["data"]
# Assert account_id is a valid UUID format
assert (
len(data["account_id"]) > 0
), "account_id should be a non-empty string (UUID)"
# Assert connection_url contains expected CloudFormation parameters
connection_url = data["connection_url"]
# Verify it's an AWS CloudFormation URL
assert (
"console.aws.amazon.com/cloudformation" in connection_url
), "connection_url should be an AWS CloudFormation URL"
# Verify region is included
assert (
"region=us-east-1" in connection_url
), "connection_url should contain the specified region"
# Verify required parameters are in the URL
required_params = [
"param_SigNozIntegrationAgentVersion=v0.0.8",
"param_SigNozApiUrl=https%3A%2F%2Ftest-deployment.test.signoz.cloud",
"param_SigNozApiKey=test-api-key-789",
"param_SigNozAccountId=", # Will be a UUID
"param_IngestionUrl=https%3A%2F%2Fingest.test.signoz.cloud",
"param_IngestionKey=test-ingestion-key-123456",
"stackName=signoz-integration",
"templateURL=https%3A%2F%2Fsignoz-integrations.s3.us-east-1.amazonaws.com%2Faws-quickcreate-template-v0.0.8.json",
]
for param in required_params:
assert (
param in connection_url
), f"connection_url should contain parameter: {param}"
logger.info("Connection URL generated successfully")
logger.info(f"Account ID: {data['account_id']}")
logger.info(f"Connection URL length: {len(connection_url)} characters")
def test_generate_connection_url_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
) -> None:
"""Test that unsupported cloud providers return an error."""
# Get authentication token for admin user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Try with GCP (unsupported)
cloud_provider = "gcp"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
request_payload = {
"account_config": {"regions": ["us-central1"]},
"agent_config": {
"region": "us-central1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
# Should return Bad Request for unsupported provider
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400 for unsupported provider, got {response.status_code}"
response_data = response.json()
assert "error" in response_data, "Response should contain 'error' field"
assert (
"unsupported cloud provider" in response_data["error"].lower()
), "Error message should indicate unsupported provider"
logger.info("Unsupported provider correctly rejected with 400 Bad Request")

View File

@@ -1,590 +0,0 @@
from http import HTTPStatus
from typing import Callable
import uuid
import pytest
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
try:
with postgres.conn.connect() as conn:
# Delete all records instead of truncating, in case the table exists
conn.execute(text("DELETE FROM cloud_integration"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
# Table might not exist, which is fine
logger.info(f"Cleanup skipped or partial: {str(e)[:100]}")
def generate_unique_cloud_account_id() -> str:
"""Generate a unique cloud account ID for testing."""
# Use the first 12 digits of the UUID's integer value to simulate the AWS account ID format
return str(uuid.uuid4().int)[:12]
def simulate_agent_checkin(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str,
account_id: str,
cloud_account_id: str,
) -> dict:
"""Simulate an agent check-in to mark the account as connected.
Returns:
dict with the response from check-in
"""
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/agent-check-in"
)
checkin_payload = {
"account_id": account_id,
"cloud_account_id": cloud_account_id,
"data": {},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=checkin_payload,
timeout=10,
)
if response.status_code != HTTPStatus.OK:
logger.error(f"Agent check-in failed: {response.status_code}, response: {response.text}")
assert (
response.status_code == HTTPStatus.OK
), f"Agent check-in failed: {response.status_code}"
logger.info(f"Agent check-in completed for account: {account_id}")
response_data = response.json()
return response_data.get("data", response_data)
def create_test_account(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str = "aws",
) -> dict:
"""Create a test account via generate-connection-url.
Returns the data as-is from the API response. Caller is responsible for
doing agent check-in if needed to mark the account as connected.
Returns:
dict with account_id and connection_url from the API
"""
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
)
request_payload = {
"account_config": {"regions": ["us-east-1"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Failed to create test account: {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
return data
def test_list_connected_accounts_empty(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing connected accounts when there are none."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
assert "accounts" in data, "Response should contain 'accounts' field"
assert isinstance(data["accounts"], list), "Accounts should be a list"
# Note: If the table doesn't exist yet, cleanup won't work and there might be leftover data.
# This is acceptable for integration tests with the --reuse flag.
initial_count = len(data["accounts"])
logger.info(f"Accounts list returned successfully with {initial_count} existing account(s)")
def test_list_connected_accounts_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing connected accounts after creating one."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# List accounts
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "accounts" in data, "Response should contain 'accounts' field"
assert isinstance(data["accounts"], list), "Accounts should be a list"
# Find our account in the list (there may be leftover accounts from previous test runs)
account = next((a for a in data["accounts"] if a["id"] == account_id), None)
assert account is not None, f"Account {account_id} should be found in list"
assert account["id"] == account_id, "Account ID should match"
assert "config" in account, "Account should have config field"
assert "status" in account, "Account should have status field"
logger.info(f"Found account in list: {account_id}")
def test_get_account_status(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting the status of a specific account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account (no check-in needed for status check)
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Get account status
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/status"
)
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == account_id, "Account ID should match"
assert "status" in data, "Response should contain 'status' field"
assert "integration" in data["status"], "Status should contain 'integration' field"
logger.info(f"Retrieved status for account: {account_id}")
def test_get_account_status_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting status for a non-existent account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/status"
)
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Non-existent account correctly returned 404")
def test_update_account_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating account configuration."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# Update account configuration
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/config"
)
updated_config = {
"config": {"regions": ["us-east-1", "us-west-2", "eu-west-1"]}
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=updated_config,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == account_id, "Account ID should match"
# Verify the update by listing accounts
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_response_data = list_response.json()
list_data = list_response_data.get("data", list_response_data)
account = next((a for a in list_data["accounts"] if a["id"] == account_id), None)
assert account is not None, "Account should be found in list"
assert "config" in account, "Account should have config"
assert "regions" in account["config"], "Config should have regions"
assert len(account["config"]["regions"]) == 3, "Should have 3 regions"
assert set(account["config"]["regions"]) == {
"us-east-1",
"us-west-2",
"eu-west-1",
}, "Regions should match updated config"
logger.info(f"Updated account configuration: {account_id}")
def test_disconnect_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disconnecting an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# Disconnect the account
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/disconnect"
)
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
# Verify account is no longer in the connected list
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_response_data = list_response.json()
list_data = list_response_data.get("data", list_response_data)
assert len(list_data["accounts"]) == 0, "Should have no connected accounts"
logger.info(f"Disconnected account: {account_id}")
def test_disconnect_account_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disconnecting a non-existent account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/disconnect"
)
response = requests.delete(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Disconnect non-existent account correctly returned 404")
def test_list_accounts_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing accounts for an unsupported cloud provider."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400, got {response.status_code}"
logger.info("Unsupported provider correctly rejected with 400 Bad Request")

View File

@@ -1,787 +0,0 @@
from http import HTTPStatus
from typing import Callable
import uuid
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
"""Clean up cloud_integration table to avoid corrupted data issues."""
try:
with postgres.conn.connect() as conn:
# Delete all records instead of truncating, in case the table exists
conn.execute(text("DELETE FROM cloud_integration"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
# Table might not exist, which is fine
logger.info(f"Cleanup skipped or partial: {str(e)[:100]}")
def generate_unique_cloud_account_id() -> str:
"""Generate a unique cloud account ID for testing."""
# Use the first 12 digits of the UUID's integer value to simulate the AWS account ID format
return str(uuid.uuid4().int)[:12]
def simulate_agent_checkin(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str,
account_id: str,
cloud_account_id: str,
) -> dict:
"""Simulate an agent check-in to mark the account as connected.
Returns:
dict with the response from check-in
"""
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/agent-check-in"
checkin_payload = {
"account_id": account_id,
"cloud_account_id": cloud_account_id,
"data": {},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=checkin_payload,
timeout=10,
)
if response.status_code != HTTPStatus.OK:
logger.error(
f"Agent check-in failed: {response.status_code}, response: {response.text}"
)
assert (
response.status_code == HTTPStatus.OK
), f"Agent check-in failed: {response.status_code}"
logger.info(f"Agent check-in completed for account: {account_id}")
response_data = response.json()
return response_data.get("data", response_data)
def create_test_account(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str = "aws",
) -> dict:
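    """Create a test cloud integration account via the generate-connection-url API.

    Returns:
        dict with the account data from the response (includes 'account_id').
    """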
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
request_payload = {
"account_config": {"regions": ["us-east-1"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Failed to create test account: {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
return data
def test_list_services_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing available services without specifying an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "services" in data, "Response should contain 'services' field"
assert isinstance(data["services"], list), "Services should be a list"
assert len(data["services"]) > 0, "Should have at least one service available"
# Verify service structure
service = data["services"][0]
assert "id" in service, "Service should have 'id' field"
assert "title" in service, "Service should have 'title' field"
assert "icon" in service, "Service should have 'icon' field"
def test_list_services_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing services for a specific connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# List services for the account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services?cloud_account_id={cloud_account_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "services" in data, "Response should contain 'services' field"
assert isinstance(data["services"], list), "Services should be a list"
assert len(data["services"]) > 0, "Should have at least one service available"
# Services should include config field (may be null if not configured)
service = data["services"][0]
assert "id" in service, "Service should have 'id' field"
assert "title" in service, "Service should have 'title' field"
assert "icon" in service, "Service should have 'icon' field"
logger.info(
f"Listed {len(data['services'])} services for account {cloud_account_id}"
)
def test_get_service_details_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting service details without specifying an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# First get the list of services to get a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Get service details
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify service details structure
assert "id" in data, "Service details should have 'id' field"
assert data["id"] == service_id, "Service ID should match requested ID"
assert "title" in data, "Service details should have 'name' field"
assert "overview" in data, "Service details should have 'overview' field"
    # Assert that assets contain the list of dashboards
assert "assets" in data, "Service details should have 'assets' field"
assert isinstance(data["assets"], dict), "Assets should be a dictionary"
logger.info(f"Retrieved details for service: {service_id}")
def test_get_service_details_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting service details for a specific connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services first
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Get service details with account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}?cloud_account_id={cloud_account_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify service details structure
assert "id" in data, "Service details should have 'id' field"
assert data["id"] == service_id, "Service ID should match requested ID"
assert "title" in data, "Service details should have 'title' field"
assert "overview" in data, "Service details should have 'overview' field"
assert "assets" in data, "Service details should have 'assets' field"
assert "config" in data, "Service details should have 'config' field"
assert "status" in data, "Config should have 'status' field"
logger.info(
f"Retrieved details for service {service_id} with account {cloud_account_id}"
)
def test_get_service_details_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting details for a non-existent service."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_service_id = "non-existent-service"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Non-existent service correctly returned 404")
def test_list_services_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing services for an unsupported cloud provider."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400, got {response.status_code}"
logger.info(
"Unsupported provider correctly rejected with 400 Bad Request for services"
)
def test_update_service_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating service configuration for a connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services to pick a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Update service configuration
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
config_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
"logs": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify response structure
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == service_id, "Service ID should match"
assert "config" in data, "Response should contain 'config' field"
assert "metrics" in data["config"], "Config should contain 'metrics' field"
assert "logs" in data["config"], "Config should contain 'logs' field"
logger.info(f"Updated service config for {service_id} on account {cloud_account_id}")
def test_update_service_config_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating service config without a connected account should fail."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# Get a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
service_id = list_data["services"][0]["id"]
# Try to update config with non-existent account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
fake_cloud_account_id = generate_unique_cloud_account_id()
config_payload = {
"cloud_account_id": fake_cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
    # TODO: improve the handler logic; it currently returns 500 for a non-existent account
assert (
response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
), f"Expected 500 for non-existent account, got {response.status_code}"
logger.info("Update service config correctly rejected for non-existent account")
def test_update_service_config_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating config for a non-existent service should fail."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Try to update config for invalid service
fake_service_id = "non-existent-service"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}/config"
config_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404 for invalid service, got {response.status_code}"
logger.info("Update service config correctly rejected for invalid service")
def test_update_service_config_disable_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disabling a service by updating config with enabled=false."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get a valid service
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
service_id = list_data["services"][0]["id"]
# First enable the service
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
enable_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
enable_response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=enable_payload,
timeout=10,
)
assert enable_response.status_code == HTTPStatus.OK, "Failed to enable service"
# Now disable the service
disable_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": False},
"logs": {"enabled": False},
},
}
disable_response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=disable_payload,
timeout=10,
)
assert (
disable_response.status_code == HTTPStatus.OK
), f"Expected 200, got {disable_response.status_code}"
response_data = disable_response.json()
data = response_data.get("data", response_data)
# Verify service is disabled
assert data["config"]["metrics"]["enabled"] == False, "Metrics should be disabled"
assert data["config"]["logs"]["enabled"] == False, "Logs should be disabled"
logger.info(f"Successfully disabled service {service_id}")