chore: use uv (#9964)

Author: Srikanth Chekuri
Date: 2026-01-10 14:03:43 +05:30
Committed by: GitHub
Parent: 0a81bf8060
Commit: 2c6c034e60

8 changed files with 1323 additions and 2249 deletions


@@ -21,11 +21,11 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: 3.13
-      - name: poetry
+      - name: uv
+        uses: astral-sh/setup-uv@v4
       - name: install
         run: |
-          python -m pip install poetry==2.1.2
-          python -m poetry config virtualenvs.in-project true
-          cd tests/integration && poetry install --no-root
+          cd tests/integration && uv sync
       - name: fmt
         run: |
           make py-fmt
@@ -67,11 +67,11 @@ jobs:
         uses: actions/setup-python@v5
         with:
           python-version: 3.13
-      - name: poetry
+      - name: uv
+        uses: astral-sh/setup-uv@v4
      - name: install
         run: |
-          python -m pip install poetry==2.1.2
-          python -m poetry config virtualenvs.in-project true
-          cd tests/integration && poetry install --no-root
+          cd tests/integration && uv sync
       - name: webdriver
         run: |
           wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
@@ -89,7 +89,7 @@ jobs:
       - name: run
         run: |
           cd tests/integration && \
-          poetry run pytest \
+          uv run pytest \
             --basetemp=./tmp/ \
             src/${{matrix.src}} \
             --sqlstore-provider ${{matrix.sqlstore-provider}} \

.gitignore

@@ -222,8 +222,6 @@ cython_debug/
 #.idea/

 ### Python Patch ###
-# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
-poetry.toml

 # ruff
 .ruff_cache/


@@ -202,25 +202,25 @@ docker-buildx-enterprise: go-build-enterprise js-build
 ##############################################################
 .PHONY: py-fmt
 py-fmt: ## Run black for integration tests
-	@cd tests/integration && poetry run black .
+	@cd tests/integration && uv run black .

 .PHONY: py-lint
 py-lint: ## Run lint for integration tests
-	@cd tests/integration && poetry run isort .
-	@cd tests/integration && poetry run autoflake .
-	@cd tests/integration && poetry run pylint .
+	@cd tests/integration && uv run isort .
+	@cd tests/integration && uv run autoflake .
+	@cd tests/integration && uv run pylint .

 .PHONY: py-test-setup
 py-test-setup: ## Runs integration tests
-	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --reuse --capture=no src/bootstrap/setup.py::test_setup
+	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --reuse --capture=no src/bootstrap/setup.py::test_setup

 .PHONY: py-test-teardown
 py-test-teardown: ## Runs integration tests with teardown
-	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --teardown --capture=no src/bootstrap/setup.py::test_teardown
+	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --teardown --capture=no src/bootstrap/setup.py::test_teardown

 .PHONY: py-test
 py-test: ## Runs integration tests
-	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
+	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --capture=no src/

 .PHONY: py-clean
 py-clean: ## Clear all pycache and pytest cache from tests directory recursively

@@ -9,7 +9,7 @@ SigNoz uses integration tests to verify that different components work together
 Before running integration tests, ensure you have the following installed:

 - Python 3.13+
-- Poetry (for dependency management)
+- [uv](https://docs.astral.sh/uv/getting-started/installation/)
 - Docker (for containerized services)

 ### Initial Setup
@@ -19,17 +19,19 @@ Before running integration tests, ensure you have the following installed:
    cd tests/integration
    ```

-2. Install dependencies using Poetry:
+2. Install dependencies using uv:

    ```bash
-   poetry install --no-root
+   uv sync
    ```

+   > **_NOTE:_** the build backend could throw an error while installing `psycopg2`, since it compiles from source and needs a C compiler and the libpq development headers; please see https://www.psycopg.org/docs/install.html#build-prerequisites
+
 ### Starting the Test Environment

 To spin up all the containers necessary for writing integration tests and keep them running:

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
+uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
 ```

 This command will:
@@ -42,7 +44,7 @@ This command will:
 When you're done writing integration tests, clean up the environment:

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
+uv run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
 ```

 This will destroy the running integration test setup and clean up resources.
@@ -72,7 +74,7 @@ Python and pytest form the foundation of the integration testing framework. Test
 │   ├── sqlite.py
 │   ├── types.py
 │   └── zookeeper.py
-├── poetry.lock
+├── uv.lock
 ├── pyproject.toml
 └── src
     └── bootstrap
@@ -125,7 +127,7 @@ def test_version(signoz: types.SigNoz) -> None:
 We have written a simple test which calls the `version` endpoint of the container in step 1. **In order to run just this function**, run the following command:

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version
+uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version
 ```

 > Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test.
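As an aside, here is a hypothetical sketch of what such a version test could look like. The `fixtures.types` import path matches the tree above, but the `signoz.base_url` accessor and the endpoint path are assumptions for illustration, not the framework's confirmed API; see `fixtures/types.py` for the real fixture.

```python
# Hypothetical sketch only: `signoz.base_url` is an assumed accessor for
# the container's HTTP address, and /api/v1/version is illustrative.
from http import HTTPStatus

import requests

from fixtures import types


def test_version(signoz: types.SigNoz) -> None:
    # Call the version endpoint of the SigNoz container started by test_setup.
    response = requests.get(f"{signoz.base_url}/api/v1/version", timeout=2)

    assert response.status_code == HTTPStatus.OK
```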
@@ -153,7 +155,7 @@ def test_user_registration(signoz: types.SigNoz) -> None:
         },
         timeout=2,
     )

     assert response.status_code == HTTPStatus.OK
     assert response.json()["setupCompleted"] is True
 ```
@@ -163,27 +165,27 @@ def test_user_registration(signoz: types.SigNoz) -> None:
 ### Running All Tests

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/
+uv run pytest --basetemp=./tmp/ -vv --reuse src/
 ```

 ### Running Specific Test Categories

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>
+uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>

 # Run querier tests
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/querier/
+uv run pytest --basetemp=./tmp/ -vv --reuse src/querier/

 # Run auth tests
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/
+uv run pytest --basetemp=./tmp/ -vv --reuse src/auth/
 ```

 ### Running Individual Tests

 ```bash
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name
+uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name

 # Run test_register in file a_register.py in auth suite
-poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register
+uv run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register
 ```

 ## How to configure different options for integration tests?
## How to configure different options for integration tests?
@@ -197,7 +199,7 @@ Tests can be configured using pytest options:
Example:
```bash
poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
uv run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
```
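Custom flags like `--reuse`, `--teardown`, `--sqlstore-provider`, and `--postgres-version` are wired up through pytest's standard `pytest_addoption` hook in a `conftest.py`. The snippet below is a minimal sketch of that mechanism, not SigNoz's actual conftest; the defaults and the fixture name are assumptions.

```python
# conftest.py — minimal sketch of registering custom pytest options.
# Option defaults and the fixture name are illustrative assumptions.
import pytest


def pytest_addoption(parser: pytest.Parser) -> None:
    parser.addoption(
        "--reuse",
        action="store_true",
        default=False,
        help="Reuse an already-running test environment",
    )
    parser.addoption(
        "--sqlstore-provider",
        action="store",
        default="sqlite",
        help="SQL store to run tests against (e.g. sqlite, postgres)",
    )


@pytest.fixture(scope="session")
def sqlstore_provider(request: pytest.FixtureRequest) -> str:
    # Tests and fixtures read the chosen provider from pytest's config.
    return request.config.getoption("--sqlstore-provider")
```

Once registered this way, the flags show up in `uv run pytest --help` like any built-in option.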

File diff suppressed because it is too large


@@ -1,33 +1,29 @@
-[tool.poetry]
+[project]
 name = "integration"
 version = "0.1.0"
 description = ""
-authors = ["therealpandey <vibhupandey28@gmail.com>"]
 readme = "README.md"
+authors = [{ name = "therealpandey", email = "vibhupandey28@gmail.com" }]
+requires-python = ">=3.13"
+dependencies = [
+    "pytest>=8.3.5",
+    "psycopg2>=2.9.10",
+    "testcontainers[clickhouse,keycloak,postgres]>=4.13.1",
+    "wiremock>=2.6.1",
+    "numpy>=2.3.2",
+    "clickhouse-connect>=0.8.18",
+    "svix-ksuid>=0.6.2",
+    "requests>=2.32.4",
+    "sqlalchemy>=2.0.43",
+    "selenium>=4.35.0",
+]

-[tool.poetry.dependencies]
-python = "^3.13"
-pytest = "^8.3.5"
-psycopg2 = "^2.9.10"
-testcontainers = {extras = ["clickhouse", "keycloak", "postgres"], version = "^4.13.1"}
-wiremock = "^2.6.1"
-numpy = "^2.3.2"
-clickhouse-connect = "^0.8.18"
-svix-ksuid = "^0.6.2"
-requests = "^2.32.4"
-sqlalchemy = "^2.0.43"
-selenium = "^4.35.0"
-
-[tool.poetry.group.dev.dependencies]
-pylint = "^3.3.6"
-isort = "^6.0.1"
-autoflake = "^2.3.1"
-black = "^25.1.0"
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+[dependency-groups]
+dev = [
+    "pylint>=3.3.6",
+    "isort>=6.0.1",
+    "autoflake>=2.3.1",
+    "black>=25.1.0",
+]

 [tool.pytest.ini_options]
 python_files = "src/**/**.py"
@@ -53,4 +49,4 @@ recursive = true
 remove-all-unused-imports = true
 remove-unused-variables = true
 exclude = [".venv/**"]
-in-place = true
\ No newline at end of file
+in-place = true


@@ -30,29 +30,35 @@ def test_logs_json_body_simple_searches(
     now = datetime.now(tz=timezone.utc)

     # Log with simple JSON body
-    log1_body = json.dumps({
-        "message": "User logged in successfully",
-        "status": 200,
-        "active": True,
-        "level": "info",
-        "code": 100
-    })
+    log1_body = json.dumps(
+        {
+            "message": "User logged in successfully",
+            "status": 200,
+            "active": True,
+            "level": "info",
+            "code": 100,
+        }
+    )

-    log2_body = json.dumps({
-        "message": "User authentication failed",
-        "status": 401,
-        "active": False,
-        "level": "error",
-        "code": 401
-    })
+    log2_body = json.dumps(
+        {
+            "message": "User authentication failed",
+            "status": 401,
+            "active": False,
+            "level": "error",
+            "code": 401,
+        }
+    )

-    log3_body = json.dumps({
-        "message": "Database connection established",
-        "status": 200,
-        "active": True,
-        "level": "info",
-        "code": 200
-    })
+    log3_body = json.dumps(
+        {
+            "message": "Database connection established",
+            "status": 200,
+            "active": True,
+            "level": "info",
+            "code": 200,
+        }
+    )

     insert_logs(
         [
@@ -102,7 +108,9 @@ def test_logs_json_body_simple_searches(
                     "disabled": False,
                     "limit": 100,
                     "offset": 0,
-                    "filter": {"expression": 'body.message CONTAINS "logged in"'},
+                    "filter": {
+                        "expression": 'body.message CONTAINS "logged in"'
+                    },
                     "order": [
                         {"key": {"name": "timestamp"}, "direction": "desc"},
                     ],
@@ -309,92 +317,56 @@ def test_logs_json_body_nested_keys(
     """
     now = datetime.now(tz=timezone.utc)

-    log1_body = json.dumps({
-        "user": {
-            "name": "john_doe",
-            "id": 12345,
-            "email": "john@example.com"
-        },
-        "request": {
-            "method": "GET",
-            "secure": True,
-            "headers": {
-                "content_type": "application/json",
-                "authorization": "Bearer token123"
-            }
-        },
-        "metadata": {
-            "tags": {
-                "environment": "production",
-                "region": "us-east-1"
-            }
-        },
-        "response": {
-            "status": {
-                "code": 200,
-                "message": "OK"
-            },
-            "latency": 123.45
-        }
-    })
+    log1_body = json.dumps(
+        {
+            "user": {"name": "john_doe", "id": 12345, "email": "john@example.com"},
+            "request": {
+                "method": "GET",
+                "secure": True,
+                "headers": {
+                    "content_type": "application/json",
+                    "authorization": "Bearer token123",
+                },
+            },
+            "metadata": {"tags": {"environment": "production", "region": "us-east-1"}},
+            "response": {"status": {"code": 200, "message": "OK"}, "latency": 123.45},
+        }
+    )

-    log2_body = json.dumps({
-        "user": {
-            "name": "jane_smith",
-            "id": 67890,
-            "email": "jane@example.com"
-        },
-        "request": {
-            "method": "POST",
-            "secure": False,
-            "headers": {
-                "content_type": "text/html",
-                "authorization": "Bearer token456"
-            }
-        },
-        "metadata": {
-            "tags": {
-                "environment": "staging",
-                "region": "us-west-2"
-            }
-        },
-        "response": {
-            "status": {
-                "code": 201,
-                "message": "Created"
-            },
-            "latency": 456.78
-        }
-    })
+    log2_body = json.dumps(
+        {
+            "user": {"name": "jane_smith", "id": 67890, "email": "jane@example.com"},
+            "request": {
+                "method": "POST",
+                "secure": False,
+                "headers": {
+                    "content_type": "text/html",
+                    "authorization": "Bearer token456",
+                },
+            },
+            "metadata": {"tags": {"environment": "staging", "region": "us-west-2"}},
+            "response": {
+                "status": {"code": 201, "message": "Created"},
+                "latency": 456.78,
+            },
+        }
+    )

-    log3_body = json.dumps({
-        "user": {
-            "name": "john_doe",
-            "id": 11111,
-            "email": "john2@example.com"
-        },
-        "request": {
-            "method": "PUT",
-            "secure": True,
-            "headers": {
-                "content_type": "application/json",
-                "authorization": "Bearer token789"
-            }
-        },
-        "metadata": {
-            "tags": {
-                "environment": "production",
-                "region": "eu-west-1"
-            }
-        },
-        "response": {
-            "status": {
-                "code": 200,
-                "message": "OK"
-            },
-            "latency": 123.45
-        }
-    })
+    log3_body = json.dumps(
+        {
+            "user": {"name": "john_doe", "id": 11111, "email": "john2@example.com"},
+            "request": {
+                "method": "PUT",
+                "secure": True,
+                "headers": {
+                    "content_type": "application/json",
+                    "authorization": "Bearer token789",
+                },
+            },
+            "metadata": {"tags": {"environment": "production", "region": "eu-west-1"}},
+            "response": {"status": {"code": 200, "message": "OK"}, "latency": 123.45},
+        }
+    )

     insert_logs(
         [
@@ -505,7 +477,9 @@ def test_logs_json_body_nested_keys(
     assert len(results) == 1
     rows = results[0]["rows"]
     assert len(rows) == 2  # log1 and log3 have secure = true
-    secure_values = [json.loads(row["data"]["body"])["request"]["secure"] for row in rows]
+    secure_values = [
+        json.loads(row["data"]["body"])["request"]["secure"] for row in rows
+    ]
     assert all(secure is True for secure in secure_values)

     # Test 3: Search by body.response.latency = 123.45
@@ -589,7 +563,9 @@ def test_logs_json_body_nested_keys(
     assert len(results) == 1
     rows = results[0]["rows"]
     assert len(rows) == 2  # log1 and log3 have status.code = 200
-    status_codes = [json.loads(row["data"]["body"])["response"]["status"]["code"] for row in rows]
+    status_codes = [
+        json.loads(row["data"]["body"])["response"]["status"]["code"] for row in rows
+    ]
     assert all(code == 200 for code in status_codes)
@@ -610,35 +586,41 @@ def test_logs_json_body_array_membership(
     """
     now = datetime.now(tz=timezone.utc)

-    log1_body = json.dumps({
-        "tags": ["production", "api", "critical"],
-        "ids": [100, 200, 300],
-        "flags": [True, False, True],
-        "users": [
-            {"name": "alice", "role": "admin"},
-            {"name": "bob", "role": "user"}
-        ]
-    })
+    log1_body = json.dumps(
+        {
+            "tags": ["production", "api", "critical"],
+            "ids": [100, 200, 300],
+            "flags": [True, False, True],
+            "users": [
+                {"name": "alice", "role": "admin"},
+                {"name": "bob", "role": "user"},
+            ],
+        }
+    )

-    log2_body = json.dumps({
-        "tags": ["staging", "api", "test"],
-        "ids": [200, 400, 500],
-        "flags": [False, False, True],
-        "users": [
-            {"name": "charlie", "role": "user"},
-            {"name": "david", "role": "admin"}
-        ]
-    })
+    log2_body = json.dumps(
+        {
+            "tags": ["staging", "api", "test"],
+            "ids": [200, 400, 500],
+            "flags": [False, False, True],
+            "users": [
+                {"name": "charlie", "role": "user"},
+                {"name": "david", "role": "admin"},
+            ],
+        }
+    )

-    log3_body = json.dumps({
-        "tags": ["production", "web", "important"],
-        "ids": [100, 600, 700],
-        "flags": [True, True, False],
-        "users": [
-            {"name": "alice", "role": "admin"},
-            {"name": "eve", "role": "user"}
-        ]
-    })
+    log3_body = json.dumps(
+        {
+            "tags": ["production", "web", "important"],
+            "ids": [100, 600, 700],
+            "flags": [True, True, False],
+            "users": [
+                {"name": "alice", "role": "admin"},
+                {"name": "eve", "role": "user"},
+            ],
+        }
+    )

     insert_logs(
         [
@@ -817,58 +799,68 @@ def test_logs_json_body_listing(
     logs_data = [
         {
             "timestamp": now - timedelta(seconds=5),
-            "body": json.dumps({
-                "id": "log-1",
-                "service": "auth",
-                "action": "login",
-                "status": "success",
-                "user_id": 1
-            }),
-            "severity": "INFO"
+            "body": json.dumps(
+                {
+                    "id": "log-1",
+                    "service": "auth",
+                    "action": "login",
+                    "status": "success",
+                    "user_id": 1,
+                }
+            ),
+            "severity": "INFO",
         },
         {
             "timestamp": now - timedelta(seconds=4),
-            "body": json.dumps({
-                "id": "log-2",
-                "service": "auth",
-                "action": "logout",
-                "status": "success",
-                "user_id": 2
-            }),
-            "severity": "INFO"
+            "body": json.dumps(
+                {
+                    "id": "log-2",
+                    "service": "auth",
+                    "action": "logout",
+                    "status": "success",
+                    "user_id": 2,
+                }
+            ),
+            "severity": "INFO",
         },
         {
             "timestamp": now - timedelta(seconds=3),
-            "body": json.dumps({
-                "id": "log-3",
-                "service": "payment",
-                "action": "charge",
-                "status": "success",
-                "user_id": 1
-            }),
-            "severity": "INFO"
+            "body": json.dumps(
+                {
+                    "id": "log-3",
+                    "service": "payment",
+                    "action": "charge",
+                    "status": "success",
+                    "user_id": 1,
+                }
+            ),
+            "severity": "INFO",
         },
         {
             "timestamp": now - timedelta(seconds=2),
-            "body": json.dumps({
-                "id": "log-4",
-                "service": "auth",
-                "action": "login",
-                "status": "failed",
-                "user_id": 3
-            }),
-            "severity": "ERROR"
+            "body": json.dumps(
+                {
+                    "id": "log-4",
+                    "service": "auth",
+                    "action": "login",
+                    "status": "failed",
+                    "user_id": 3,
+                }
+            ),
+            "severity": "ERROR",
         },
         {
             "timestamp": now - timedelta(seconds=1),
-            "body": json.dumps({
-                "id": "log-5",
-                "service": "payment",
-                "action": "refund",
-                "status": "success",
-                "user_id": 2
-            }),
-            "severity": "INFO"
+            "body": json.dumps(
+                {
+                    "id": "log-5",
+                    "service": "payment",
+                    "action": "refund",
+                    "status": "success",
+                    "user_id": 2,
+                }
+            ),
+            "severity": "INFO",
         },
     ]
@@ -1032,7 +1024,9 @@ def test_logs_json_body_listing(
                     "disabled": False,
                     "limit": 100,
                     "offset": 0,
-                    "filter": {"expression": 'body.service = "auth" AND body.action = "login"'},
+                    "filter": {
+                        "expression": 'body.service = "auth" AND body.action = "login"'
+                    },
                     "order": [
                         {"key": {"name": "timestamp"}, "direction": "desc"},
                     ],
@@ -1076,7 +1070,9 @@ def test_logs_json_body_listing(
                     "disabled": False,
                     "limit": 100,
                     "offset": 0,
-                    "filter": {"expression": 'body.service = "auth" OR body.service = "payment"'},
+                    "filter": {
+                        "expression": 'body.service = "auth" OR body.service = "payment"'
+                    },
                     "order": [
                         {"key": {"name": "timestamp"}, "direction": "desc"},
                     ],

tests/integration/uv.lock (generated file, 1100 additions)

File diff suppressed because it is too large