Adds tests/benchmarks/ with pytest-benchmark coverage of the hot codec paths and end-to-end SELECT/INSERT/pool/async round-trips. Establishes a committed baseline.json so PRs can be regression-checked at review via --benchmark-compare. * test_codec_perf.py (16): decode/encode_param/parse_tuple_payload micro-benchmarks - run without container, suitable for pre-merge CI. * test_select_perf.py (4): SELECT round-trips - 1-row latency floor, 10-row, 1k-row full fetch, parameterized. * test_insert_perf.py (3): single-row INSERT, executemany 100 / 1000. * test_pool_perf.py (3): cold connect, pool acquire/release, pool acquire + query + release. * test_async_perf.py (2): async round-trip overhead, 10x concurrent. * baseline.json: committed snapshot, 28 measurements. * benchmark pytest marker, gated off by default. * Makefile: bench / bench-codec / bench-save targets; test-integration excludes benchmarks for speed. Headline numbers (dev container loopback): * decode(int): 181 ns * parse_tuple 5 cols: 2.87 µs/row * SELECT 1 round-trip: 177 µs * Pool acquire+query+release: 295 µs * Cold connect: 11.2 ms (72x slower than pool) UTF-8 decode carries no measurable cost vs iso-8859-1 - confirms Phase 20 didn't regress anything. Total: 69 unit + 211 integration + 28 benchmark = 308 tests.
# informix-db — common dev commands
#
# uv-managed; run `make help` for the full target list.

# Image digest pinned in tests/docker-compose.yml; mirrored here for
# tab-complete-able commands like `make ifx-logs` that don't go through
# docker-compose.
# `?=` keeps this overridable: `make ifx-logs IFX_CONTAINER=other-name`.
IFX_CONTAINER ?= informix-db-test
# Every target in this file is a command, not a file, so all are declared
# phony — otherwise a stray file named e.g. `clean` or `bench` would make
# that target silently "up to date". The bench* targets were previously
# missing from this list even though they are defined below.
.PHONY: help install lint format test test-integration test-all test-pdu \
	bench bench-codec bench-save \
	ifx-up ifx-down ifx-logs ifx-shell ifx-status \
	capture clean
# Self-documenting help: every user-facing target carries a trailing
# `## description`; grep pulls those lines out of this makefile and awk
# splits on the `## ` marker to print an aligned target/description table.
help: ## Show this help
	@grep -E '^[a-zA-Z_-]+:.*## ' $(MAKEFILE_LIST) \
		| awk -F':.*## ' '{printf " %-20s %s\n", $$1, $$2}'
# ----------------------------------------------------------------------------
# Python / dev workflow
# ----------------------------------------------------------------------------

# Syncs the uv-managed virtualenv, including the `dev` extra.
install: ## Sync dev dependencies (uv sync --extra dev)
	uv sync --extra dev
# Check-only lint pass; `make format` applies the auto-fixes.
lint: ## Run ruff
	uv run ruff check src/ tests/
# Formats first, then applies ruff's auto-fixable lint rules in a second pass.
format: ## Auto-format with ruff
	uv run ruff format src/ tests/
	uv run ruff check src/ tests/ --fix
# Plain pytest run — no marker filter, no Docker/Informix container required.
test: ## Run unit tests (no Docker required)
	uv run pytest
# The -m expression selects integration-marked tests while explicitly
# excluding benchmark-marked ones to keep this run fast; `make bench`
# runs the benchmarks.
test-integration: ## Run integration tests (needs Informix container; see `make ifx-up`). Excludes benchmarks; use `make bench` for those.
	uv run pytest -m "integration and not benchmark"
# Unit + integration in one run; benchmarks stay excluded (see `make bench`).
test-all: ## Run unit + integration tests (no benchmarks; use `make bench` for those)
	uv run pytest -m "not benchmark"
# Focused, verbose run of the JDBC-vs-Python PDU regression test only.
test-pdu: ## Run only the JDBC-vs-Python PDU regression test
	uv run pytest tests/test_pdu_match.py -v
# --benchmark-only skips non-benchmark tests; the result table shows the
# columns listed below, sorted by mean time.
bench: ## Run all benchmarks (needs container for end-to-end; codec works standalone)
	uv run pytest tests/benchmarks/ -m benchmark --benchmark-only \
		--benchmark-columns=mean,stddev,ops,rounds \
		--benchmark-sort=mean
# Same flags as `bench`, restricted to the codec micro-benchmark file,
# which runs standalone (no Informix container needed).
bench-codec: ## Run codec micro-benchmarks only (no container required)
	uv run pytest tests/benchmarks/test_codec_perf.py -m benchmark --benchmark-only \
		--benchmark-columns=mean,stddev,ops,rounds \
		--benchmark-sort=mean
# Saves a pytest-benchmark run (named "run") under tests/benchmarks/.results/;
# promote it manually — copy to baseline.json — when it should become the
# committed comparison baseline.
bench-save: ## Save current bench run under .results/ (manual: copy to baseline.json)
	uv run pytest tests/benchmarks/ -m benchmark --benchmark-only \
		--benchmark-storage=tests/benchmarks/.results \
		--benchmark-save=run
# ----------------------------------------------------------------------------
# Informix dev container
# ----------------------------------------------------------------------------

# NOTE(review): the port and credentials echoed below presumably mirror
# tests/docker-compose.yml — keep the two in sync.
ifx-up: ## Start the Informix dev container (pinned by digest)
	docker compose -f tests/docker-compose.yml up -d
	@echo " Container: $(IFX_CONTAINER)"
	@echo " Listener: 127.0.0.1:9088 (SQLI native)"
	@echo " Login: informix / in4mix on database sysmaster"
# `down` removes the container as well as stopping it (vs `docker compose stop`).
ifx-down: ## Stop and remove the Informix container
	docker compose -f tests/docker-compose.yml down
# Follows (-f) the container log stream; Ctrl-C to detach.
ifx-logs: ## Tail the container logs
	docker logs -f $(IFX_CONTAINER)
# Interactive bash inside the running container (requires `make ifx-up` first).
ifx-shell: ## Drop into a shell inside the container
	docker exec -it $(IFX_CONTAINER) bash
# Two quick probes: docker-level status/ports, then a TCP connect check
# (`nc -z`) against the listener port; `head -1` keeps nc's output to one line.
# NOTE(review): requires `nc` on the host — its absence is not handled here.
ifx-status: ## Check container health and listener readiness
	@docker ps --filter name=$(IFX_CONTAINER) --format 'table {{.Names}}\t{{.Status}}\t{{.Ports}}'
	@nc -zv 127.0.0.1 9088 2>&1 | head -1
# ----------------------------------------------------------------------------
# Phase 0 spike: re-capture wire traffic against the dev container
# ----------------------------------------------------------------------------

# For each scenario: start socat as a hex-dumping (-x) TCP proxy
# (9090 -> 9088) in the background, sleep 0.4s (presumably to let it bind),
# run the JDBC reference client through the proxy, sleep 0.3s to let trailing
# packets flush, then tear the proxy down. The dump goes to socat's stderr,
# redirected into docs/CAPTURES/<scenario>.socat.log.
# NOTE(review): the fixed proxy port 9090 means this target is not safe to
# run in parallel with itself.
capture: ## Re-capture all three reference scenarios (JDBC) under socat
	@for s in connect-only select-1 dml-cycle; do \
		echo "=== $$s ==="; \
		socat -d -d -x TCP-LISTEN:9090,reuseaddr TCP:127.0.0.1:9088 \
			2>"docs/CAPTURES/$$s.socat.log" & \
		SOCAT_PID=$$!; \
		sleep 0.4; \
		IFX_PORT=9090 java -cp "build/ifxjdbc.jar:build/" tests.reference.RefClient $$s; \
		sleep 0.3; \
		kill $$SOCAT_PID 2>/dev/null; \
		wait 2>/dev/null; \
		echo " → docs/CAPTURES/$$s.socat.log"; \
	done
# ----------------------------------------------------------------------------
# Cleanup
# ----------------------------------------------------------------------------

# Removes generated artifacts only; docs/CAPTURES/ and the decompiled JDBC
# source are deliberately untouched. `-prune` stops find from descending
# into each __pycache__ directory it is about to delete — without it, find
# walks the contents pointlessly and can emit "No such file or directory"
# noise when matches nest.
clean: ## Remove build artifacts and caches (keeps captures and decompiled JDBC source)
	rm -rf dist/ .pytest_cache/ .ruff_cache/ .mypy_cache/
	find src tests -name __pycache__ -type d -prune -exec rm -rf {} +
|