# informix-db — common dev commands
#
# uv-managed; run `make help` for the full target list.
# Image digest pinned in tests/docker-compose.yml; mirrored here for
# tab-complete-able commands like `make ifx-logs` that don't go through
# docker-compose.

# Container name; override with `make IFX_CONTAINER=... <target>`.
IFX_CONTAINER ?= informix-db-test

# Every target below is a command, not a file — declare them all phony
# (bench/bench-codec/bench-save included, so a stray file named `bench`
# can never shadow the target).
.PHONY: help install lint format test test-integration test-all test-pdu \
        bench bench-codec bench-save \
        ifx-up ifx-down ifx-logs ifx-shell ifx-status \
        capture clean

help: ## Show this help
	@grep -E '^[a-zA-Z_-]+:.*## ' $(MAKEFILE_LIST) \
		| awk -F':.*## ' '{printf " %-20s %s\n", $$1, $$2}'

# ----------------------------------------------------------------------------
# Python / dev workflow
# ----------------------------------------------------------------------------

install: ## Sync dev dependencies (uv sync --extra dev)
	uv sync --extra dev

lint: ## Run ruff
	uv run ruff check src/ tests/

format: ## Auto-format with ruff
	uv run ruff format src/ tests/
	uv run ruff check src/ tests/ --fix

test: ## Run unit tests (no Docker required)
	uv run pytest

test-integration: ## Run integration tests (needs Informix container; see `make ifx-up`). Excludes benchmarks; use `make bench` for those.
	uv run pytest -m "integration and not benchmark"

test-all: ## Run unit + integration tests (no benchmarks; use `make bench` for those)
	uv run pytest -m "not benchmark"

test-pdu: ## Run only the JDBC-vs-Python PDU regression test
	uv run pytest tests/test_pdu_match.py -v

bench: ## Run all benchmarks (needs container for end-to-end; codec works standalone)
	uv run pytest tests/benchmarks/ -m benchmark --benchmark-only \
		--benchmark-columns=mean,stddev,ops,rounds \
		--benchmark-sort=mean

bench-codec: ## Run codec micro-benchmarks only (no container required)
	uv run pytest tests/benchmarks/test_codec_perf.py -m benchmark --benchmark-only \
		--benchmark-columns=mean,stddev,ops,rounds \
		--benchmark-sort=mean

bench-save: ## Save current bench run under .results/ (manual: copy to baseline.json)
	uv run pytest tests/benchmarks/ -m benchmark --benchmark-only \
		--benchmark-storage=tests/benchmarks/.results \
		--benchmark-save=run

# ----------------------------------------------------------------------------
# Informix dev container
# ----------------------------------------------------------------------------

ifx-up: ## Start the Informix dev container (pinned by digest)
	docker compose -f tests/docker-compose.yml up -d
	@echo " Container: $(IFX_CONTAINER)"
	@echo " Listener: 127.0.0.1:9088 (SQLI native)"
	@echo " Login: informix / in4mix on database sysmaster"

ifx-down: ## Stop and remove the Informix container
	docker compose -f tests/docker-compose.yml down

ifx-logs: ## Tail the container logs
	docker logs -f $(IFX_CONTAINER)

ifx-shell: ## Drop into a shell inside the container
	docker exec -it $(IFX_CONTAINER) bash

ifx-status: ## Check container health and listener readiness
	@docker ps --filter name=$(IFX_CONTAINER) --format 'table {{.Names}}\t{{.Status}}\t{{.Ports}}'
	@nc -zv 127.0.0.1 9088 2>&1 | head -1

# ----------------------------------------------------------------------------
# Phase 0 spike: re-capture wire traffic against the dev container
# ----------------------------------------------------------------------------

capture: ## Re-capture all three reference scenarios (JDBC) under socat
	@for s in connect-only select-1 dml-cycle; do \
		echo "=== $$s ==="; \
		socat -d -d -x TCP-LISTEN:9090,reuseaddr TCP:127.0.0.1:9088 \
			2>"docs/CAPTURES/$$s.socat.log" & \
		SOCAT_PID=$$!; \
		sleep 0.4; \
		IFX_PORT=9090 java -cp "build/ifxjdbc.jar:build/" tests.reference.RefClient $$s; \
		sleep 0.3; \
		kill $$SOCAT_PID 2>/dev/null || true; \
		wait 2>/dev/null || true; \
		echo " → docs/CAPTURES/$$s.socat.log"; \
	done

# ----------------------------------------------------------------------------
# Cleanup
# ----------------------------------------------------------------------------

clean: ## Remove build artifacts and caches (keeps captures and decompiled JDBC source)
	rm -rf dist/ .pytest_cache/ .ruff_cache/ .mypy_cache/
	find src tests -name __pycache__ -type d -exec rm -rf {} +