Extends the IfxPy comparison bench script with scaling workloads (1k/10k/100k rows for both executemany and SELECT). Re-runs the full comparison with consistent measurement methodology and updates the README with the actually-correct numbers. Earlier comparison runs reported informix-db winning all 5 benchmarks. Re-running select_bench_table_all with consistent measurement gives 3.04 ms, not the 891 us I cited earlier - a 3.4x discrepancy attributable to noisy warmup + small-fixture artifacts. The "we win everything" framing was wrong. Corrected comparison reveals two clear stories: Bulk-insert: pure-Python wins 1.6x at scale. executemany(10k): IfxPy 259ms -> us 161ms (1.6x faster) executemany(100k): IfxPy 2376ms -> us 1487ms (1.6x faster) Reason: Phase 33's pipelining eliminates per-row RTT. IfxPy's per-call API can't pipeline. Large-fetch: IfxPy wins 2.3-2.4x at scale. SELECT 1k rows: IfxPy 1.2ms / us 2.7ms (IfxPy 2.3x) SELECT 10k rows: IfxPy 11.3ms / us 25.8ms (IfxPy 2.3x) SELECT 100k rows: IfxPy 112ms / us 271ms (IfxPy 2.4x) Reason: C-level fetch_tuple at ~1.1us/row beats Python parse_tuple_payload at ~2.7us/row. Real C-vs-Python codec gap showing up at scale. For everyday workloads (single SELECT in a request, INSERT a handful of rows), drivers are within 5-25%. For workloads where the gap widens, direction depends on what you're doing - bulk-write favors us, bulk-read favors IfxPy. README's "Compared to IfxPy" section rewritten with the corrected numbers and an honest "when to prefer which" subsection. tests/benchmarks/compare/README.md mirror updated. Net narrative: a "faster at bulk-write, slower at bulk-read, comparable elsewhere" comparison story is more honest and more durable than a "we win everything" claim that would have collapsed the first time a user ran their own benchmark. Side note (lint): one ambiguous unicode `×` in cursors.py replaced with `x`. Phase 37 ticket: parse_tuple_payload is the bottleneck at scale.
Closing the 1.6 us/row gap to IfxPy would make us competitive on bulk-fetch too. Possible approaches: Cython codec, deeper inlining, per-column dispatch pre-bake.
108 lines
3.4 KiB
TOML
# Core package metadata (PEP 621).
[project]
name = "informix-db"
version = "2026.05.05.9"
description = "Pure-Python driver for IBM Informix IDS — speaks the SQLI wire protocol over raw sockets. No CSDK, no JVM, no native libraries."
readme = "README.md"
license = { text = "MIT" }
authors = [{ name = "Ryan Malloy", email = "ryan@supported.systems" }]
requires-python = ">=3.10"
keywords = ["informix", "database", "sqli", "db-api", "pep-249", "asyncio", "async"]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Framework :: AsyncIO",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3.14",
    "Topic :: Database",
    "Topic :: Database :: Front-Ends",
    "Typing :: Typed",
]
# Pure-Python, zero runtime dependencies by design.
dependencies = []

[project.urls]
Homepage = "https://github.com/rsp2k/informix-db"
Documentation = "https://github.com/rsp2k/informix-db/tree/main/docs"
Issues = "https://github.com/rsp2k/informix-db/issues"

# Installable extras: `pip install informix-db[dev]`.
# NOTE(review): a separate `dev` group also exists under [dependency-groups]
# (PEP 735) with test-only plugins — confirm whether the two are meant to diverge.
[project.optional-dependencies]
dev = [
    "pytest>=8.0",
    "ruff>=0.6",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/informix_db"]

[tool.hatch.build.targets.sdist]
# Defense in depth: exclude operator-private and dev-only artifacts from the sdist
# (the wheel doesn't ship these by default, but the sdist would).
# See ~/.claude/rules/python.md for the full pre-publish PII audit playbook.
exclude = [
    "CLAUDE.md",          # operator-private context
    ".env", ".env.local", ".env.*",  # NOTE: the `.env.*` glob also covers `.env.local`; kept explicit for clarity
    ".mcp.json",          # may contain local filesystem paths
    "build/",             # decompiled JDBC, downloaded JARs
    "audits/",
    "docs/CAPTURES/",     # spike artifacts; tests can re-capture against the dev container
    "tests/reference/",   # Java reference client — spike infra
    ".pytest_cache/", ".ruff_cache/", ".mypy_cache/",
    "dist/", "*.egg-info/",
]

[tool.ruff]
line-length = 100
target-version = "py310"  # matches requires-python lower bound
src = ["src", "tests"]

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort (import sorting)
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "UP",  # pyupgrade
    "SIM", # flake8-simplify
    "PTH", # flake8-use-pathlib
    "RUF", # ruff-specific
]
ignore = [
    "E501", # line too long — handled by formatter
]

[tool.ruff.lint.per-file-ignores]
"tests/**" = ["B011"] # allow assert False in tests

[tool.pytest.ini_options]
minversion = "8.0"
testpaths = ["tests"]
asyncio_mode = "auto" # pytest-asyncio: auto-detect ``async def`` tests
addopts = [
    "-ra", # short summary for non-passing
    "--strict-markers",
    "--strict-config",
    "-m", "not integration and not benchmark", # default: unit-only. Override with: pytest -m integration / -m benchmark
]
markers = [
    "integration: requires a running Informix container (docker compose up); skipped by default",
    "benchmark: pytest-benchmark performance test; skipped by default. Run with `make bench`.",
]

# PEP 735 dependency groups (installed by tools like uv; not published extras).
[dependency-groups]
dev = [
    "pytest-asyncio>=1.3.0",
    "pytest-benchmark>=5.2.3",
]