feat: Add Python MCP bridge and build tooling
Some checks are pending
Build Ghidra Plugin / build (push) Waiting to run
Some checks are pending
Build Ghidra Plugin / build (push) Waiting to run
- Add ghydramcp Python package with FastMCP server implementation - Add docker-compose.yml for easy container management - Add Makefile with build/run targets - Add QUICKSTART.md for getting started - Add uv.lock for reproducible dependencies
This commit is contained in:
parent
a3ad70d302
commit
28b81ff359
171
Makefile
Normal file
171
Makefile
Normal file
@ -0,0 +1,171 @@
|
|||||||
|
# GhydraMCP Makefile
|
||||||
|
# Convenient commands for Docker and development operations
|
||||||
|
|
||||||
|
.PHONY: help build build-dev up up-dev down down-dev logs logs-dev \
|
||||||
|
shell status clean analyze test health
|
||||||
|
|
||||||
|
# Default target
|
||||||
|
help:
|
||||||
|
@echo "GhydraMCP Docker Management"
|
||||||
|
@echo "============================"
|
||||||
|
@echo ""
|
||||||
|
@echo "Build commands:"
|
||||||
|
@echo " make build Build production Docker image"
|
||||||
|
@echo " make build-dev Build development Docker image"
|
||||||
|
@echo " make build-all Build both images"
|
||||||
|
@echo ""
|
||||||
|
@echo "Run commands:"
|
||||||
|
@echo " make up Start production container"
|
||||||
|
@echo " make up-dev Start development container"
|
||||||
|
@echo " make down Stop production container"
|
||||||
|
@echo " make down-dev Stop development container"
|
||||||
|
@echo " make down-all Stop all containers"
|
||||||
|
@echo ""
|
||||||
|
@echo "Analysis commands:"
|
||||||
|
@echo " make analyze FILE=path/to/binary Analyze a binary"
|
||||||
|
@echo " make analyze-bg FILE=path/to/binary Analyze in background"
|
||||||
|
@echo ""
|
||||||
|
@echo "Utility commands:"
|
||||||
|
@echo " make shell Start interactive shell in container"
|
||||||
|
@echo " make logs View production container logs"
|
||||||
|
@echo " make logs-dev View development container logs"
|
||||||
|
@echo " make status Check container status"
|
||||||
|
@echo " make health Check API health"
|
||||||
|
@echo " make clean Remove containers and volumes"
|
||||||
|
@echo " make clean-all Remove everything including images"
|
||||||
|
@echo ""
|
||||||
|
@echo "MCP Server commands:"
|
||||||
|
@echo " make mcp Start the MCP server (Python)"
|
||||||
|
@echo " make mcp-dev Start MCP server in development mode"
|
||||||
|
@echo ""
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Build Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
build:
|
||||||
|
docker compose build ghydramcp
|
||||||
|
|
||||||
|
build-dev:
|
||||||
|
docker compose build ghydramcp-dev
|
||||||
|
|
||||||
|
build-all: build build-dev
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Run Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
up:
|
||||||
|
docker compose --profile prod up -d ghydramcp
|
||||||
|
@echo "GhydraMCP starting... checking health in 30 seconds"
|
||||||
|
@sleep 30
|
||||||
|
@$(MAKE) health || echo "Server may still be starting up..."
|
||||||
|
|
||||||
|
up-dev:
|
||||||
|
docker compose --profile dev up -d ghydramcp-dev
|
||||||
|
@echo "GhydraMCP (dev) starting..."
|
||||||
|
|
||||||
|
down:
|
||||||
|
docker compose --profile prod down
|
||||||
|
|
||||||
|
down-dev:
|
||||||
|
docker compose --profile dev down
|
||||||
|
|
||||||
|
down-all:
|
||||||
|
docker compose --profile prod --profile dev --profile debug down
|
||||||
|
|
||||||
|
restart: down up
|
||||||
|
|
||||||
|
restart-dev: down-dev up-dev
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Analysis Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Analyze a binary file
|
||||||
|
# Usage: make analyze FILE=/path/to/binary
|
||||||
|
analyze:
|
||||||
|
ifndef FILE
|
||||||
|
@echo "Error: FILE is required. Usage: make analyze FILE=/path/to/binary"
|
||||||
|
@exit 1
|
||||||
|
endif
|
||||||
|
@echo "Analyzing: $(FILE)"
|
||||||
|
docker compose run --rm -v "$(dir $(FILE)):/binaries:ro" ghydramcp /binaries/$(notdir $(FILE))
|
||||||
|
|
||||||
|
# Analyze in background (detached)
|
||||||
|
analyze-bg:
|
||||||
|
ifndef FILE
|
||||||
|
@echo "Error: FILE is required. Usage: make analyze-bg FILE=/path/to/binary"
|
||||||
|
@exit 1
|
||||||
|
endif
|
||||||
|
@echo "Starting background analysis of: $(FILE)"
|
||||||
|
docker compose run -d -v "$(dir $(FILE)):/binaries:ro" ghydramcp /binaries/$(notdir $(FILE))
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Utility Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
shell:
|
||||||
|
docker compose --profile debug run --rm ghydramcp-shell
|
||||||
|
|
||||||
|
logs:
|
||||||
|
docker compose logs -f ghydramcp
|
||||||
|
|
||||||
|
logs-dev:
|
||||||
|
docker compose logs -f ghydramcp-dev
|
||||||
|
|
||||||
|
status:
|
||||||
|
@echo "=== Container Status ==="
|
||||||
|
@docker compose ps -a
|
||||||
|
@echo ""
|
||||||
|
@echo "=== Resource Usage ==="
|
||||||
|
@docker stats --no-stream $$(docker compose ps -q 2>/dev/null) 2>/dev/null || echo "No containers running"
|
||||||
|
|
||||||
|
health:
|
||||||
|
@echo "Checking GhydraMCP API health..."
|
||||||
|
@curl -sf http://localhost:$${GHYDRA_PORT:-8192}/ | python3 -m json.tool 2>/dev/null \
|
||||||
|
|| echo "API not responding (server may be starting or binary being analyzed)"
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Cleanup Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
clean:
|
||||||
|
docker compose --profile prod --profile dev --profile debug down -v
|
||||||
|
@echo "Containers and volumes removed"
|
||||||
|
|
||||||
|
clean-all: clean
|
||||||
|
docker rmi ghydramcp:latest ghydramcp:dev 2>/dev/null || true
|
||||||
|
@echo "Images removed"
|
||||||
|
|
||||||
|
prune:
|
||||||
|
docker system prune -f
|
||||||
|
@echo "Docker system pruned"
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# MCP Server Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
mcp:
|
||||||
|
uv run python -m ghydramcp
|
||||||
|
|
||||||
|
mcp-dev:
|
||||||
|
uv run python -m ghydramcp --verbose
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Development Commands
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
test:
|
||||||
|
uv run pytest tests/ -v
|
||||||
|
|
||||||
|
lint:
|
||||||
|
uv run ruff check src/
|
||||||
|
|
||||||
|
format:
|
||||||
|
uv run ruff format src/
|
||||||
|
|
||||||
|
# Check if binaries directory exists
|
||||||
|
check-binaries:
|
||||||
|
@mkdir -p binaries
|
||||||
|
@echo "Binaries directory ready at ./binaries/"
|
||||||
328
QUICKSTART.md
Normal file
328
QUICKSTART.md
Normal file
@ -0,0 +1,328 @@
|
|||||||
|
# GhydraMCP Quick Start Guide
|
||||||
|
|
||||||
|
## What is GhydraMCP?
|
||||||
|
|
||||||
|
GhydraMCP is a complete reverse engineering platform that combines:
|
||||||
|
- **Ghidra** - NSA's powerful binary analysis tool
|
||||||
|
- **Docker** - Containerized, reproducible analysis environment
|
||||||
|
- **HTTP REST API** - HATEOAS-compliant REST interface
|
||||||
|
- **MCP Server** - FastMCP-based Model Context Protocol integration
|
||||||
|
- **ARM Firmware Support** - Tools for analyzing raw embedded firmware
|
||||||
|
|
||||||
|
## 5-Minute Quick Start
|
||||||
|
|
||||||
|
### 1. Analyze a Standard Binary (ELF/PE/Mach-O)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd /home/rpm/claude/ghydramcp/GhydraMCP
|
||||||
|
|
||||||
|
# Build the Docker image (one time)
|
||||||
|
docker build -t ghydramcp:latest -f docker/Dockerfile .
|
||||||
|
|
||||||
|
# Analyze any standard binary
|
||||||
|
docker run -d --name my-analysis \
|
||||||
|
-p 8192:8192 \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/your-binary
|
||||||
|
|
||||||
|
# Wait ~20 seconds for analysis, then access HTTP API
|
||||||
|
curl http://localhost:8192/
|
||||||
|
curl http://localhost:8192/functions | jq '.functions[] | {name, address}'
|
||||||
|
curl http://localhost:8192/functions/<address>/decompile
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Analyze ARM Firmware (Raw Binary)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Step 1: Create ELF wrapper
|
||||||
|
python3 docker/arm_firmware_prep.py \
|
||||||
|
your-firmware.bin \
|
||||||
|
binaries/your-firmware.elf \
|
||||||
|
0x00000000
|
||||||
|
|
||||||
|
# Step 2: Analyze normally
|
||||||
|
docker run -d --name arm-firmware \
|
||||||
|
-p 8192:8192 \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/your-firmware.elf
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Use the MCP Server
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# The MCP server is located at:
|
||||||
|
cd /home/rpm/claude/ghydramcp/GhydraMCP
|
||||||
|
./launch.sh
|
||||||
|
|
||||||
|
# Or with uv:
|
||||||
|
cd GhydraMCP && uv run ghydramcp
|
||||||
|
```
|
||||||
|
|
||||||
|
## HTTP API Overview
|
||||||
|
|
||||||
|
Once analysis completes, the API is available at `http://localhost:8192/`:
|
||||||
|
|
||||||
|
### Core Endpoints
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Program information
|
||||||
|
GET /program
|
||||||
|
|
||||||
|
# Functions
|
||||||
|
GET /functions # List all functions
|
||||||
|
GET /functions/<address> # Function details
|
||||||
|
GET /functions/<address>/decompile # Decompiled C code
|
||||||
|
GET /functions/<address>/disassembly # Assembly listing
|
||||||
|
GET /functions/<address>/variables # Local variables
|
||||||
|
|
||||||
|
# Analysis
|
||||||
|
GET /analysis/callgraph?name=main&max_depth=3
|
||||||
|
GET /analysis/dataflow?address=<addr>&direction=forward
|
||||||
|
|
||||||
|
# Memory
|
||||||
|
GET /memory/<address>?length=256&format=hex
|
||||||
|
POST /memory/<address> # Write bytes
|
||||||
|
|
||||||
|
# Data & Structures
|
||||||
|
GET /data/strings
|
||||||
|
GET /structs
|
||||||
|
GET /xrefs?to_addr=<addr>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Response Format (HATEOAS)
|
||||||
|
|
||||||
|
All responses include navigation links:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"result": {
|
||||||
|
"name": "main",
|
||||||
|
"address": "00101380",
|
||||||
|
"signature": "int main(void)"
|
||||||
|
},
|
||||||
|
"_links": {
|
||||||
|
"self": "/functions/00101380",
|
||||||
|
"decompile": "/functions/00101380/decompile",
|
||||||
|
"disassembly": "/functions/00101380/disassembly"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## MCP Tools Overview
|
||||||
|
|
||||||
|
Use with Claude Code or any MCP client:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Functions
|
||||||
|
functions_list(port=8192, page_size=50)
|
||||||
|
functions_decompile(address="00101380", port=8192)
|
||||||
|
functions_get(name="main", port=8192)
|
||||||
|
|
||||||
|
# Analysis
|
||||||
|
analysis_get_callgraph(name="main", max_depth=3, port=8192)
|
||||||
|
analysis_get_dataflow(address="00101380", direction="forward", port=8192)
|
||||||
|
|
||||||
|
# Data
|
||||||
|
data_list_strings(port=8192, grep="password")
|
||||||
|
structs_list(port=8192)
|
||||||
|
|
||||||
|
# Docker Management
|
||||||
|
docker_status()
|
||||||
|
docker_start(binary_path="/path/to/binary", port=8192)
|
||||||
|
docker_stop(name_or_id="container-name")
|
||||||
|
docker_logs(name_or_id="container-name", tail=100)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Workflows
|
||||||
|
|
||||||
|
### Find Interesting Functions
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List all functions
|
||||||
|
curl http://localhost:8192/functions | jq '.functions[].name'
|
||||||
|
|
||||||
|
# Search for crypto-related functions
|
||||||
|
curl http://localhost:8192/functions | jq '.functions[] | select(.name | test("crypt|hash|encrypt"; "i"))'
|
||||||
|
|
||||||
|
# Get call graph from main
|
||||||
|
curl 'http://localhost:8192/analysis/callgraph?name=main&max_depth=2' | jq .
|
||||||
|
```
|
||||||
|
|
||||||
|
### Analyze Strings
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# List all strings
|
||||||
|
curl http://localhost:8192/data/strings | jq '.strings[] | {address, value}'
|
||||||
|
|
||||||
|
# Find passwords/keys
|
||||||
|
curl http://localhost:8192/data/strings | jq '.strings[] | select(.value | test("password|key|secret"; "i"))'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Decompile Entry Point
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Get program entry point
|
||||||
|
ENTRY=$(curl -s http://localhost:8192/program | jq -r '.program.entryPoint')
|
||||||
|
|
||||||
|
# Decompile it
|
||||||
|
curl "http://localhost:8192/functions/$ENTRY/decompile" | jq -r '.result'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Docker Management
|
||||||
|
|
||||||
|
### List Running Containers
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker ps | grep ghydramcp
|
||||||
|
```
|
||||||
|
|
||||||
|
### View Logs
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker logs -f my-analysis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stop Analysis
|
||||||
|
|
||||||
|
```bash
|
||||||
|
docker stop my-analysis
|
||||||
|
docker rm my-analysis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Persistent Projects
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Mount project directory for persistence
|
||||||
|
docker run -d --name persistent \
|
||||||
|
-p 8192:8192 \
|
||||||
|
-v $(pwd)/projects:/projects \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
-e PROJECT_NAME=MyProject \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/my-binary
|
||||||
|
|
||||||
|
# Projects are saved in ./projects/MyProject/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Import Failed
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check logs
|
||||||
|
docker logs my-analysis 2>&1 | grep ERROR
|
||||||
|
|
||||||
|
# Common issues:
|
||||||
|
# 1. Binary not found → Check volume mount path
|
||||||
|
# 2. AutoImporter failed → Use arm_firmware_prep.py for raw binaries
|
||||||
|
# 3. Unsupported format → Check file type with `file binary`
|
||||||
|
```
|
||||||
|
|
||||||
|
### Script Errors
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# If you see "Failed to get OSGi bundle" errors
|
||||||
|
# Fix script permissions in running container:
|
||||||
|
docker exec my-analysis sh -c 'chmod 644 /opt/ghidra/scripts/*.java'
|
||||||
|
|
||||||
|
# Then restart the analysis
|
||||||
|
```
|
||||||
|
|
||||||
|
### Port Already in Use
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Use different port
|
||||||
|
docker run -d --name analysis2 \
|
||||||
|
-p 8193:8192 \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/binary
|
||||||
|
|
||||||
|
# Access at http://localhost:8193/
|
||||||
|
```
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
### Example 1: Analyze Test Binary
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create simple test binary
|
||||||
|
cat > test.c << 'EOF'
|
||||||
|
#include <stdio.h>
|
||||||
|
int secret_value = 0x42;
|
||||||
|
void hidden() { printf("Hidden: %d\n", secret_value); }
|
||||||
|
int main() { printf("Hello!\n"); return 0; }
|
||||||
|
EOF
|
||||||
|
|
||||||
|
gcc -o binaries/test test.c
|
||||||
|
|
||||||
|
# Analyze
|
||||||
|
docker run -d --name test-analysis \
|
||||||
|
-p 8192:8192 \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/test
|
||||||
|
|
||||||
|
# Find hidden function
|
||||||
|
sleep 15
|
||||||
|
curl http://localhost:8192/functions | jq '.functions[] | select(.name == "hidden")'
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 2: Cisco Phone Firmware
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Prepare firmware
|
||||||
|
python3 docker/arm_firmware_prep.py \
|
||||||
|
cisco-firmware/P003-8-12-00.bin \
|
||||||
|
binaries/cisco.elf \
|
||||||
|
0x00000000
|
||||||
|
|
||||||
|
# Analyze
|
||||||
|
docker run -d --name cisco \
|
||||||
|
-p 8192:8192 \
|
||||||
|
-v $(pwd)/binaries:/binaries \
|
||||||
|
ghydramcp:latest \
|
||||||
|
/binaries/cisco.elf
|
||||||
|
|
||||||
|
# Explore
|
||||||
|
sleep 30
|
||||||
|
curl http://localhost:8192/functions | jq '.functions | length' # Function count
|
||||||
|
curl http://localhost:8192/data/strings | jq '.strings[] | select(.value | test("SIP|RTP"))'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
- **Read ARM_FIRMWARE_SUCCESS.md** for ARM firmware details
|
||||||
|
- **Check docker/README_ARM_SOLUTION.md** for advanced ARM workflows
|
||||||
|
- **Explore MCP integration** with Claude Code
|
||||||
|
- **Build automations** using the HTTP API
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
GhydraMCP/
|
||||||
|
├── docker/
|
||||||
|
│ ├── Dockerfile # Main Docker image
|
||||||
|
│ ├── entrypoint.sh # Container entry point
|
||||||
|
│ ├── GhydraMCPServer.java # HTTP API server (1724 lines)
|
||||||
|
│ ├── ImportRawARM.java # Raw binary import script
|
||||||
|
│ ├── arm_firmware_prep.py # ELF wrapper tool ⭐
|
||||||
|
│ └── README*.md # Documentation
|
||||||
|
├── src/ghydramcp/ # MCP server implementation
|
||||||
|
│ ├── __init__.py
|
||||||
|
│ ├── server.py # FastMCP server
|
||||||
|
│ └── mixins/ # Modular functionality
|
||||||
|
│ ├── docker.py # Docker management
|
||||||
|
│ ├── instances.py # Instance registry
|
||||||
|
│ ├── functions.py # Function operations
|
||||||
|
│ ├── analysis.py # Analysis tools
|
||||||
|
│ └── ...
|
||||||
|
├── binaries/ # Binary files for analysis
|
||||||
|
├── projects/ # Ghidra project persistence
|
||||||
|
└── launch.sh # MCP server launcher
|
||||||
|
```
|
||||||
|
|
||||||
|
Happy reverse engineering! 🔍
|
||||||
116
docker-compose.yml
Normal file
116
docker-compose.yml
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
# GhydraMCP Docker Compose Configuration
|
||||||
|
# Provides both development and production modes for Ghidra + GhydraMCP
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# Development: docker compose up ghydramcp-dev
|
||||||
|
# Production: docker compose up ghydramcp
|
||||||
|
#
|
||||||
|
# Set MODE in .env file to switch between dev/prod behaviors
|
||||||
|
|
||||||
|
services:
|
||||||
|
# =============================================================================
|
||||||
|
# Production Service - Optimized for stability and security
|
||||||
|
# =============================================================================
|
||||||
|
ghydramcp:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: docker/Dockerfile
|
||||||
|
args:
|
||||||
|
GHIDRA_VERSION: ${GHIDRA_VERSION:-11.4.2}
|
||||||
|
GHIDRA_DATE: ${GHIDRA_DATE:-20250826}
|
||||||
|
image: ghydramcp:${GHYDRAMCP_VERSION:-latest}
|
||||||
|
container_name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-server
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "${GHYDRA_PORT:-8192}:8192"
|
||||||
|
volumes:
|
||||||
|
# Mount binaries to analyze (read-only in prod)
|
||||||
|
- ${BINARIES_PATH:-./binaries}:/binaries:ro
|
||||||
|
# Persist Ghidra projects between runs
|
||||||
|
- ghydra-projects:/projects
|
||||||
|
environment:
|
||||||
|
- GHYDRA_MODE=${GHYDRA_MODE:-headless}
|
||||||
|
- GHYDRA_PORT=8192
|
||||||
|
- GHYDRA_MAXMEM=${GHYDRA_MAXMEM:-2G}
|
||||||
|
- PROJECT_NAME=${PROJECT_NAME:-GhydraMCP}
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8192/"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
start_period: 60s
|
||||||
|
retries: 3
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: ${GHYDRA_MAXMEM:-2G}
|
||||||
|
profiles:
|
||||||
|
- prod
|
||||||
|
- default
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Development Service - Hot-reload and debugging friendly
|
||||||
|
# =============================================================================
|
||||||
|
ghydramcp-dev:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: docker/Dockerfile
|
||||||
|
args:
|
||||||
|
GHIDRA_VERSION: ${GHIDRA_VERSION:-11.4.2}
|
||||||
|
GHIDRA_DATE: ${GHIDRA_DATE:-20250826}
|
||||||
|
image: ghydramcp:dev
|
||||||
|
container_name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-dev
|
||||||
|
ports:
|
||||||
|
- "${GHYDRA_PORT:-8192}:8192"
|
||||||
|
# Additional ports for debugging/multiple instances
|
||||||
|
- "8193:8193"
|
||||||
|
- "8194:8194"
|
||||||
|
volumes:
|
||||||
|
# Mount binaries (read-write in dev)
|
||||||
|
- ${BINARIES_PATH:-./binaries}:/binaries:rw
|
||||||
|
# Persist projects
|
||||||
|
- ghydra-projects-dev:/projects
|
||||||
|
# Mount scripts for live editing (development only)
|
||||||
|
- ./docker/GhydraMCPServer.java:/opt/ghidra/scripts/GhydraMCPServer.java:ro
|
||||||
|
- ./docker/entrypoint.sh:/entrypoint.sh:ro
|
||||||
|
environment:
|
||||||
|
- GHYDRA_MODE=${GHYDRA_MODE:-headless}
|
||||||
|
- GHYDRA_PORT=8192
|
||||||
|
- GHYDRA_MAXMEM=${GHYDRA_MAXMEM:-4G}
|
||||||
|
- PROJECT_NAME=${PROJECT_NAME:-GhydraMCP-Dev}
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-f", "http://localhost:8192/"]
|
||||||
|
interval: 15s
|
||||||
|
timeout: 5s
|
||||||
|
start_period: 120s
|
||||||
|
retries: 5
|
||||||
|
profiles:
|
||||||
|
- dev
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Shell Service - Interactive debugging container
|
||||||
|
# =============================================================================
|
||||||
|
ghydramcp-shell:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: docker/Dockerfile
|
||||||
|
image: ghydramcp:${GHYDRAMCP_VERSION:-latest}
|
||||||
|
container_name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-shell
|
||||||
|
stdin_open: true
|
||||||
|
tty: true
|
||||||
|
volumes:
|
||||||
|
- ${BINARIES_PATH:-./binaries}:/binaries:rw
|
||||||
|
- ghydra-projects-dev:/projects
|
||||||
|
environment:
|
||||||
|
- GHYDRA_MODE=shell
|
||||||
|
profiles:
|
||||||
|
- debug
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
ghydra-projects:
|
||||||
|
name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-projects
|
||||||
|
ghydra-projects-dev:
|
||||||
|
name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-projects-dev
|
||||||
|
|
||||||
|
networks:
|
||||||
|
default:
|
||||||
|
name: ${COMPOSE_PROJECT_NAME:-ghydramcp}-network
|
||||||
15
src/ghydramcp/__init__.py
Normal file
15
src/ghydramcp/__init__.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
"""GhydraMCP - AI-assisted reverse engineering bridge for Ghidra.
|
||||||
|
|
||||||
|
A multi-instance Ghidra plugin exposed via HATEOAS REST API plus an MCP
|
||||||
|
Python bridge for decompilation, analysis & binary manipulation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
from importlib.metadata import version
|
||||||
|
__version__ = version("ghydramcp")
|
||||||
|
except Exception:
|
||||||
|
__version__ = "2025.12.1"
|
||||||
|
|
||||||
|
from .server import create_server, main
|
||||||
|
|
||||||
|
__all__ = ["create_server", "main", "__version__"]
|
||||||
9
src/ghydramcp/__main__.py
Normal file
9
src/ghydramcp/__main__.py
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
"""GhydraMCP package entry point.
|
||||||
|
|
||||||
|
Allows running with: python -m ghydramcp
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .server import main
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
114
src/ghydramcp/config.py
Normal file
114
src/ghydramcp/config.py
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
"""Configuration management for GhydraMCP.
|
||||||
|
|
||||||
|
Handles environment variables, default settings, and runtime configuration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class DockerConfig:
    """Docker-specific configuration.

    Each environment-backed field is resolved lazily via
    ``field(default_factory=...)`` so the environment is read at
    instantiation time, not at import time. Recognized variables:
    GHYDRAMCP_VERSION, GHYDRA_PORT, GHYDRA_MAXMEM, GHYDRA_DOCKER_AUTO.
    """

    # Docker image settings
    image_name: str = "ghydramcp"
    # Image tag; overridden by GHYDRAMCP_VERSION (defaults to "latest").
    image_tag: str = field(default_factory=lambda: os.environ.get("GHYDRAMCP_VERSION", "latest"))

    # Default container settings
    # Host port for the container's HTTP API; GHYDRA_PORT overrides.
    # NOTE(review): raises ValueError at construction if GHYDRA_PORT is non-numeric.
    default_port: int = field(default_factory=lambda: int(os.environ.get("GHYDRA_PORT", "8192")))
    # Container memory limit string (e.g. "2G"); GHYDRA_MAXMEM overrides.
    default_memory: str = field(default_factory=lambda: os.environ.get("GHYDRA_MAXMEM", "2G"))

    # Project directory (for building)
    project_dir: Optional[Path] = None

    # Auto-start settings
    # Enabled only when GHYDRA_DOCKER_AUTO equals "true" (case-insensitive);
    # any other value, including unset, disables auto-start.
    auto_start_enabled: bool = field(default_factory=lambda: os.environ.get("GHYDRA_DOCKER_AUTO", "false").lower() == "true")
    # Whether auto-start should block until the container is ready.
    auto_start_wait: bool = True
    # Maximum seconds to wait for auto-start before giving up.
    auto_start_timeout: float = 300.0
|
||||||
|
|
||||||
|
|
||||||
|
# Process-wide DockerConfig singleton; None until first requested.
_docker_config: Optional[DockerConfig] = None


def get_docker_config() -> DockerConfig:
    """Return the process-wide Docker configuration, creating it lazily.

    The first call constructs a default :class:`DockerConfig` (reading
    environment variables at that moment); later calls return the same
    instance until :func:`set_docker_config` replaces it.
    """
    global _docker_config
    cfg = _docker_config
    if cfg is None:
        cfg = DockerConfig()
        _docker_config = cfg
    return cfg


def set_docker_config(config: DockerConfig) -> None:
    """Install *config* as the process-wide Docker configuration."""
    global _docker_config
    _docker_config = config
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class GhydraConfig:
    """Configuration for GhydraMCP server.

    Connection, discovery, pagination, and resource-cap settings used by
    the MCP bridge when talking to Ghidra HTTP instances. Environment
    variable GHIDRA_HOST overrides the default host at construction time.
    """

    # Ghidra connection settings
    # Host where Ghidra instances listen; GHIDRA_HOST env var overrides.
    ghidra_host: str = field(default_factory=lambda: os.environ.get("GHIDRA_HOST", "localhost"))
    # Preferred instance port; None means no fixed default.
    default_port: Optional[int] = None

    # Port scanning ranges for instance discovery
    # Narrow range checked first for a fast common-case scan.
    quick_discovery_range: range = field(default_factory=lambda: range(18489, 18499))
    # Wider range used when the quick scan finds nothing.
    full_discovery_range: range = field(default_factory=lambda: range(18400, 18600))

    # HTTP client settings
    # Per-request timeout in seconds for normal API calls.
    request_timeout: float = 30.0
    # Short timeout used while probing ports during discovery.
    discovery_timeout: float = 0.5

    # Pagination defaults
    default_page_size: int = 50
    max_page_size: int = 500

    # Cursor management
    cursor_ttl_seconds: int = 300  # 5 minutes
    max_cursors_per_session: int = 100

    # Expected API version
    expected_api_version: int = 2

    # Resource caps for enumeration endpoints
    # Upper bounds on items returned per resource type, keeping responses
    # bounded for AI-agent consumption.
    resource_caps: dict = field(default_factory=lambda: {
        "functions": 1000,
        "strings": 500,
        "data": 1000,
        "structs": 500,
        "xrefs": 500,
    })

    def __post_init__(self) -> None:
        """Validate configuration after initialization.

        Silently clamps ``default_page_size`` down to ``max_page_size``
        so the defaults can never exceed the hard limit.
        """
        if self.default_page_size > self.max_page_size:
            self.default_page_size = self.max_page_size
|
||||||
|
|
||||||
|
|
||||||
|
# Global configuration instance (can be replaced for testing)
_config: Optional[GhydraConfig] = None


def get_config() -> GhydraConfig:
    """Return the process-wide GhydraConfig, creating it on first use.

    Subsequent calls return the same instance until it is replaced via
    :func:`set_config` or cleared via :func:`reset_config`.
    """
    global _config
    cfg = _config
    if cfg is None:
        cfg = GhydraConfig()
        _config = cfg
    return cfg


def set_config(config: GhydraConfig) -> None:
    """Install *config* as the global configuration instance."""
    global _config
    _config = config


def reset_config() -> None:
    """Discard the current configuration; defaults are recreated lazily."""
    global _config
    _config = None
|
||||||
58
src/ghydramcp/core/__init__.py
Normal file
58
src/ghydramcp/core/__init__.py
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
"""Core infrastructure for GhydraMCP.
|
||||||
|
|
||||||
|
Contains HTTP client, pagination, progress reporting, and logging utilities.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .http_client import (
|
||||||
|
safe_get,
|
||||||
|
safe_post,
|
||||||
|
safe_put,
|
||||||
|
safe_patch,
|
||||||
|
safe_delete,
|
||||||
|
simplify_response,
|
||||||
|
get_instance_url,
|
||||||
|
)
|
||||||
|
from .pagination import (
|
||||||
|
CursorManager,
|
||||||
|
CursorState,
|
||||||
|
paginate_response,
|
||||||
|
get_cursor_manager,
|
||||||
|
estimate_tokens,
|
||||||
|
)
|
||||||
|
from .progress import (
|
||||||
|
ProgressReporter,
|
||||||
|
report_progress,
|
||||||
|
report_step,
|
||||||
|
)
|
||||||
|
from .logging import (
|
||||||
|
log_info,
|
||||||
|
log_debug,
|
||||||
|
log_warning,
|
||||||
|
log_error,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
# HTTP client
|
||||||
|
"safe_get",
|
||||||
|
"safe_post",
|
||||||
|
"safe_put",
|
||||||
|
"safe_patch",
|
||||||
|
"safe_delete",
|
||||||
|
"simplify_response",
|
||||||
|
"get_instance_url",
|
||||||
|
# Pagination
|
||||||
|
"CursorManager",
|
||||||
|
"CursorState",
|
||||||
|
"paginate_response",
|
||||||
|
"get_cursor_manager",
|
||||||
|
"estimate_tokens",
|
||||||
|
# Progress
|
||||||
|
"ProgressReporter",
|
||||||
|
"report_progress",
|
||||||
|
"report_step",
|
||||||
|
# Logging
|
||||||
|
"log_info",
|
||||||
|
"log_debug",
|
||||||
|
"log_warning",
|
||||||
|
"log_error",
|
||||||
|
]
|
||||||
392
src/ghydramcp/core/http_client.py
Normal file
392
src/ghydramcp/core/http_client.py
Normal file
@ -0,0 +1,392 @@
|
|||||||
|
"""HTTP client for Ghidra REST API communication.
|
||||||
|
|
||||||
|
Provides safe request methods with error handling, HATEOAS compliance,
|
||||||
|
and response simplification for AI agent consumption.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
from typing import Any, Dict, Optional, Union
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
# Allowed origins for CORS-like validation.
# NOTE(review): entries carry no port, and validate_origin() appends the
# port when the Origin header has one — so e.g. "http://localhost:8192"
# does NOT match. Confirm whether that strictness is intentional.
ALLOWED_ORIGINS = {
    "http://localhost",
    "http://127.0.0.1",
    "https://localhost",
    "https://127.0.0.1",
}


def validate_origin(headers: Dict[str, str]) -> bool:
    """Validate the request's Origin header against ALLOWED_ORIGINS.

    Args:
        headers: Request headers dict.

    Returns:
        True when no Origin header is present (browser same-origin policy
        applies) or when the scheme://host[:port] matches an allowed
        origin; False on a disallowed or unparseable origin.
    """
    origin = headers.get("Origin")
    if not origin:
        return True

    try:
        parts = urlparse(origin)
        if parts.port:
            rebuilt = f"{parts.scheme}://{parts.hostname}:{parts.port}"
        else:
            rebuilt = f"{parts.scheme}://{parts.hostname}"
    except Exception:
        # Malformed Origin (e.g. invalid port) — treat as not allowed.
        return False

    return rebuilt in ALLOWED_ORIGINS
|
||||||
|
|
||||||
|
|
||||||
|
def get_instance_url(port: int, host: Optional[str] = None) -> str:
    """Build the base URL for a Ghidra instance on the given port.

    Args:
        port: Port number of the target instance.
        host: Optional host override; when None, the host from the
            global configuration is used.

    Returns:
        Base URL of the form ``http://<host>:<port>``.
    """
    target_host = get_config().ghidra_host if host is None else host
    return f"http://{target_host}:{port}"
|
||||||
|
|
||||||
|
|
||||||
|
def _make_request(
    method: str,
    port: int,
    endpoint: str,
    params: Optional[Dict[str, Any]] = None,
    json_data: Optional[Dict[str, Any]] = None,
    data: Optional[str] = None,
    headers: Optional[Dict[str, str]] = None,
    host: Optional[str] = None,
) -> Dict[str, Any]:
    """Make HTTP request to Ghidra instance with error handling.

    Every outcome — success, HTTP error, non-JSON body, timeout,
    connection failure, unexpected exception — is returned as a dict;
    this function never raises.

    Args:
        method: HTTP method (GET, POST, PUT, PATCH, DELETE)
        port: Ghidra instance port
        endpoint: API endpoint path
        params: Query parameters
        json_data: JSON payload for POST/PUT/PATCH
        data: Raw text payload
        headers: Additional headers
        host: Optional host override

    Returns:
        Response dict with success flag and result or error
    """
    config = get_config()
    url = f"{get_instance_url(port, host)}/{endpoint}"

    # Set up headers for HATEOAS API; the request ID is a millisecond
    # timestamp, used for tracing rather than uniqueness guarantees.
    request_headers = {
        "Accept": "application/json",
        "X-Request-ID": f"mcp-bridge-{int(time.time() * 1000)}",
    }

    if headers:
        request_headers.update(headers)

    # Validate origin for state-changing requests
    is_state_changing = method.upper() in ["POST", "PUT", "PATCH", "DELETE"]
    if is_state_changing:
        # NOTE(review): when json_data is a dict, the origin check reads
        # a "headers" key from the *payload*, not from the `headers`
        # argument — callers like safe_post pop "headers" out of the
        # payload first, so this branch usually sees {} (which passes).
        # Confirm this asymmetry is intended.
        check_headers = (
            json_data.get("headers", {})
            if isinstance(json_data, dict)
            else (headers or {})
        )
        if not validate_origin(check_headers):
            return {
                "success": False,
                "error": {
                    "code": "ORIGIN_NOT_ALLOWED",
                    "message": "Origin not allowed for state-changing request",
                },
                "status_code": 403,
                "timestamp": int(time.time() * 1000),
            }
    # Content-Type mirrors whichever payload form was supplied.
    if json_data is not None:
        request_headers["Content-Type"] = "application/json"
    elif data is not None:
        request_headers["Content-Type"] = "text/plain"

    try:
        response = requests.request(
            method,
            url,
            params=params,
            json=json_data,
            data=data,
            headers=request_headers,
            timeout=config.request_timeout,
        )

        try:
            parsed_json = response.json()

            # Add timestamp if not present
            if isinstance(parsed_json, dict) and "timestamp" not in parsed_json:
                parsed_json["timestamp"] = int(time.time() * 1000)

            # Normalize error format: failed responses whose "error" is a
            # bare string get wrapped into the {"code", "message"} shape
            # used everywhere else in this module.
            if (
                not response.ok
                and isinstance(parsed_json, dict)
                and "success" in parsed_json
                and not parsed_json["success"]
            ):
                if "error" in parsed_json and not isinstance(
                    parsed_json["error"], dict
                ):
                    error_message = parsed_json["error"]
                    parsed_json["error"] = {
                        "code": f"HTTP_{response.status_code}",
                        "message": error_message,
                    }

            return parsed_json

        except ValueError:
            # Body was not JSON. Even a 2xx non-JSON body is reported as a
            # failure, since the bridge contract requires JSON responses.
            if response.ok:
                return {
                    "success": False,
                    "error": {
                        "code": "NON_JSON_RESPONSE",
                        "message": "Received non-JSON success response",
                    },
                    "status_code": response.status_code,
                    "response_text": response.text[:500],
                    "timestamp": int(time.time() * 1000),
                }
            else:
                return {
                    "success": False,
                    "error": {
                        "code": f"HTTP_{response.status_code}",
                        "message": f"Non-JSON error: {response.text[:100]}...",
                    },
                    "status_code": response.status_code,
                    "response_text": response.text[:500],
                    "timestamp": int(time.time() * 1000),
                }

    except requests.exceptions.Timeout:
        return {
            "success": False,
            "error": {"code": "REQUEST_TIMEOUT", "message": "Request timed out"},
            "status_code": 408,
            "timestamp": int(time.time() * 1000),
        }
    except requests.exceptions.ConnectionError:
        return {
            "success": False,
            "error": {
                "code": "CONNECTION_ERROR",
                "message": f"Failed to connect to Ghidra instance at {url}",
            },
            "status_code": 503,
            "timestamp": int(time.time() * 1000),
        }
    except Exception as e:
        # Catch-all boundary: any other failure is reported, not raised.
        return {
            "success": False,
            "error": {
                "code": "UNEXPECTED_ERROR",
                "message": f"Unexpected error: {str(e)}",
            },
            "exception": e.__class__.__name__,
            "timestamp": int(time.time() * 1000),
        }
|
||||||
|
|
||||||
|
|
||||||
|
def safe_get(
    port: int, endpoint: str, params: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    """Issue a GET against a Ghidra instance.

    Thin wrapper over _make_request that fixes the method to GET.

    Args:
        port: Ghidra instance port
        endpoint: API endpoint path
        params: Query parameters

    Returns:
        Response dict
    """
    return _make_request("GET", port, endpoint, params=params)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_post(
    port: int, endpoint: str, data: Union[Dict[str, Any], str]
) -> Dict[str, Any]:
    """Make POST request to Ghidra instance.

    Args:
        port: Ghidra instance port
        endpoint: API endpoint path
        data: JSON dict or raw string payload. A "headers" key, if present
            in a dict payload, is extracted and sent as request headers
            instead of being included in the JSON body.

    Returns:
        Response dict
    """
    headers = None
    json_payload = None
    text_payload = None

    if isinstance(data, dict):
        # Work on a shallow copy so popping "headers" does not mutate the
        # caller's dict (the original implementation did mutate it).
        json_payload = dict(data)
        headers = json_payload.pop("headers", None)
    else:
        text_payload = data

    return _make_request(
        "POST", port, endpoint, json_data=json_payload, data=text_payload, headers=headers
    )
|
||||||
|
|
||||||
|
|
||||||
|
def safe_put(port: int, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]:
    """Make PUT request to Ghidra instance.

    Args:
        port: Ghidra instance port
        endpoint: API endpoint path
        data: JSON payload. A "headers" key, if present, is extracted and
            sent as request headers instead of being part of the body.

    Returns:
        Response dict
    """
    if isinstance(data, dict):
        # Copy before popping so the caller's dict is not mutated.
        payload = dict(data)
        headers = payload.pop("headers", None)
    else:
        payload = data
        headers = None
    return _make_request("PUT", port, endpoint, json_data=payload, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_patch(port: int, endpoint: str, data: Dict[str, Any]) -> Dict[str, Any]:
    """Make PATCH request to Ghidra instance.

    Args:
        port: Ghidra instance port
        endpoint: API endpoint path
        data: JSON payload. A "headers" key, if present, is extracted and
            sent as request headers instead of being part of the body.

    Returns:
        Response dict
    """
    if isinstance(data, dict):
        # Copy before popping so the caller's dict is not mutated.
        payload = dict(data)
        headers = payload.pop("headers", None)
    else:
        payload = data
        headers = None
    return _make_request("PATCH", port, endpoint, json_data=payload, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
|
def safe_delete(port: int, endpoint: str) -> Dict[str, Any]:
    """Issue a DELETE against a Ghidra instance.

    Thin wrapper over _make_request that fixes the method to DELETE.

    Args:
        port: Ghidra instance port
        endpoint: API endpoint path

    Returns:
        Response dict
    """
    return _make_request("DELETE", port, endpoint)
|
||||||
|
|
||||||
|
|
||||||
|
def _flatten_links(obj: Dict[str, Any]) -> Dict[str, Any]:
    """Return a copy of *obj* with its HATEOAS "_links" map flattened.

    Each link that is a dict containing "href" is re-added as a plain
    "<name>_url" string key; malformed links are dropped. The input dict
    is not mutated.
    """
    obj_copy = obj.copy()
    links = obj_copy.pop("_links", None)
    if isinstance(links, dict):
        for link_name, link_data in links.items():
            if isinstance(link_data, dict) and "href" in link_data:
                obj_copy[f"{link_name}_url"] = link_data["href"]
    return obj_copy


def _disassembly_to_text(instructions: Any) -> str:
    """Render a list of instruction dicts as one text line per instruction.

    Format: "<address>: <bytes padded to 10> <mnemonic> <operands>\\n".
    Non-dict entries are skipped.
    """
    lines = []
    for instr in instructions:
        if isinstance(instr, dict):
            addr = instr.get("address", "")
            mnemonic = instr.get("mnemonic", "")
            operands = instr.get("operands", "")
            bytes_str = instr.get("bytes", "")
            lines.append(f"{addr}: {bytes_str.ljust(10)} {mnemonic} {operands}\n")
    return "".join(lines)


def simplify_response(response: Dict[str, Any]) -> Dict[str, Any]:
    """Simplify HATEOAS response for AI agent consumption.

    - Removes _links from result entries (re-exposed as "<name>_url" keys)
    - Flattens nested structures
    - Preserves important metadata
    - Converts structured data (disassembly, decompilation) to text

    Args:
        response: Raw API response

    Returns:
        Simplified response dict; non-dict input is returned unchanged.
        The input dict itself is not mutated.
    """
    if not isinstance(response, dict):
        return response

    result = response.copy()

    # Remember selected metadata so it survives the transformations below.
    api_metadata = {
        key: result[key]
        for key in ("id", "instance", "timestamp", "size", "offset", "limit")
        if key in result
    }

    # Simplify result data (list of entries or a single entry dict).
    if "result" in result:
        if isinstance(result["result"], list):
            result["result"] = [
                _flatten_links(item) if isinstance(item, dict) else item
                for item in result["result"]
            ]
        elif isinstance(result["result"], dict):
            result_copy = _flatten_links(result["result"])

            # Convert structured disassembly to a plain-text listing.
            if "instructions" in result_copy and isinstance(
                result_copy["instructions"], list
            ):
                result_copy["disassembly_text"] = _disassembly_to_text(
                    result_copy["instructions"]
                )
                result_copy.pop("instructions", None)

            # Expose decompiled code under a single well-known key.
            if "ccode" in result_copy:
                result_copy["decompiled_text"] = result_copy["ccode"]
            elif "decompiled" in result_copy:
                result_copy["decompiled_text"] = result_copy["decompiled"]

            result["result"] = result_copy

    # Flatten top-level links into an "api_links" name -> URL map.
    links = result.pop("_links", None)
    if isinstance(links, dict):
        api_links = {
            link_name: link_data["href"]
            for link_name, link_data in links.items()
            if isinstance(link_data, dict) and "href" in link_data
        }
        if api_links:
            result["api_links"] = api_links

    # Restore any remembered metadata keys that got dropped along the way.
    for key, value in api_metadata.items():
        result.setdefault(key, value)

    return result
|
||||||
88
src/ghydramcp/core/logging.py
Normal file
88
src/ghydramcp/core/logging.py
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
"""Logging utilities for MCP context-aware logging.
|
||||||
|
|
||||||
|
Provides async logging functions that use FastMCP's Context for
|
||||||
|
client-visible logging when available, with fallback to standard logging.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from mcp.server.fastmcp import Context
|
||||||
|
|
||||||
|
# Standard Python logger as fallback
# Used by the log_* helpers whenever an MCP Context is unavailable or its
# client-side logging calls fail; configure via configure_logging().
logger = logging.getLogger("ghydramcp")
|
||||||
|
|
||||||
|
|
||||||
|
async def log_debug(ctx: Optional["Context"], message: str) -> None:
    """Log a debug message to the MCP client and/or standard logger.

    Args:
        ctx: FastMCP context (may be None)
        message: Debug message to log
    """
    # Always record locally first; client delivery is best-effort.
    logger.debug(message)
    if ctx is None:
        return
    try:
        await ctx.debug(message)
    except Exception:
        # Context may not support client-side logging; ignore.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
async def log_info(ctx: Optional["Context"], message: str) -> None:
    """Log an info message to the MCP client and/or standard logger.

    Args:
        ctx: FastMCP context (may be None)
        message: Info message to log
    """
    # Always record locally first; client delivery is best-effort.
    logger.info(message)
    if ctx is None:
        return
    try:
        await ctx.info(message)
    except Exception:
        # Context may not support client-side logging; ignore.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
async def log_warning(ctx: Optional["Context"], message: str) -> None:
    """Log a warning message to the MCP client and/or standard logger.

    Args:
        ctx: FastMCP context (may be None)
        message: Warning message to log
    """
    # Always record locally first; client delivery is best-effort.
    logger.warning(message)
    if ctx is None:
        return
    try:
        await ctx.warning(message)
    except Exception:
        # Context may not support client-side logging; ignore.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
async def log_error(ctx: Optional["Context"], message: str) -> None:
    """Log an error message to the MCP client and/or standard logger.

    Args:
        ctx: FastMCP context (may be None)
        message: Error message to log
    """
    # Always record locally first; client delivery is best-effort.
    logger.error(message)
    if ctx is None:
        return
    try:
        await ctx.error(message)
    except Exception:
        # Context may not support client-side logging; ignore.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
def configure_logging(level: int = logging.INFO) -> None:
    """Configure the standard logger for GhydraMCP.

    Safe to call more than once: the stream handler is only attached when
    the logger has no handlers yet, so repeated configuration no longer
    duplicates every log line. The level is updated on every call.

    Args:
        level: Logging level (default: INFO)
    """
    if not logger.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        )
        logger.addHandler(handler)
    logger.setLevel(level)
|
||||||
511
src/ghydramcp/core/pagination.py
Normal file
511
src/ghydramcp/core/pagination.py
Normal file
@ -0,0 +1,511 @@
|
|||||||
|
"""Cursor-based pagination system for large MCP responses.
|
||||||
|
|
||||||
|
Provides efficient pagination with grep filtering, session isolation,
|
||||||
|
and TTL-based cursor expiration.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import time
|
||||||
|
from collections import OrderedDict
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from threading import Lock
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
# ReDoS Protection Configuration
MAX_GREP_PATTERN_LENGTH = 500  # max chars in a user-supplied grep pattern
MAX_GREP_REPETITION_OPS = 15  # max *, +, ?, {m,n} operators per pattern
MAX_GREP_RECURSION_DEPTH = 10  # max container nesting explored while grepping

# Token estimation (roughly 4 chars per token)
TOKEN_ESTIMATION_RATIO = 4.0


def compile_safe_pattern(pattern: str, flags: int = 0) -> re.Pattern:
    """Compile regex pattern with ReDoS protection.

    Validates the pattern to prevent catastrophic backtracking attacks:
    rejects empty or overly long patterns, patterns with too many
    repetition operators, and nested quantifiers such as ``(a+)+``.

    Args:
        pattern: Regex pattern string
        flags: Regex compilation flags

    Returns:
        Compiled regex pattern

    Raises:
        ValueError: If pattern fails safety validation or is not a valid
            regular expression (the original ``re.error`` is chained as
            the cause).
    """
    if not pattern:
        raise ValueError("Empty pattern")

    if len(pattern) > MAX_GREP_PATTERN_LENGTH:
        raise ValueError(
            f"Pattern too long ({len(pattern)} chars, max {MAX_GREP_PATTERN_LENGTH}). "
            "Consider using a simpler pattern."
        )

    # Count repetition operators — a cheap upper bound on backtracking work.
    repetition_ops = pattern.count("*") + pattern.count("+") + pattern.count("?")
    repetition_ops += len(re.findall(r"\{[0-9,]+\}", pattern))

    if repetition_ops > MAX_GREP_REPETITION_OPS:
        raise ValueError(
            f"Pattern has too many repetition operators ({repetition_ops}, "
            f"max {MAX_GREP_REPETITION_OPS}). Consider simplifying."
        )

    # Check for dangerous nested quantifiers
    dangerous_patterns = [
        r"\([^)]*[*+][^)]*\)[*+]",  # (a+)+ or (a*)*
        r"\([^)]*[*+][^)]*\)\{",  # (a+){n,m}
    ]
    for dangerous in dangerous_patterns:
        if re.search(dangerous, pattern):
            raise ValueError(
                "Pattern contains nested quantifiers which could cause "
                "exponential backtracking. Consider simplifying."
            )

    try:
        return re.compile(pattern, flags)
    except re.error as e:
        # Chain the original re.error so the root cause stays visible.
        raise ValueError(f"Invalid regex pattern: {e}") from e
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class CursorState:
    """Represents the state of a paginated query with session isolation."""

    cursor_id: str  # opaque identifier handed back to the client
    session_id: str  # owning session; enforced by verify_session()
    tool_name: str  # tool that created this cursor (diagnostics only)
    query_hash: str  # hash of the originating query parameters
    data: List[Any]  # the (already grep-filtered) result set being paged
    total_count: int  # size of the result set before grep filtering
    filtered_count: int  # size after grep filtering (== len(data))
    current_offset: int = 0  # index of the first item on the current page
    page_size: int = 50  # items per page
    grep_pattern: Optional[str] = None  # pattern used to filter, if any
    grep_flags: int = 0  # regex flags used with grep_pattern
    created_at: float = field(default_factory=time.time)  # creation time (epoch s)
    last_accessed: float = field(default_factory=time.time)  # refreshed on access

    @property
    def is_expired(self) -> bool:
        """True once cursor_ttl_seconds have passed since last access."""
        config = get_config()
        return time.time() - self.last_accessed > config.cursor_ttl_seconds

    @property
    def has_more(self) -> bool:
        """True if at least one item exists beyond the current page."""
        return self.current_offset + self.page_size < self.filtered_count

    @property
    def current_page(self) -> int:
        """1-based page number derived from the current offset."""
        return (self.current_offset // self.page_size) + 1

    @property
    def total_pages(self) -> int:
        """Total page count (ceiling division; at least 1 even when empty)."""
        return max(1, (self.filtered_count + self.page_size - 1) // self.page_size)

    @property
    def ttl_remaining(self) -> int:
        """Whole seconds until expiry, clamped at 0."""
        config = get_config()
        return max(0, int(config.cursor_ttl_seconds - (time.time() - self.last_accessed)))

    def verify_session(self, session_id: str) -> bool:
        """Verify cursor belongs to requesting session."""
        return self.session_id == session_id
|
||||||
|
|
||||||
|
|
||||||
|
class CursorManager:
    """Thread-safe cursor manager with TTL-based expiration and session isolation.

    All public methods take self._lock; helpers prefixed with an underscore
    that touch shared state (_cleanup_expired) assume the lock is already
    held by the caller.
    """

    def __init__(self):
        # Insertion-/access-ordered map of cursor_id -> CursorState; the
        # ordering is what makes the LRU eviction in _cleanup_expired work.
        self._cursors: OrderedDict[str, CursorState] = OrderedDict()
        # Reverse index: session_id -> set of cursor_ids owned by it.
        self._session_cursors: Dict[str, set] = {}
        self._lock = Lock()

    def _generate_cursor_id(self, query_hash: str, session_id: str) -> str:
        """Generate a unique cursor ID."""
        # Uniqueness comes from the timestamp and this manager's identity;
        # the hash is truncated to 16 hex chars for compact client handles.
        unique = f"{session_id}-{query_hash}-{time.time()}-{id(self)}"
        return hashlib.sha256(unique.encode()).hexdigest()[:16]

    def _cleanup_expired(self) -> None:
        """Remove expired cursors (call while holding lock)."""
        config = get_config()

        # Drop everything past its TTL, keeping the session index in sync.
        expired = [cid for cid, state in self._cursors.items() if state.is_expired]
        for cid in expired:
            state = self._cursors[cid]
            if state.session_id in self._session_cursors:
                self._session_cursors[state.session_id].discard(cid)
            del self._cursors[cid]

        # LRU eviction
        # NOTE(review): max_cursors_per_session is applied here as a cap on
        # the *total* cursor count across all sessions, not per session —
        # confirm whether the config name or this usage is the intent.
        while len(self._cursors) > config.max_cursors_per_session:
            oldest_id, oldest_state = self._cursors.popitem(last=False)
            if oldest_state.session_id in self._session_cursors:
                self._session_cursors[oldest_state.session_id].discard(oldest_id)

    def create_cursor(
        self,
        data: List[Any],
        query_params: Dict[str, Any],
        tool_name: str = "unknown",
        session_id: str = "default",
        grep_pattern: Optional[str] = None,
        grep_flags: int = 0,
        page_size: int = 50,
    ) -> Tuple[str, CursorState]:
        """Create a new cursor for paginated results.

        Args:
            data: The full result set to paginate
            query_params: Original query parameters (for hashing)
            tool_name: Name of tool creating cursor
            session_id: Session identifier for isolation
            grep_pattern: Optional regex pattern to filter results
            grep_flags: Regex flags
            page_size: Items per page (clamped to config.max_page_size)

        Returns:
            Tuple of (cursor_id, cursor_state)

        Raises:
            ValueError: If grep_pattern fails compile_safe_pattern validation.
        """
        config = get_config()

        # Apply grep filtering before storing, so the cursor only ever
        # holds (and pages over) matching items.
        filtered_data = data
        if grep_pattern:
            pattern = compile_safe_pattern(grep_pattern, grep_flags)
            filtered_data = [
                item for item in data if self._matches_grep(item, pattern)
            ]

        # Create query hash (md5 is used as a cheap fingerprint here, not
        # for security).
        query_hash = hashlib.md5(
            json.dumps(query_params, sort_keys=True, default=str).encode()
        ).hexdigest()[:12]

        with self._lock:
            self._cleanup_expired()

            cursor_id = self._generate_cursor_id(query_hash, session_id)
            state = CursorState(
                cursor_id=cursor_id,
                session_id=session_id,
                tool_name=tool_name,
                query_hash=query_hash,
                data=filtered_data,
                total_count=len(data),
                filtered_count=len(filtered_data),
                page_size=min(page_size, config.max_page_size),
                grep_pattern=grep_pattern,
                grep_flags=grep_flags,
            )
            self._cursors[cursor_id] = state

            if session_id not in self._session_cursors:
                self._session_cursors[session_id] = set()
            self._session_cursors[session_id].add(cursor_id)

            return cursor_id, state

    def get_cursor(
        self, cursor_id: str, session_id: Optional[str] = None
    ) -> Optional[CursorState]:
        """Retrieve a cursor by ID, optionally validating session.

        Refreshes the cursor's TTL and LRU position on a successful hit.
        Returns None for unknown, expired, or foreign-session cursors.
        """
        with self._lock:
            self._cleanup_expired()

            if cursor_id not in self._cursors:
                return None

            state = self._cursors[cursor_id]
            if state.is_expired:
                # Could have expired between cleanup and this check.
                del self._cursors[cursor_id]
                if state.session_id in self._session_cursors:
                    self._session_cursors[state.session_id].discard(cursor_id)
                return None

            if session_id and not state.verify_session(session_id):
                return None

            state.last_accessed = time.time()
            self._cursors.move_to_end(cursor_id)
            return state

    def advance_cursor(
        self, cursor_id: str, session_id: Optional[str] = None
    ) -> Optional[CursorState]:
        """Advance cursor to next page.

        Returns the updated state, or None if the cursor is unknown,
        expired, or belongs to a different session. Note: the offset can
        advance past the end of the data; get_page then yields [].
        """
        with self._lock:
            state = self._cursors.get(cursor_id)
            if not state or state.is_expired:
                return None

            if session_id and not state.verify_session(session_id):
                return None

            state.current_offset += state.page_size
            state.last_accessed = time.time()
            self._cursors.move_to_end(cursor_id)
            return state

    def delete_cursor(
        self, cursor_id: str, session_id: Optional[str] = None
    ) -> bool:
        """Explicitly delete a cursor.

        Returns True if the cursor existed and was removed; False for an
        unknown cursor or a session mismatch.
        """
        with self._lock:
            if cursor_id not in self._cursors:
                return False

            state = self._cursors[cursor_id]
            if session_id and not state.verify_session(session_id):
                return False

            if state.session_id in self._session_cursors:
                self._session_cursors[state.session_id].discard(cursor_id)
            del self._cursors[cursor_id]
            return True

    def delete_session_cursors(self, session_id: str) -> int:
        """Delete all cursors for a session.

        Returns the number of cursors actually removed.
        """
        with self._lock:
            if session_id not in self._session_cursors:
                return 0

            cursor_ids = list(self._session_cursors[session_id])
            count = 0
            for cid in cursor_ids:
                if cid in self._cursors:
                    del self._cursors[cid]
                    count += 1
            del self._session_cursors[session_id]
            return count

    def get_page(self, state: CursorState) -> List[Any]:
        """Get current page of data from cursor state."""
        # Plain slice: an offset past the end safely yields an empty list.
        start = state.current_offset
        end = start + state.page_size
        return state.data[start:end]

    def _matches_grep(
        self, item: Any, pattern: re.Pattern, depth: int = 0
    ) -> bool:
        """Check if an item matches the grep pattern.

        Searches through string representations of dict values,
        list items, or the item itself. Recursion into nested
        containers is capped at MAX_GREP_RECURSION_DEPTH; anything
        deeper is treated as a non-match. Dict *keys* are not searched.
        """
        if depth > MAX_GREP_RECURSION_DEPTH:
            return False

        if isinstance(item, dict):
            for value in item.values():
                if isinstance(value, str) and pattern.search(value):
                    return True
                elif isinstance(value, (int, float)):
                    if pattern.search(str(value)):
                        return True
                elif isinstance(value, dict):
                    if self._matches_grep(value, pattern, depth + 1):
                        return True
                elif isinstance(value, (list, tuple)):
                    if self._matches_grep(value, pattern, depth + 1):
                        return True
            return False
        elif isinstance(item, (list, tuple)):
            return any(self._matches_grep(i, pattern, depth + 1) for i in item)
        elif isinstance(item, str):
            return bool(pattern.search(item))
        else:
            # Fallback: match against the item's str() representation.
            return bool(pattern.search(str(item)))

    def list_cursors(self, session_id: Optional[str] = None) -> List[Dict[str, Any]]:
        """List active cursors, optionally filtered by session."""
        with self._lock:
            self._cleanup_expired()
            return [
                {
                    "cursor_id": cid,
                    "session_id": state.session_id,
                    "tool_name": state.tool_name,
                    "total_count": state.total_count,
                    "filtered_count": state.filtered_count,
                    "current_page": state.current_page,
                    "total_pages": state.total_pages,
                    "current_offset": state.current_offset,
                    "page_size": state.page_size,
                    "has_more": state.has_more,
                    "grep_pattern": state.grep_pattern,
                    "age_seconds": int(time.time() - state.created_at),
                    "ttl_remaining": state.ttl_remaining,
                }
                for cid, state in self._cursors.items()
                if session_id is None or state.session_id == session_id
            ]

    def get_stats(self) -> Dict[str, Any]:
        """Get cursor manager statistics."""
        config = get_config()
        with self._lock:
            self._cleanup_expired()
            return {
                "total_cursors": len(self._cursors),
                "total_sessions": len(self._session_cursors),
                "max_cache_size": config.max_cursors_per_session,
                "ttl_seconds": config.cursor_ttl_seconds,
                "cursors_per_session": {
                    sid: len(cids) for sid, cids in self._session_cursors.items()
                },
            }
|
||||||
|
|
||||||
|
|
||||||
|
# Global cursor manager instance
# Lazily created on first use via get_cursor_manager().
_cursor_manager: Optional[CursorManager] = None


def get_cursor_manager() -> CursorManager:
    """Get the global cursor manager instance."""
    # Lazy module-level singleton. NOTE(review): initialization is not
    # lock-protected, so two threads racing here could each construct a
    # manager; confirm single-threaded startup if that matters.
    global _cursor_manager
    if _cursor_manager is None:
        _cursor_manager = CursorManager()
    return _cursor_manager
|
||||||
|
|
||||||
|
|
||||||
|
def estimate_tokens(data: List[Any]) -> int:
    """Estimate token count for a list of items.

    Serializes the items to JSON and applies the rough chars-per-token
    ratio (TOKEN_ESTIMATION_RATIO).
    """
    serialized = json.dumps(data, default=str)
    return int(len(serialized) / TOKEN_ESTIMATION_RATIO)
|
||||||
|
|
||||||
|
|
||||||
|
def paginate_response(
    data: List[Any],
    query_params: Dict[str, Any],
    tool_name: str = "unknown",
    session_id: str = "default",
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
) -> Dict[str, Any]:
    """Create a paginated response with optional grep filtering.

    Args:
        data: Full result list to paginate
        query_params: Original query parameters (for cursor creation)
        tool_name: Name of the tool creating this response
        session_id: Session identifier for cursor isolation
        page_size: Items per page (default: 50, clamped to config.max_page_size)
        grep: Optional regex pattern to filter results
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Bypass pagination and return all results (with warning)

    Returns:
        dict with pagination metadata and results; an invalid grep pattern
        yields a {"success": False, "error": {...}} dict instead of raising.
    """
    config = get_config()
    cursor_manager = get_cursor_manager()
    grep_flags = re.IGNORECASE if grep_ignorecase else 0

    # Handle return_all bypass: no cursor is created, everything (after
    # optional grep filtering) is returned in one response.
    if return_all:
        filtered_data = data
        if grep:
            try:
                pattern = compile_safe_pattern(grep, grep_flags)
                # NOTE(review): reaches into the manager's private
                # _matches_grep helper; consider promoting it to a
                # module-level function.
                filtered_data = [
                    item
                    for item in data
                    if cursor_manager._matches_grep(item, pattern)
                ]
            except ValueError as e:
                return {
                    "success": False,
                    "error": {"code": "INVALID_GREP_PATTERN", "message": str(e)},
                    "timestamp": int(time.time() * 1000),
                }

        # Size warnings are tiered by estimated token count so callers can
        # decide whether to re-request with pagination.
        estimated_tokens = estimate_tokens(filtered_data)
        warning = None

        if estimated_tokens > 50000:
            warning = f"EXTREMELY LARGE response (~{estimated_tokens:,} tokens)"
        elif estimated_tokens > 20000:
            warning = f"VERY LARGE response (~{estimated_tokens:,} tokens)"
        elif estimated_tokens > 8000:
            warning = f"Large response (~{estimated_tokens:,} tokens)"

        return {
            "success": True,
            "result": filtered_data,
            "pagination": {
                "bypassed": True,
                "total_count": len(data),
                "filtered_count": len(filtered_data),
                "grep_pattern": grep,
                "estimated_tokens": estimated_tokens,
                "warning": warning,
            },
            "timestamp": int(time.time() * 1000),
        }

    # Normal pagination flow
    try:
        cursor_id, state = cursor_manager.create_cursor(
            data=data,
            query_params=query_params,
            tool_name=tool_name,
            session_id=session_id,
            grep_pattern=grep,
            grep_flags=grep_flags,
            page_size=min(page_size, config.max_page_size),
        )
    except ValueError as e:
        # Raised by compile_safe_pattern inside create_cursor.
        return {
            "success": False,
            "error": {"code": "INVALID_GREP_PATTERN", "message": str(e)},
            "timestamp": int(time.time() * 1000),
        }

    current_page = cursor_manager.get_page(state)
    # Only expose the cursor when there is actually a next page to fetch.
    response_cursor = cursor_id if state.has_more else None

    response = {
        "success": True,
        "result": current_page,
        "pagination": {
            "cursor_id": response_cursor,
            "session_id": session_id,
            "total_count": state.total_count,
            "filtered_count": state.filtered_count,
            "page_size": state.page_size,
            "current_page": state.current_page,
            "total_pages": state.total_pages,
            "has_more": state.has_more,
            "grep_pattern": grep,
            "items_returned": len(current_page),
        },
        "timestamp": int(time.time() * 1000),
    }

    # Add LLM-friendly continuation message
    if state.has_more:
        remaining = state.filtered_count - (state.current_page * state.page_size)
        response["_message"] = (
            f"Showing {len(current_page)} of {state.filtered_count} items "
            f"(page {state.current_page}/{state.total_pages}). "
            f"To get the next {min(state.page_size, remaining)} items, call: "
            f"cursor_next(cursor_id='{cursor_id}')"
        )
    else:
        response["_message"] = (
            f"Complete: {len(current_page)} items returned (all results)"
        )

    return response
|
||||||
161
src/ghydramcp/core/progress.py
Normal file
161
src/ghydramcp/core/progress.py
Normal file
@ -0,0 +1,161 @@
|
|||||||
|
"""Progress reporting utilities for long-running operations.
|
||||||
|
|
||||||
|
Provides async progress reporting using FastMCP's Context for
|
||||||
|
real-time progress notifications to MCP clients.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Optional, TYPE_CHECKING
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from mcp.server.fastmcp import Context
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressReporter:
    """Throttled progress reporter for long-running MCP operations.

    Wraps a FastMCP ``Context`` (which may be ``None``) and forwards
    progress notifications to the client, suppressing updates that fall
    below a ~5% threshold so the client is not flooded.

    Example:
        async def long_operation(ctx: Context):
            progress = ProgressReporter(ctx, "Scanning", total=100)
            for i in range(100):
                await progress.update(message=f"Processing item {i}")
            await progress.complete("Scan finished")
    """

    def __init__(
        self,
        ctx: Optional["Context"],
        operation: str,
        total: int = 100
    ):
        """Initialize the reporter.

        Args:
            ctx: FastMCP context used for notifications (may be None)
            operation: Operation name, prefixed to every log message
            total: Total number of steps (default: 100)
        """
        self.ctx = ctx
        self.operation = operation
        self.total = total
        self.current = 0
        self._last_reported = 0
        # Emit a notification at most every ~5% of progress (never less
        # than one step).
        self._report_threshold = max(1, total // 20)

    async def update(
        self,
        progress: Optional[int] = None,
        message: Optional[str] = None
    ) -> None:
        """Advance progress and notify the client if a threshold is crossed.

        Args:
            progress: Absolute progress value; if None, increments by 1
            message: Optional message logged alongside the update
        """
        self.current = self.current + 1 if progress is None else progress

        crossed = self.current - self._last_reported >= self._report_threshold
        finished = self.current >= self.total
        if not self.ctx or not (crossed or finished):
            return

        try:
            await self.ctx.report_progress(
                progress=self.current,
                total=self.total
            )
            if message:
                await self.ctx.info(f"{self.operation}: {message}")
            self._last_reported = self.current
        except Exception:
            # Context may not support progress notifications; stay silent.
            pass

    async def info(self, message: str) -> None:
        """Send an informational message to the client.

        Args:
            message: Message to send (prefixed with the operation name)
        """
        if not self.ctx:
            return
        try:
            await self.ctx.info(f"{self.operation}: {message}")
        except Exception:
            pass

    async def complete(self, message: Optional[str] = None) -> None:
        """Mark the operation complete and optionally log a final message.

        Args:
            message: Optional completion message; supports the format
                placeholders {count}, {total}, {operation}
        """
        self.current = self.total
        if not self.ctx:
            return
        try:
            await self.ctx.report_progress(
                progress=self.total,
                total=self.total
            )
            if message:
                await self.ctx.info(
                    message.format(
                        count=self.current,
                        total=self.total,
                        operation=self.operation,
                    )
                )
        except Exception:
            pass
|
||||||
|
|
||||||
|
|
||||||
|
async def report_progress(
    ctx: Optional["Context"],
    progress: int,
    total: int,
    message: Optional[str] = None
) -> None:
    """Send a single progress notification without a ProgressReporter.

    Args:
        ctx: FastMCP context (may be None, in which case this is a no-op)
        progress: Current progress value
        total: Total progress value
        message: Optional message logged after the progress update
    """
    if not ctx:
        return
    try:
        await ctx.report_progress(progress=progress, total=total)
        if message:
            await ctx.info(message)
    except Exception:
        # Best-effort: clients without progress support are ignored.
        pass
|
||||||
|
|
||||||
|
|
||||||
|
async def report_step(
    ctx: Optional["Context"],
    step: int,
    total_steps: int,
    description: str
) -> None:
    """Report one discrete step of a multi-step operation.

    Args:
        ctx: FastMCP context (may be None, in which case this is a no-op)
        step: Current step number (1-indexed)
        total_steps: Total number of steps
        description: Human-readable description of the current step
    """
    if not ctx:
        return
    try:
        await ctx.report_progress(progress=step, total=total_steps)
        await ctx.info(f"Step {step}/{total_steps}: {description}")
    except Exception:
        # Best-effort: clients without progress support are ignored.
        pass
|
||||||
29
src/ghydramcp/mixins/__init__.py
Normal file
29
src/ghydramcp/mixins/__init__.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
"""MCP Mixins for GhydraMCP.
|
||||||
|
|
||||||
|
Domain-specific mixins that organize tools, resources, and prompts by functionality.
|
||||||
|
Uses FastMCP's contrib.mcp_mixin pattern for clean modular organization.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from .instances import InstancesMixin
|
||||||
|
from .functions import FunctionsMixin
|
||||||
|
from .data import DataMixin
|
||||||
|
from .structs import StructsMixin
|
||||||
|
from .analysis import AnalysisMixin
|
||||||
|
from .memory import MemoryMixin
|
||||||
|
from .xrefs import XrefsMixin
|
||||||
|
from .cursors import CursorsMixin
|
||||||
|
from .docker import DockerMixin
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"GhydraMixinBase",
|
||||||
|
"InstancesMixin",
|
||||||
|
"FunctionsMixin",
|
||||||
|
"DataMixin",
|
||||||
|
"StructsMixin",
|
||||||
|
"AnalysisMixin",
|
||||||
|
"MemoryMixin",
|
||||||
|
"XrefsMixin",
|
||||||
|
"CursorsMixin",
|
||||||
|
"DockerMixin",
|
||||||
|
]
|
||||||
356
src/ghydramcp/mixins/analysis.py
Normal file
356
src/ghydramcp/mixins/analysis.py
Normal file
@ -0,0 +1,356 @@
|
|||||||
|
"""Analysis mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for program analysis operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
class AnalysisMixin(GhydraMixinBase):
    """Mixin for analysis operations.

    Provides tools for:
    - Running program analysis
    - Call graph analysis
    - Data flow analysis
    - UI state queries
    - Comment management
    """

    @mcp_tool()
    def analysis_run(
        self,
        port: Optional[int] = None,
        analysis_options: Optional[Dict[str, Any]] = None,
    ) -> Dict[str, Any]:
        """Run analysis on the current program.

        Args:
            port: Ghidra instance port (optional)
            analysis_options: Analysis options to enable/disable

        Returns:
            Analysis operation result
        """
        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        # Empty dict when no options given: POST body must not be None.
        response = self.safe_post(port, "analysis", analysis_options or {})
        return self.simplify_response(response)

    @mcp_tool()
    def analysis_get_callgraph(
        self,
        name: Optional[str] = None,
        address: Optional[str] = None,
        max_depth: int = 3,
        port: Optional[int] = None,
        page_size: int = 50,
        grep: Optional[str] = None,
        grep_ignorecase: bool = True,
        return_all: bool = False,
        ctx: Optional[Context] = None,
    ) -> Dict[str, Any]:
        """Get function call graph with edge pagination.

        Args:
            name: Starting function name (mutually exclusive with address)
            address: Starting function address
            max_depth: Maximum call depth (default: 3)
            port: Ghidra instance port (optional)
            page_size: Edges per page (default: 50, max: 500)
            grep: Regex pattern to filter edges
            grep_ignorecase: Case-insensitive grep (default: True)
            return_all: Return all edges without pagination
            ctx: FastMCP context (auto-injected)

        Returns:
            Call graph with paginated edges
        """
        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        config = get_config()

        # If both are given, address wins; func_id labels the root node
        # in the final result.
        params = {"max_depth": max_depth}
        if address:
            params["address"] = address
            func_id = address
        elif name:
            params["name"] = name
            func_id = name
        else:
            # Neither given: the backend picks a default starting point.
            func_id = "entry_point"

        response = self.safe_get(port, "analysis/callgraph", params)
        simplified = self.simplify_response(response)

        # Missing "success" key is treated as success here.
        if not simplified.get("success", True):
            return simplified

        result = simplified.get("result", {})
        edges = result.get("edges", []) if isinstance(result, dict) else []
        nodes = result.get("nodes", []) if isinstance(result, dict) else []

        # No edges: nothing to paginate, return the backend response as-is.
        if not edges:
            return simplified

        # Query identity for the pagination cursor — presumably used by
        # paginate_response to key/resume the cursor (TODO confirm).
        query_params = {
            "tool": "analysis_get_callgraph",
            "port": port,
            "name": name,
            "address": address,
            "max_depth": max_depth,
            "grep": grep,
        }
        session_id = self._get_session_id(ctx)

        paginated = self.paginate_response(
            data=edges,
            query_params=query_params,
            tool_name="analysis_get_callgraph",
            session_id=session_id,
            # Client-requested page size is capped by the configured limit.
            page_size=min(page_size, config.max_page_size),
            grep=grep,
            grep_ignorecase=grep_ignorecase,
            return_all=return_all,
        )

        # Re-wrap: only edges are paginated; the full node list rides
        # along with every page.
        if paginated.get("success"):
            paginated["result"] = {
                "root_function": func_id,
                "max_depth": max_depth,
                "nodes": nodes,
                "edges": paginated.get("result", []),
                "total_nodes": len(nodes),
            }

        return paginated

    @mcp_tool()
    def analysis_get_dataflow(
        self,
        address: str,
        direction: str = "forward",
        max_steps: int = 50,
        port: Optional[int] = None,
        page_size: int = 50,
        grep: Optional[str] = None,
        grep_ignorecase: bool = True,
        return_all: bool = False,
        ctx: Optional[Context] = None,
    ) -> Dict[str, Any]:
        """Perform data flow analysis with step pagination.

        Args:
            address: Starting address in hex format
            direction: "forward" or "backward" (default: "forward")
            max_steps: Maximum analysis steps (default: 50)
            port: Ghidra instance port (optional)
            page_size: Steps per page (default: 50, max: 500)
            grep: Regex pattern to filter steps
            grep_ignorecase: Case-insensitive grep (default: True)
            return_all: Return all steps without pagination
            ctx: FastMCP context (auto-injected)

        Returns:
            Data flow steps with pagination
        """
        if not address:
            return {
                "success": False,
                "error": {
                    "code": "MISSING_PARAMETER",
                    "message": "Address parameter is required",
                },
            }

        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        config = get_config()

        params = {
            "address": address,
            "direction": direction,
            "max_steps": max_steps,
        }

        response = self.safe_get(port, "analysis/dataflow", params)
        simplified = self.simplify_response(response)

        # Missing "success" key is treated as success here.
        if not simplified.get("success", True):
            return simplified

        result = simplified.get("result", {})
        steps = result.get("steps", []) if isinstance(result, dict) else []

        # No steps: nothing to paginate, return the backend response as-is.
        if not steps:
            return simplified

        query_params = {
            "tool": "analysis_get_dataflow",
            "port": port,
            "address": address,
            "direction": direction,
            "max_steps": max_steps,
            "grep": grep,
        }
        session_id = self._get_session_id(ctx)

        paginated = self.paginate_response(
            data=steps,
            query_params=query_params,
            tool_name="analysis_get_dataflow",
            session_id=session_id,
            page_size=min(page_size, config.max_page_size),
            grep=grep,
            grep_ignorecase=grep_ignorecase,
            return_all=return_all,
        )

        if paginated.get("success"):
            paginated["result"] = {
                "start_address": address,
                "direction": direction,
                "steps": paginated.get("result", []),
            }
            # Preserve selected summary fields from the un-paginated
            # backend result, when present.
            if isinstance(result, dict):
                for key in ["sources", "sinks", "total_steps"]:
                    if key in result:
                        paginated["result"][key] = result[key]

        return paginated

    @mcp_tool()
    def ui_get_current_address(self, port: Optional[int] = None) -> Dict[str, Any]:
        """Get the address currently selected in Ghidra's UI.

        Args:
            port: Ghidra instance port (optional)

        Returns:
            Current address information
        """
        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        response = self.safe_get(port, "address")
        return self.simplify_response(response)

    @mcp_tool()
    def ui_get_current_function(self, port: Optional[int] = None) -> Dict[str, Any]:
        """Get the function currently selected in Ghidra's UI.

        Args:
            port: Ghidra instance port (optional)

        Returns:
            Current function information
        """
        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        response = self.safe_get(port, "function")
        return self.simplify_response(response)

    @mcp_tool()
    def comments_set(
        self,
        address: str,
        comment: str = "",
        comment_type: str = "plate",
        port: Optional[int] = None,
    ) -> Dict[str, Any]:
        """Set a comment at the specified address.

        Args:
            address: Memory address in hex format
            comment: Comment text (empty string removes comment)
            comment_type: "plate", "pre", "post", "eol", "repeatable"
            port: Ghidra instance port (optional)

        Returns:
            Operation result
        """
        if not address:
            return {
                "success": False,
                "error": {
                    "code": "MISSING_PARAMETER",
                    "message": "Address parameter is required",
                },
            }

        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        # NOTE(review): comment_type is interpolated into the URL without
        # validation against the documented set — the backend is assumed
        # to reject unknown types.
        payload = {"comment": comment}
        response = self.safe_post(port, f"memory/{address}/comments/{comment_type}", payload)
        return self.simplify_response(response)

    @mcp_tool()
    def functions_set_comment(
        self,
        address: str,
        comment: str = "",
        port: Optional[int] = None,
    ) -> Dict[str, Any]:
        """Set a decompiler-friendly comment (function comment with fallback).

        Args:
            address: Memory address (preferably function entry point)
            comment: Comment text (empty string removes comment)
            port: Ghidra instance port (optional)

        Returns:
            Operation result
        """
        if not address:
            return {
                "success": False,
                "error": {
                    "code": "MISSING_PARAMETER",
                    "message": "Address parameter is required",
                },
            }

        try:
            port = self.get_instance_port(port)
        except ValueError as e:
            return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

        # Try setting as function comment first
        try:
            payload = {"comment": comment}
            response = self.safe_patch(port, f"functions/{address}", payload)
            if response.get("success", False):
                return self.simplify_response(response)
        except Exception:
            # Deliberate best-effort: fall through to the pre-comment path.
            pass

        # Fallback to pre-comment
        return self.comments_set(
            address=address,
            comment=comment,
            comment_type="pre",
            port=port,
        )
|
||||||
240
src/ghydramcp/mixins/base.py
Normal file
240
src/ghydramcp/mixins/base.py
Normal file
@ -0,0 +1,240 @@
|
|||||||
|
"""Base mixin class for GhydraMCP domain mixins.
|
||||||
|
|
||||||
|
Provides shared state and utilities for all domain mixins.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
from threading import Lock
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import MCPMixin
|
||||||
|
|
||||||
|
from ..config import get_config
|
||||||
|
from ..core.http_client import safe_get, safe_post, safe_put, safe_patch, safe_delete, simplify_response
|
||||||
|
from ..core.pagination import get_cursor_manager, paginate_response
|
||||||
|
from ..core.logging import log_info, log_debug, log_warning, log_error
|
||||||
|
|
||||||
|
|
||||||
|
class GhydraMixinBase(MCPMixin):
    """Base class for GhydraMCP domain mixins.

    Provides shared instance state and common utilities.
    All domain mixins should inherit from this class.
    """

    # Shared state across all mixins — stored on the class, so every
    # mixin subclass sees the same registry of Ghidra instances.
    _instances: Dict[int, Dict[str, Any]] = {}
    # Guards mutation of _instances (reads in get_instance_port are
    # unguarded — benign membership check only).
    _instances_lock = Lock()
    # Port of the "current" working instance, or None if unset.
    _current_port: Optional[int] = None

    def __init__(self):
        """Initialize the mixin with shared state."""
        # All state is class-level; nothing to do per instance.
        pass

    @classmethod
    def get_current_port(cls) -> Optional[int]:
        """Get the current working instance port."""
        return cls._current_port

    @classmethod
    def set_current_port(cls, port: int) -> None:
        """Set the current working instance port."""
        cls._current_port = port

    @classmethod
    def get_instance_port(cls, port: Optional[int] = None) -> int:
        """Get instance port, using current if not specified.

        Resolution order: explicit argument, then the current working
        port, then the first port of the configured quick-discovery
        range (only if an instance is registered there).

        Args:
            port: Explicit port (optional)

        Returns:
            Port number to use

        Raises:
            ValueError: If no port specified and no current instance set
        """
        if port is not None:
            return port
        if cls._current_port is not None:
            return cls._current_port
        config = get_config()
        # Try default port
        default_port = config.quick_discovery_range.start
        # NOTE(review): membership test without _instances_lock —
        # accepted as a benign race.
        if default_port in cls._instances:
            return default_port
        raise ValueError(
            "No Ghidra instance specified. Use instances_use(port) to set a working instance, "
            "or pass port= parameter explicitly."
        )

    @classmethod
    def register_instance(cls, port: int, url: Optional[str] = None) -> str:
        """Register a Ghidra instance.

        Verifies the instance responds and speaks a compatible API
        version before adding it to the registry.

        Args:
            port: Port number
            url: Optional URL override

        Returns:
            Status message
        """
        config = get_config()
        if url is None:
            url = f"http://{config.ghidra_host}:{port}"

        # Verify instance is responsive
        try:
            # GET on the instance root; the HTTP call happens outside
            # the lock so a slow instance does not block other callers.
            response = safe_get(port, "")
            if not response.get("success", False):
                return f"Failed to connect to Ghidra instance on port {port}"

            # Check API version
            api_version = response.get("api_version", 0)
            if api_version < config.expected_api_version:
                return (
                    f"API version mismatch: got {api_version}, "
                    f"expected {config.expected_api_version}"
                )

            with cls._instances_lock:
                cls._instances[port] = {
                    "url": url,
                    "project": response.get("project", ""),
                    "file": response.get("file", ""),
                    "registered_at": time.time(),
                }

            return f"Registered Ghidra instance on port {port}"

        except Exception as e:
            # All registration failures are reported as a message, not
            # raised — callers treat this as an informational result.
            return f"Error registering instance: {e}"

    @classmethod
    def unregister_instance(cls, port: int) -> str:
        """Unregister a Ghidra instance.

        Also clears the current working port if it pointed here.

        Args:
            port: Port number

        Returns:
            Status message
        """
        with cls._instances_lock:
            if port in cls._instances:
                del cls._instances[port]
                if cls._current_port == port:
                    cls._current_port = None
                return f"Unregistered Ghidra instance on port {port}"
            return f"No instance registered on port {port}"

    @classmethod
    def list_instances(cls) -> Dict[int, Dict[str, Any]]:
        """Get all registered instances.

        Returns:
            Dict mapping port to instance info
        """
        with cls._instances_lock:
            # Shallow copy so callers cannot mutate the registry;
            # the per-instance info dicts are still shared.
            return dict(cls._instances)

    @classmethod
    def get_instance_info(cls, port: int) -> Optional[Dict[str, Any]]:
        """Get info for a specific instance.

        Args:
            port: Port number

        Returns:
            Instance info dict or None
        """
        with cls._instances_lock:
            return cls._instances.get(port)

    def _get_session_id(self, ctx: Optional[Context]) -> str:
        """Extract session ID from FastMCP context.

        Falls back to "default" when no context or no usable
        identifier is available.

        Args:
            ctx: FastMCP context

        Returns:
            Session identifier string
        """
        if ctx is None:
            return "default"

        # Try various context attributes — FastMCP versions differ in
        # which identifier they expose.
        if hasattr(ctx, "session") and ctx.session:
            return str(ctx.session)
        if hasattr(ctx, "client_id") and ctx.client_id:
            return str(ctx.client_id)
        if hasattr(ctx, "request_id") and ctx.request_id:
            return f"req-{ctx.request_id}"

        return "default"

    # Convenience methods for subclasses — thin wrappers over the
    # core.http_client module so mixins call them via self.
    def safe_get(self, port: int, endpoint: str, params: Optional[Dict] = None) -> Dict:
        """Make GET request to Ghidra instance."""
        return safe_get(port, endpoint, params)

    def safe_post(self, port: int, endpoint: str, data: Any) -> Dict:
        """Make POST request to Ghidra instance."""
        return safe_post(port, endpoint, data)

    def safe_put(self, port: int, endpoint: str, data: Dict) -> Dict:
        """Make PUT request to Ghidra instance."""
        return safe_put(port, endpoint, data)

    def safe_patch(self, port: int, endpoint: str, data: Dict) -> Dict:
        """Make PATCH request to Ghidra instance."""
        return safe_patch(port, endpoint, data)

    def safe_delete(self, port: int, endpoint: str) -> Dict:
        """Make DELETE request to Ghidra instance."""
        return safe_delete(port, endpoint)

    def simplify_response(self, response: Dict) -> Dict:
        """Simplify HATEOAS response."""
        return simplify_response(response)

    def paginate_response(
        self,
        data: list,
        query_params: Dict,
        tool_name: str,
        session_id: str = "default",
        page_size: int = 50,
        grep: Optional[str] = None,
        grep_ignorecase: bool = True,
        return_all: bool = False,
    ) -> Dict:
        """Create paginated response."""
        # Pure pass-through to core.pagination.paginate_response.
        return paginate_response(
            data=data,
            query_params=query_params,
            tool_name=tool_name,
            session_id=session_id,
            page_size=page_size,
            grep=grep,
            grep_ignorecase=grep_ignorecase,
            return_all=return_all,
        )

    # Async logging helpers — forward to core.logging with the context.
    async def log_info(self, ctx: Optional[Context], message: str) -> None:
        """Log info message."""
        await log_info(ctx, message)

    async def log_debug(self, ctx: Optional[Context], message: str) -> None:
        """Log debug message."""
        await log_debug(ctx, message)

    async def log_warning(self, ctx: Optional[Context], message: str) -> None:
        """Log warning message."""
        await log_warning(ctx, message)

    async def log_error(self, ctx: Optional[Context], message: str) -> None:
        """Log error message."""
        await log_error(ctx, message)
|
||||||
174
src/ghydramcp/mixins/cursors.py
Normal file
174
src/ghydramcp/mixins/cursors.py
Normal file
@ -0,0 +1,174 @@
|
|||||||
|
"""Cursor management mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for managing pagination cursors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..core.pagination import get_cursor_manager
|
||||||
|
|
||||||
|
|
||||||
|
class CursorsMixin(GhydraMixinBase):
    """Mixin for cursor management.

    Provides tools for navigating paginated results.
    """

    @mcp_tool()
    def cursor_next(
        self, cursor_id: str, ctx: Optional[Context] = None
    ) -> Dict[str, Any]:
        """Get the next page of results for a cursor.

        Args:
            cursor_id: The cursor identifier from a previous paginated response
            ctx: FastMCP context (auto-injected)

        Returns:
            Next page of results with updated pagination info
        """
        session_id = self._get_session_id(ctx)
        cursor_manager = get_cursor_manager()

        # Get and advance cursor — the lookup is session-scoped, so a
        # cursor created by another session is reported as not found.
        state = cursor_manager.get_cursor(cursor_id, session_id)
        if not state:
            return {
                "success": False,
                "error": {
                    "code": "CURSOR_NOT_FOUND",
                    "message": f"Cursor '{cursor_id}' not found or expired. "
                    "Cursors expire after 5 minutes of inactivity.",
                },
            }

        # Advance to next page
        state = cursor_manager.advance_cursor(cursor_id, session_id)
        if not state:
            return {
                "success": False,
                "error": {
                    "code": "CURSOR_ADVANCE_FAILED",
                    "message": "Failed to advance cursor",
                },
            }

        current_page = cursor_manager.get_page(state)
        # Only hand the cursor back while more pages remain; None tells
        # the client the cursor is exhausted.
        response_cursor = cursor_id if state.has_more else None

        response = {
            "success": True,
            "result": current_page,
            "pagination": {
                "cursor_id": response_cursor,
                "session_id": session_id,
                "total_count": state.total_count,
                "filtered_count": state.filtered_count,
                "page_size": state.page_size,
                "current_page": state.current_page,
                "total_pages": state.total_pages,
                "has_more": state.has_more,
                "grep_pattern": state.grep_pattern,
                "items_returned": len(current_page),
            },
        }

        # LLM-friendly hint telling the caller exactly how to continue.
        if state.has_more:
            remaining = state.filtered_count - (state.current_page * state.page_size)
            response["_message"] = (
                f"Showing {len(current_page)} of {state.filtered_count} items "
                f"(page {state.current_page}/{state.total_pages}). "
                f"To get the next {min(state.page_size, remaining)} items, call: "
                f"cursor_next(cursor_id='{cursor_id}')"
            )
        else:
            response["_message"] = (
                f"Complete: {len(current_page)} items returned (final page)"
            )

        return response

    @mcp_tool()
    def cursor_list(
        self, ctx: Optional[Context] = None, all_sessions: bool = False
    ) -> Dict[str, Any]:
        """List active cursors for the current session.

        Args:
            ctx: FastMCP context (auto-injected)
            all_sessions: Include cursors from all sessions (admin only)

        Returns:
            List of active cursors with their status
        """
        session_id = self._get_session_id(ctx)
        cursor_manager = get_cursor_manager()

        # NOTE(review): all_sessions is honored unconditionally — no
        # admin check is enforced here despite the docstring.
        if all_sessions:
            cursors = cursor_manager.list_cursors()
        else:
            cursors = cursor_manager.list_cursors(session_id)

        return {
            "success": True,
            "cursors": cursors,
            "session_id": session_id,
            "count": len(cursors),
        }

    @mcp_tool()
    def cursor_delete(
        self, cursor_id: str, ctx: Optional[Context] = None
    ) -> Dict[str, Any]:
        """Delete a specific cursor.

        Args:
            cursor_id: The cursor identifier to delete
            ctx: FastMCP context (auto-injected)

        Returns:
            Confirmation of deletion
        """
        session_id = self._get_session_id(ctx)
        cursor_manager = get_cursor_manager()

        # Deletion is session-scoped: another session's cursor cannot
        # be deleted and reports CURSOR_NOT_FOUND.
        deleted = cursor_manager.delete_cursor(cursor_id, session_id)

        if deleted:
            return {
                "success": True,
                "message": f"Cursor '{cursor_id}' deleted",
            }
        else:
            return {
                "success": False,
                "error": {
                    "code": "CURSOR_NOT_FOUND",
                    "message": f"Cursor '{cursor_id}' not found or belongs to another session",
                },
            }

    @mcp_tool()
    def cursor_delete_all(self, ctx: Optional[Context] = None) -> Dict[str, Any]:
        """Delete all cursors for the current session.

        Args:
            ctx: FastMCP context (auto-injected)

        Returns:
            Number of cursors deleted
        """
        session_id = self._get_session_id(ctx)
        cursor_manager = get_cursor_manager()

        count = cursor_manager.delete_session_cursors(session_id)

        return {
            "success": True,
            "message": f"Deleted {count} cursor(s) for session",
            "deleted_count": count,
        }
|
||||||
384
src/ghydramcp/mixins/data.py
Normal file
384
src/ghydramcp/mixins/data.py
Normal file
@ -0,0 +1,384 @@
|
|||||||
|
"""Data mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for data items and strings operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool, mcp_resource
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
class DataMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for data operations.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Listing and searching data items
|
||||||
|
- Creating and modifying data
|
||||||
|
- Working with strings
|
||||||
|
- Setting data types
|
||||||
|
"""
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_list(
|
||||||
|
self,
|
||||||
|
addr: Optional[str] = None,
|
||||||
|
name: Optional[str] = None,
|
||||||
|
name_contains: Optional[str] = None,
|
||||||
|
type: Optional[str] = None,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
page_size: int = 50,
|
||||||
|
grep: Optional[str] = None,
|
||||||
|
grep_ignorecase: bool = True,
|
||||||
|
return_all: bool = False,
|
||||||
|
ctx: Optional[Context] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""List defined data items with filtering and cursor-based pagination.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
addr: Filter by address (hexadecimal)
|
||||||
|
name: Exact name match filter (case-sensitive)
|
||||||
|
name_contains: Substring name filter (case-insensitive)
|
||||||
|
type: Filter by data type (e.g. "string", "dword")
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
page_size: Items per page (default: 50, max: 500)
|
||||||
|
grep: Regex pattern to filter results
|
||||||
|
grep_ignorecase: Case-insensitive grep (default: True)
|
||||||
|
return_all: Return all results without pagination
|
||||||
|
ctx: FastMCP context (auto-injected)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Data items with pagination metadata
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
|
||||||
|
params = {"offset": 0, "limit": 10000}
|
||||||
|
if addr:
|
||||||
|
params["addr"] = addr
|
||||||
|
if name:
|
||||||
|
params["name"] = name
|
||||||
|
if name_contains:
|
||||||
|
params["name_contains"] = name_contains
|
||||||
|
if type:
|
||||||
|
params["type"] = type
|
||||||
|
|
||||||
|
response = self.safe_get(port, "data", params)
|
||||||
|
simplified = self.simplify_response(response)
|
||||||
|
|
||||||
|
if not simplified.get("success", True):
|
||||||
|
return simplified
|
||||||
|
|
||||||
|
all_data = simplified.get("result", [])
|
||||||
|
if not isinstance(all_data, list):
|
||||||
|
all_data = []
|
||||||
|
|
||||||
|
query_params = {
|
||||||
|
"tool": "data_list",
|
||||||
|
"port": port,
|
||||||
|
"addr": addr,
|
||||||
|
"name": name,
|
||||||
|
"name_contains": name_contains,
|
||||||
|
"type": type,
|
||||||
|
"grep": grep,
|
||||||
|
}
|
||||||
|
session_id = self._get_session_id(ctx)
|
||||||
|
|
||||||
|
return self.paginate_response(
|
||||||
|
data=all_data,
|
||||||
|
query_params=query_params,
|
||||||
|
tool_name="data_list",
|
||||||
|
session_id=session_id,
|
||||||
|
page_size=min(page_size, config.max_page_size),
|
||||||
|
grep=grep,
|
||||||
|
grep_ignorecase=grep_ignorecase,
|
||||||
|
return_all=return_all,
|
||||||
|
)
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_list_strings(
|
||||||
|
self,
|
||||||
|
filter: Optional[str] = None,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
page_size: int = 50,
|
||||||
|
grep: Optional[str] = None,
|
||||||
|
grep_ignorecase: bool = True,
|
||||||
|
return_all: bool = False,
|
||||||
|
ctx: Optional[Context] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""List all defined strings in the binary with pagination.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filter: Server-side string content filter
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
page_size: Items per page (default: 50, max: 500)
|
||||||
|
grep: Regex pattern to filter results (e.g., "password|key")
|
||||||
|
grep_ignorecase: Case-insensitive grep (default: True)
|
||||||
|
return_all: Return all strings without pagination
|
||||||
|
ctx: FastMCP context (auto-injected)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of strings with pagination info
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
fetch_limit = 10000 if return_all else max(page_size * 10, 2000)
|
||||||
|
|
||||||
|
params = {"offset": 0, "limit": fetch_limit}
|
||||||
|
if filter:
|
||||||
|
params["filter"] = filter
|
||||||
|
|
||||||
|
response = self.safe_get(port, "strings", params)
|
||||||
|
simplified = self.simplify_response(response)
|
||||||
|
|
||||||
|
if not simplified.get("success", True):
|
||||||
|
return simplified
|
||||||
|
|
||||||
|
result_data = simplified.get("result", [])
|
||||||
|
if not isinstance(result_data, list):
|
||||||
|
return simplified
|
||||||
|
|
||||||
|
query_params = {
|
||||||
|
"tool": "data_list_strings",
|
||||||
|
"port": port,
|
||||||
|
"filter": filter,
|
||||||
|
"grep": grep,
|
||||||
|
}
|
||||||
|
session_id = self._get_session_id(ctx)
|
||||||
|
|
||||||
|
return self.paginate_response(
|
||||||
|
data=result_data,
|
||||||
|
query_params=query_params,
|
||||||
|
tool_name="data_list_strings",
|
||||||
|
session_id=session_id,
|
||||||
|
page_size=min(page_size, config.max_page_size),
|
||||||
|
grep=grep,
|
||||||
|
grep_ignorecase=grep_ignorecase,
|
||||||
|
return_all=return_all,
|
||||||
|
)
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_create(
|
||||||
|
self,
|
||||||
|
address: str,
|
||||||
|
data_type: str,
|
||||||
|
size: Optional[int] = None,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Define a new data item at the specified address.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
address: Memory address in hex format
|
||||||
|
data_type: Data type (e.g. "string", "dword", "byte")
|
||||||
|
size: Optional size in bytes
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Operation result with created data information
|
||||||
|
"""
|
||||||
|
if not address or not data_type:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": {
|
||||||
|
"code": "MISSING_PARAMETER",
|
||||||
|
"message": "Address and data_type parameters are required",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
payload = {"address": address, "type": data_type}
|
||||||
|
if size is not None:
|
||||||
|
payload["size"] = size
|
||||||
|
|
||||||
|
response = self.safe_post(port, "data", payload)
|
||||||
|
return self.simplify_response(response)
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_rename(
|
||||||
|
self,
|
||||||
|
address: str,
|
||||||
|
name: str,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Rename a data item.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
address: Memory address in hex format
|
||||||
|
name: New name for the data item
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Operation result
|
||||||
|
"""
|
||||||
|
if not address or not name:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": {
|
||||||
|
"code": "MISSING_PARAMETER",
|
||||||
|
"message": "Address and name parameters are required",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
payload = {"address": address, "newName": name}
|
||||||
|
response = self.safe_post(port, "data", payload)
|
||||||
|
return self.simplify_response(response)
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_delete(
|
||||||
|
self,
|
||||||
|
address: str,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Delete data at the specified address.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
address: Memory address in hex format
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Operation result
|
||||||
|
"""
|
||||||
|
if not address:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": {
|
||||||
|
"code": "MISSING_PARAMETER",
|
||||||
|
"message": "Address parameter is required",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
payload = {"address": address, "action": "delete"}
|
||||||
|
response = self.safe_post(port, "data/delete", payload)
|
||||||
|
return self.simplify_response(response)
|
||||||
|
|
||||||
|
@mcp_tool()
|
||||||
|
def data_set_type(
|
||||||
|
self,
|
||||||
|
address: str,
|
||||||
|
data_type: str,
|
||||||
|
port: Optional[int] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Set the data type of a data item.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
address: Memory address in hex format
|
||||||
|
data_type: Data type name (e.g. "uint32_t", "char[10]")
|
||||||
|
port: Ghidra instance port (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Operation result
|
||||||
|
"""
|
||||||
|
if not address or not data_type:
|
||||||
|
return {
|
||||||
|
"success": False,
|
||||||
|
"error": {
|
||||||
|
"code": "MISSING_PARAMETER",
|
||||||
|
"message": "Address and data_type parameters are required",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}
|
||||||
|
|
||||||
|
payload = {"address": address, "type": data_type}
|
||||||
|
response = self.safe_post(port, "data/type", payload)
|
||||||
|
return self.simplify_response(response)
|
||||||
|
|
||||||
|
# Resources
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/strings")
|
||||||
|
def resource_strings_list(self, port: Optional[int] = None) -> Dict[str, Any]:
|
||||||
|
"""MCP Resource: List strings (capped).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
port: Ghidra instance port
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of strings (capped at 1000)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"error": str(e)}
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
cap = config.resource_caps.get("strings", 1000)
|
||||||
|
|
||||||
|
response = self.safe_get(port, "strings", {"limit": cap})
|
||||||
|
simplified = self.simplify_response(response)
|
||||||
|
|
||||||
|
if not simplified.get("success", True):
|
||||||
|
return simplified
|
||||||
|
|
||||||
|
strings = simplified.get("result", [])
|
||||||
|
if not isinstance(strings, list):
|
||||||
|
strings = []
|
||||||
|
|
||||||
|
return {
|
||||||
|
"strings": strings[:cap],
|
||||||
|
"count": len(strings),
|
||||||
|
"capped_at": cap if len(strings) >= cap else None,
|
||||||
|
"_hint": "Use data_list_strings() tool for full pagination" if len(strings) >= cap else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/data")
|
||||||
|
def resource_data_list(self, port: Optional[int] = None) -> Dict[str, Any]:
|
||||||
|
"""MCP Resource: List data items (capped).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
port: Ghidra instance port
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of data items (capped at 1000)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
port = self.get_instance_port(port)
|
||||||
|
except ValueError as e:
|
||||||
|
return {"error": str(e)}
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
cap = config.resource_caps.get("data", 1000)
|
||||||
|
|
||||||
|
response = self.safe_get(port, "data", {"limit": cap})
|
||||||
|
simplified = self.simplify_response(response)
|
||||||
|
|
||||||
|
if not simplified.get("success", True):
|
||||||
|
return simplified
|
||||||
|
|
||||||
|
data_items = simplified.get("result", [])
|
||||||
|
if not isinstance(data_items, list):
|
||||||
|
data_items = []
|
||||||
|
|
||||||
|
return {
|
||||||
|
"data": data_items[:cap],
|
||||||
|
"count": len(data_items),
|
||||||
|
"capped_at": cap if len(data_items) >= cap else None,
|
||||||
|
"_hint": "Use data_list() tool for full pagination" if len(data_items) >= cap else None,
|
||||||
|
}
|
||||||
656
src/ghydramcp/mixins/docker.py
Normal file
656
src/ghydramcp/mixins/docker.py
Normal file
@ -0,0 +1,656 @@
|
|||||||
|
"""Docker management mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for managing Ghidra Docker containers programmatically.
|
||||||
|
Allows the MCP server to automatically start containers when Ghidra isn't available.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import MCPMixin, mcp_tool
|
||||||
|
|
||||||
|
from ..config import get_config, get_docker_config
|
||||||
|
|
||||||
|
|
||||||
|
class DockerMixin(MCPMixin):
|
||||||
|
"""Docker container management for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools to start, stop, and manage Ghidra containers
|
||||||
|
with the GhydraMCP plugin pre-installed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Track running containers
|
||||||
|
_containers: Dict[str, Dict[str, Any]] = {}
|
||||||
|
|
||||||
|
    def __init__(self):
        """Initialize Docker mixin.

        Probes for the ``docker`` CLI at construction time.

        NOTE(review): the boolean result of the probe is discarded here —
        every tool re-checks availability on entry, so this call is
        effectively a warm-up; confirm whether the result was meant to be
        cached on the instance.
        """
        self._check_docker_available()
|
||||||
|
|
||||||
|
def _check_docker_available(self) -> bool:
|
||||||
|
"""Check if Docker is available on the system."""
|
||||||
|
return shutil.which("docker") is not None
|
||||||
|
|
||||||
|
def _run_docker_cmd(
|
||||||
|
self, args: List[str], check: bool = True, capture: bool = True
|
||||||
|
) -> subprocess.CompletedProcess:
|
||||||
|
"""Run a docker command.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
args: Command arguments (after 'docker')
|
||||||
|
check: Raise exception on non-zero exit
|
||||||
|
capture: Capture stdout/stderr
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
CompletedProcess result
|
||||||
|
"""
|
||||||
|
cmd = ["docker"] + args
|
||||||
|
return subprocess.run(
|
||||||
|
cmd,
|
||||||
|
check=check,
|
||||||
|
capture_output=capture,
|
||||||
|
text=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _run_compose_cmd(
|
||||||
|
self,
|
||||||
|
args: List[str],
|
||||||
|
project_dir: Optional[Path] = None,
|
||||||
|
check: bool = True,
|
||||||
|
capture: bool = True,
|
||||||
|
) -> subprocess.CompletedProcess:
|
||||||
|
"""Run a docker compose command.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
args: Command arguments (after 'docker compose')
|
||||||
|
project_dir: Directory containing docker-compose.yml
|
||||||
|
check: Raise exception on non-zero exit
|
||||||
|
capture: Capture stdout/stderr
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
CompletedProcess result
|
||||||
|
"""
|
||||||
|
cmd = ["docker", "compose"]
|
||||||
|
|
||||||
|
# Use project directory if specified
|
||||||
|
if project_dir:
|
||||||
|
cmd.extend(["-f", str(project_dir / "docker-compose.yml")])
|
||||||
|
|
||||||
|
cmd.extend(args)
|
||||||
|
|
||||||
|
env = os.environ.copy()
|
||||||
|
if project_dir:
|
||||||
|
env["COMPOSE_PROJECT_NAME"] = "ghydramcp"
|
||||||
|
|
||||||
|
return subprocess.run(
|
||||||
|
cmd,
|
||||||
|
check=check,
|
||||||
|
capture_output=capture,
|
||||||
|
text=True,
|
||||||
|
cwd=project_dir,
|
||||||
|
env=env,
|
||||||
|
)
|
||||||
|
|
||||||
|
@mcp_tool(
|
||||||
|
name="docker_status",
|
||||||
|
description="Check Docker availability and running GhydraMCP containers",
|
||||||
|
)
|
||||||
|
async def docker_status(self, ctx: Optional[Context] = None) -> Dict[str, Any]:
|
||||||
|
"""Check Docker status and list running GhydraMCP containers.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Status information including:
|
||||||
|
- docker_available: Whether Docker is installed
|
||||||
|
- docker_running: Whether Docker daemon is running
|
||||||
|
- containers: List of GhydraMCP containers with their status
|
||||||
|
- images: Available GhydraMCP images
|
||||||
|
"""
|
||||||
|
result = {
|
||||||
|
"docker_available": False,
|
||||||
|
"docker_running": False,
|
||||||
|
"containers": [],
|
||||||
|
"images": [],
|
||||||
|
"compose_available": False,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Check if docker is installed
|
||||||
|
if not self._check_docker_available():
|
||||||
|
return result
|
||||||
|
|
||||||
|
result["docker_available"] = True
|
||||||
|
|
||||||
|
# Check if docker daemon is running
|
||||||
|
try:
|
||||||
|
self._run_docker_cmd(["info"], check=True)
|
||||||
|
result["docker_running"] = True
|
||||||
|
except (subprocess.CalledProcessError, FileNotFoundError):
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Check for docker compose
|
||||||
|
try:
|
||||||
|
self._run_docker_cmd(["compose", "version"], check=True)
|
||||||
|
result["compose_available"] = True
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# List GhydraMCP containers
|
||||||
|
try:
|
||||||
|
ps_result = self._run_docker_cmd(
|
||||||
|
[
|
||||||
|
"ps",
|
||||||
|
"-a",
|
||||||
|
"--filter",
|
||||||
|
"label=org.opencontainers.image.title=ghydramcp",
|
||||||
|
"--format",
|
||||||
|
"{{.ID}}\t{{.Names}}\t{{.Status}}\t{{.Ports}}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
for line in ps_result.stdout.strip().split("\n"):
|
||||||
|
if line:
|
||||||
|
parts = line.split("\t")
|
||||||
|
if len(parts) >= 3:
|
||||||
|
result["containers"].append(
|
||||||
|
{
|
||||||
|
"id": parts[0],
|
||||||
|
"name": parts[1],
|
||||||
|
"status": parts[2],
|
||||||
|
"ports": parts[3] if len(parts) > 3 else "",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# Also check by name pattern
|
||||||
|
try:
|
||||||
|
ps_result = self._run_docker_cmd(
|
||||||
|
[
|
||||||
|
"ps",
|
||||||
|
"-a",
|
||||||
|
"--filter",
|
||||||
|
"name=ghydramcp",
|
||||||
|
"--format",
|
||||||
|
"{{.ID}}\t{{.Names}}\t{{.Status}}\t{{.Ports}}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
existing_ids = {c["id"] for c in result["containers"]}
|
||||||
|
for line in ps_result.stdout.strip().split("\n"):
|
||||||
|
if line:
|
||||||
|
parts = line.split("\t")
|
||||||
|
if len(parts) >= 3 and parts[0] not in existing_ids:
|
||||||
|
result["containers"].append(
|
||||||
|
{
|
||||||
|
"id": parts[0],
|
||||||
|
"name": parts[1],
|
||||||
|
"status": parts[2],
|
||||||
|
"ports": parts[3] if len(parts) > 3 else "",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# List GhydraMCP images
|
||||||
|
try:
|
||||||
|
images_result = self._run_docker_cmd(
|
||||||
|
[
|
||||||
|
"images",
|
||||||
|
"--filter",
|
||||||
|
"reference=ghydramcp*",
|
||||||
|
"--format",
|
||||||
|
"{{.Repository}}:{{.Tag}}\t{{.Size}}\t{{.CreatedSince}}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
for line in images_result.stdout.strip().split("\n"):
|
||||||
|
if line:
|
||||||
|
parts = line.split("\t")
|
||||||
|
if len(parts) >= 2:
|
||||||
|
result["images"].append(
|
||||||
|
{
|
||||||
|
"name": parts[0],
|
||||||
|
"size": parts[1],
|
||||||
|
"created": parts[2] if len(parts) > 2 else "",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except subprocess.CalledProcessError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
    @mcp_tool(
        name="docker_start",
        description="Start a GhydraMCP Docker container to analyze a binary",
    )
    async def docker_start(
        self,
        binary_path: str,
        port: int = 8192,
        memory: str = "2G",
        name: Optional[str] = None,
        ctx: Optional[Context] = None,
    ) -> Dict[str, Any]:
        """Start a GhydraMCP Docker container for binary analysis.

        This creates a new Ghidra instance in Docker with the GhydraMCP
        plugin pre-installed. The binary will be imported and analyzed,
        then the HTTP API will be available on the specified port.

        Args:
            binary_path: Path to the binary file to analyze
            port: Port to expose the HTTP API (default: 8192)
            memory: Max JVM heap memory (default: 2G)
            name: Container name (auto-generated if not specified)

        Returns:
            Container info including ID, name, and API URL
        """
        if not self._check_docker_available():
            return {"error": "Docker is not available on this system"}

        # Verify binary exists (resolved to an absolute path so the parent
        # directory can be bind-mounted below).
        binary_file = Path(binary_path).resolve()
        if not binary_file.exists():
            return {"error": f"Binary not found: {binary_path}"}

        # Generate container name if not specified
        if name is None:
            name = f"ghydramcp-{binary_file.stem}-{port}"

        # Clean up invalid characters in container name: docker names allow
        # only alphanumerics plus "-" and "_"; everything else becomes "-".
        name = "".join(c if c.isalnum() or c in "-_" else "-" for c in name)

        try:
            # Check if container with this name already exists.
            # NOTE(review): docker's `name=` filter is a substring match, so
            # this may report a false conflict when an existing container
            # merely contains this name — confirm whether anchored matching
            # (name=^...$) was intended.
            check_result = self._run_docker_cmd(
                ["ps", "-a", "-q", "-f", f"name={name}"], check=False
            )
            if check_result.stdout.strip():
                return {
                    "error": f"Container '{name}' already exists. Stop it first with docker_stop."
                }

            # Check if port is already in use by another running container.
            port_check = self._run_docker_cmd(
                ["ps", "-q", "-f", f"publish={port}"], check=False
            )
            if port_check.stdout.strip():
                return {
                    "error": f"Port {port} is already in use by another container"
                }

            # Start the container: detached, host port mapped to the plugin's
            # fixed in-container port 8192, binary's directory mounted
            # read-only, JVM heap passed via environment.
            run_result = self._run_docker_cmd(
                [
                    "run",
                    "-d",
                    "--name",
                    name,
                    "-p",
                    f"{port}:8192",
                    "-v",
                    f"{binary_file.parent}:/binaries:ro",
                    "-e",
                    f"GHYDRA_MAXMEM={memory}",
                    "ghydramcp:latest",
                    f"/binaries/{binary_file.name}",
                ]
            )

            # `docker run -d` prints the new container's full ID.
            container_id = run_result.stdout.strip()

            # Track the container so docker_stop can clean up bookkeeping.
            self._containers[container_id] = {
                "name": name,
                "port": port,
                "binary": str(binary_file),
                "memory": memory,
            }

            return {
                "success": True,
                # Short (12-char) ID, matching docker CLI display convention.
                "container_id": container_id[:12],
                "name": name,
                "port": port,
                "api_url": f"http://localhost:{port}/",
                "binary": str(binary_file),
                "message": (
                    f"Container started. Analysis in progress. "
                    f"API will be available at http://localhost:{port}/ once analysis completes. "
                    f"Use docker_logs('{name}') to monitor progress."
                ),
            }

        except subprocess.CalledProcessError as e:
            return {"error": f"Failed to start container: {e.stderr or e.stdout}"}
|
||||||
|
|
||||||
|
@mcp_tool(
|
||||||
|
name="docker_stop",
|
||||||
|
description="Stop a running GhydraMCP Docker container",
|
||||||
|
)
|
||||||
|
async def docker_stop(
|
||||||
|
self, name_or_id: str, remove: bool = True, ctx: Optional[Context] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Stop a GhydraMCP Docker container.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name_or_id: Container name or ID
|
||||||
|
remove: Also remove the container (default: True)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Status message
|
||||||
|
"""
|
||||||
|
if not self._check_docker_available():
|
||||||
|
return {"error": "Docker is not available on this system"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Stop the container
|
||||||
|
self._run_docker_cmd(["stop", name_or_id])
|
||||||
|
|
||||||
|
if remove:
|
||||||
|
self._run_docker_cmd(["rm", name_or_id])
|
||||||
|
# Remove from tracking
|
||||||
|
self._containers = {
|
||||||
|
k: v
|
||||||
|
for k, v in self._containers.items()
|
||||||
|
if not (k.startswith(name_or_id) or v.get("name") == name_or_id)
|
||||||
|
}
|
||||||
|
return {"success": True, "message": f"Container '{name_or_id}' stopped and removed"}
|
||||||
|
else:
|
||||||
|
return {"success": True, "message": f"Container '{name_or_id}' stopped"}
|
||||||
|
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
return {"error": f"Failed to stop container: {e.stderr or e.stdout}"}
|
||||||
|
|
||||||
|
@mcp_tool(
|
||||||
|
name="docker_logs",
|
||||||
|
description="Get logs from a GhydraMCP Docker container",
|
||||||
|
)
|
||||||
|
async def docker_logs(
|
||||||
|
self,
|
||||||
|
name_or_id: str,
|
||||||
|
tail: int = 100,
|
||||||
|
follow: bool = False,
|
||||||
|
ctx: Optional[Context] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get logs from a GhydraMCP container.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name_or_id: Container name or ID
|
||||||
|
tail: Number of lines to show (default: 100)
|
||||||
|
follow: Whether to follow log output (not recommended for MCP)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Container logs
|
||||||
|
"""
|
||||||
|
if not self._check_docker_available():
|
||||||
|
return {"error": "Docker is not available on this system"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
args = ["logs", "--tail", str(tail)]
|
||||||
|
if follow:
|
||||||
|
args.append("-f")
|
||||||
|
args.append(name_or_id)
|
||||||
|
|
||||||
|
result = self._run_docker_cmd(args)
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"container": name_or_id,
|
||||||
|
"logs": result.stdout + result.stderr,
|
||||||
|
}
|
||||||
|
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
return {"error": f"Failed to get logs: {e.stderr or e.stdout}"}
|
||||||
|
|
||||||
|
@mcp_tool(
|
||||||
|
name="docker_build",
|
||||||
|
description="Build the GhydraMCP Docker image from source",
|
||||||
|
)
|
||||||
|
async def docker_build(
|
||||||
|
self,
|
||||||
|
tag: str = "latest",
|
||||||
|
no_cache: bool = False,
|
||||||
|
project_dir: Optional[str] = None,
|
||||||
|
ctx: Optional[Context] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Build the GhydraMCP Docker image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tag: Image tag (default: 'latest')
|
||||||
|
no_cache: Build without using cache
|
||||||
|
project_dir: Path to GhydraMCP project (auto-detected if not specified)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Build status
|
||||||
|
"""
|
||||||
|
if not self._check_docker_available():
|
||||||
|
return {"error": "Docker is not available on this system"}
|
||||||
|
|
||||||
|
# Find project directory
|
||||||
|
if project_dir:
|
||||||
|
proj_path = Path(project_dir)
|
||||||
|
else:
|
||||||
|
# Try to find docker/Dockerfile relative to this file
|
||||||
|
module_dir = Path(__file__).parent.parent.parent.parent
|
||||||
|
if (module_dir / "docker" / "Dockerfile").exists():
|
||||||
|
proj_path = module_dir
|
||||||
|
else:
|
||||||
|
return {
|
||||||
|
"error": "Could not find GhydraMCP project directory. Please specify project_dir."
|
||||||
|
}
|
||||||
|
|
||||||
|
dockerfile = proj_path / "docker" / "Dockerfile"
|
||||||
|
if not dockerfile.exists():
|
||||||
|
return {"error": f"Dockerfile not found at {dockerfile}"}
|
||||||
|
|
||||||
|
try:
|
||||||
|
args = [
|
||||||
|
"build",
|
||||||
|
"-t",
|
||||||
|
f"ghydramcp:{tag}",
|
||||||
|
"-f",
|
||||||
|
str(dockerfile),
|
||||||
|
]
|
||||||
|
if no_cache:
|
||||||
|
args.append("--no-cache")
|
||||||
|
args.append(str(proj_path))
|
||||||
|
|
||||||
|
# Run build (this can take a while)
|
||||||
|
result = self._run_docker_cmd(args, capture=True)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"success": True,
|
||||||
|
"image": f"ghydramcp:{tag}",
|
||||||
|
"message": f"Successfully built ghydramcp:{tag}",
|
||||||
|
"output": result.stdout[-2000:] if len(result.stdout) > 2000 else result.stdout,
|
||||||
|
}
|
||||||
|
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
return {"error": f"Build failed: {e.stderr or e.stdout}"}
|
||||||
|
|
||||||
|
@mcp_tool(
    name="docker_health",
    description="Check if a GhydraMCP container's API is responding",
)
async def docker_health(
    self, port: int = 8192, timeout: float = 5.0, ctx: Optional[Context] = None
) -> Dict[str, Any]:
    """Check if a GhydraMCP container's API is healthy.

    Issues a GET against the container's HTTP root on localhost and reports
    the program/API metadata the endpoint returns.

    Args:
        port: API port to check (default: 8192)
        timeout: Request timeout in seconds
        ctx: FastMCP context (accepted for tool-signature parity)

    Returns:
        Health status and API info if available
    """
    import asyncio
    import urllib.request
    import urllib.error
    import json

    url = f"http://localhost:{port}/"

    def _probe() -> Dict[str, Any]:
        # Blocking HTTP round-trip; must run off the event loop.
        req = urllib.request.Request(url)
        with urllib.request.urlopen(req, timeout=timeout) as response:
            return json.loads(response.read().decode())

    try:
        # Bug fix: the original called urllib synchronously inside this
        # async function, blocking the event loop for up to `timeout`
        # seconds per probe. Run the blocking call in a worker thread.
        data = await asyncio.to_thread(_probe)
        return {
            "healthy": True,
            "port": port,
            "api_version": data.get("api_version"),
            "program": data.get("program"),
            "file": data.get("file"),
        }
    except urllib.error.URLError as e:
        # Connection refused / timed out: typical while the container is
        # still booting or Ghidra is mid-analysis.
        return {
            "healthy": False,
            "port": port,
            "error": str(e.reason),
            "message": "Container may still be starting or analyzing binary",
        }
    except Exception as e:
        # Anything else (bad JSON, truncated response, ...) is reported
        # as unhealthy rather than raised, matching the tool contract.
        return {
            "healthy": False,
            "port": port,
            "error": str(e),
        }
|
||||||
|
|
||||||
|
@mcp_tool(
    name="docker_wait",
    description="Wait for a GhydraMCP container to become healthy",
)
async def docker_wait(
    self,
    port: int = 8192,
    timeout: float = 300.0,
    interval: float = 5.0,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """Wait for a GhydraMCP container to become healthy.

    Polls docker_health() until it reports healthy or the deadline passes.

    Args:
        port: API port to check (default: 8192)
        timeout: Maximum time to wait in seconds (default: 300)
        interval: Polling interval in seconds (default: 5)
        ctx: FastMCP context (forwarded to docker_health)

    Returns:
        Health status once healthy (with "waited_seconds" added), or an
        error payload on timeout carrying the last probe error seen.
    """
    import time

    # Bug fix: use a monotonic clock for the deadline so system clock
    # adjustments (NTP steps, suspend/resume) cannot shorten or extend
    # the wait window the way time.time() could.
    start_time = time.monotonic()
    last_error = None

    while (time.monotonic() - start_time) < timeout:
        result = await self.docker_health(port=port, timeout=interval, ctx=ctx)
        if result.get("healthy"):
            result["waited_seconds"] = round(time.monotonic() - start_time, 1)
            return result
        # Remember the most recent failure reason for the timeout report.
        last_error = result.get("error")
        await asyncio.sleep(interval)

    return {
        "healthy": False,
        "port": port,
        "error": f"Timeout after {timeout}s waiting for container",
        "last_error": last_error,
    }
|
||||||
|
|
||||||
|
@mcp_tool(
    name="docker_auto_start",
    description="Automatically start a GhydraMCP container if no Ghidra instance is available",
)
async def docker_auto_start(
    self,
    binary_path: str,
    port: int = 8192,
    wait: bool = True,
    timeout: float = 300.0,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """Automatically start a Docker container if no Ghidra instance is available.

    This is the main entry point for automatic Docker management:
    1. Checks if a Ghidra instance is already running on the port
    2. If not, starts a new Docker container
    3. Optionally waits for the container to become healthy
    4. Returns connection info for the instance

    Args:
        binary_path: Path to the binary to analyze
        port: Port for the HTTP API (default: 8192)
        wait: Wait for container to be ready (default: True)
        timeout: Max wait time in seconds (default: 300)
        ctx: FastMCP context, forwarded to the helper tools

    Returns:
        Instance connection info
    """
    # First, check if there's already a Ghidra instance on this port
    health = await self.docker_health(port=port, ctx=ctx)
    if health.get("healthy"):
        # Something is already serving the API: reuse it, don't spawn.
        return {
            "source": "existing",
            "port": port,
            "api_url": f"http://localhost:{port}/",
            "program": health.get("program"),
            "message": "Using existing Ghidra instance",
        }

    # Check if Docker is available
    status = await self.docker_status(ctx=ctx)
    if not status.get("docker_running"):
        return {
            "error": "Docker is not available. Please install Docker or start Ghidra manually."
        }

    # Check if we have the image
    # NOTE(review): substring match on image names -- presumably matches
    # tags like "ghydramcp:latest"; confirm docker_status() name format.
    if not any("ghydramcp" in img.get("name", "") for img in status.get("images", [])):
        return {
            "error": (
                "GhydraMCP Docker image not found. "
                "Build it with docker_build() or 'make build' first."
            )
        }

    # Start a new container
    start_result = await self.docker_start(
        binary_path=binary_path, port=port, ctx=ctx
    )

    if not start_result.get("success"):
        # Propagate the start failure payload to the caller unchanged.
        return start_result

    if wait:
        # Wait for the container to become healthy
        wait_result = await self.docker_wait(port=port, timeout=timeout, ctx=ctx)
        if wait_result.get("healthy"):
            return {
                "source": "docker",
                "container_id": start_result.get("container_id"),
                "container_name": start_result.get("name"),
                "port": port,
                "api_url": f"http://localhost:{port}/",
                "program": wait_result.get("program"),
                "waited_seconds": wait_result.get("waited_seconds"),
                "message": f"Docker container ready after {wait_result.get('waited_seconds')}s",
            }
        else:
            # Started but not serving yet -- initial analysis of a large
            # binary can exceed the wait timeout; surface a warning, not
            # an error, since the container may still come up.
            return {
                "warning": "Container started but not yet healthy",
                "container_id": start_result.get("container_id"),
                "port": port,
                "last_error": wait_result.get("error"),
                "message": "Container may still be analyzing. Check docker_logs() for progress.",
            }

    # wait=False: fire-and-forget; the caller is expected to poll via
    # docker_wait() or docker_health() before using the API.
    return {
        "source": "docker",
        "container_id": start_result.get("container_id"),
        "container_name": start_result.get("name"),
        "port": port,
        "api_url": f"http://localhost:{port}/",
        "message": "Container starting. Use docker_wait() or docker_health() to check status.",
    }
|
||||||
524
src/ghydramcp/mixins/functions.py
Normal file
524
src/ghydramcp/mixins/functions.py
Normal file
@ -0,0 +1,524 @@
|
|||||||
|
"""Functions mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for function analysis, decompilation, and manipulation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool, mcp_resource
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
class FunctionsMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for function operations.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Listing and searching functions
|
||||||
|
- Decompiling functions
|
||||||
|
- Disassembling functions
|
||||||
|
- Renaming functions
|
||||||
|
- Setting function signatures
|
||||||
|
- Managing function variables
|
||||||
|
"""
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_list(
    self,
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """List the program's functions, paginated.

    Fetches the full function list from the Ghidra HTTP API and hands it
    to the shared cursor-pagination helper.

    Args:
        port: Ghidra instance port (optional)
        page_size: Functions per page (default: 50, max: 500)
        grep: Regex pattern to filter function names
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all functions without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Paginated list of functions
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    payload = self.simplify_response(
        self.safe_get(port, "functions", {"limit": 10000})
    )
    if not payload.get("success", True):
        return payload

    items = payload.get("result", [])
    if not isinstance(items, list):
        # Defensive: an unexpected payload shape becomes an empty page.
        items = []

    return self.paginate_response(
        data=items,
        query_params={"tool": "functions_list", "port": port, "grep": grep},
        tool_name="functions_list",
        session_id=self._get_session_id(ctx),
        page_size=min(page_size, get_config().max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_get(
    self,
    name: Optional[str] = None,
    address: Optional[str] = None,
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Look up one function's details by name or address.

    Args:
        name: Function name (mutually exclusive with address)
        address: Function address in hex format
        port: Ghidra instance port (optional)

    Returns:
        Detailed function information
    """
    if not name and not address:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Either name or address parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    # Address takes precedence when both identifiers are supplied.
    endpoint = (
        f"functions/{address}" if address else f"functions/by-name/{quote(name)}"
    )
    return self.simplify_response(self.safe_get(port, endpoint))
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_decompile(
    self,
    name: Optional[str] = None,
    address: Optional[str] = None,
    syntax_tree: bool = False,
    style: str = "normalize",
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """Decompile a function and return its C code, paginated by line.

    Args:
        name: Function name (mutually exclusive with address)
        address: Function address in hex format
        syntax_tree: Include syntax tree (default: False)
        style: Decompiler style (default: "normalize")
        port: Ghidra instance port (optional)
        page_size: Lines per page (default: 50, max: 500)
        grep: Regex pattern to filter lines
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all lines without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Decompiled code with pagination
    """
    if not name and not address:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Either name or address parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    endpoint = (
        f"functions/{address}/decompile"
        if address
        else f"functions/by-name/{quote(name)}/decompile"
    )
    simplified = self.simplify_response(
        self.safe_get(
            port, endpoint, {"syntaxTree": str(syntax_tree).lower(), "style": style}
        )
    )
    if not simplified.get("success", True):
        return simplified

    body = simplified.get("result", {})
    # Newer servers use "decompiled_text"; fall back to legacy "ccode".
    code_text = body.get("decompiled_text", body.get("ccode", ""))
    if not code_text:
        return simplified

    page = self.paginate_response(
        data=code_text.split("\n"),
        query_params={
            "tool": "functions_decompile",
            "port": port,
            "name": name,
            "address": address,
            "grep": grep,
        },
        tool_name="functions_decompile",
        session_id=self._get_session_id(ctx),
        page_size=min(page_size, get_config().max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )

    # Re-join the selected lines into a single text blob for the caller.
    if page.get("success"):
        page["result"] = "\n".join(page.get("result", []))
        page["function_name"] = body.get("name", name or address)

    return page
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_disassemble(
    self,
    name: Optional[str] = None,
    address: Optional[str] = None,
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """Disassemble a function and return its instructions, paginated.

    Args:
        name: Function name (mutually exclusive with address)
        address: Function address in hex format
        port: Ghidra instance port (optional)
        page_size: Instructions per page (default: 50, max: 500)
        grep: Regex pattern to filter instructions
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all instructions without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Disassembly with pagination
    """
    if not name and not address:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Either name or address parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    endpoint = (
        f"functions/{address}/disassembly"
        if address
        else f"functions/by-name/{quote(name)}/disassembly"
    )
    simplified = self.simplify_response(self.safe_get(port, endpoint))
    if not simplified.get("success", True):
        return simplified

    body = simplified.get("result", {})
    text = body.get("disassembly_text", "")
    if not text:
        return simplified

    # Drop blank lines so each paginated row is a real instruction.
    rows = [row for row in text.split("\n") if row.strip()]

    page = self.paginate_response(
        data=rows,
        query_params={
            "tool": "functions_disassemble",
            "port": port,
            "name": name,
            "address": address,
            "grep": grep,
        },
        tool_name="functions_disassemble",
        session_id=self._get_session_id(ctx),
        page_size=min(page_size, get_config().max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )

    # Re-join the selected rows into a single text blob for the caller.
    if page.get("success"):
        page["result"] = "\n".join(page.get("result", []))
        page["function_name"] = body.get("name", name or address)

    return page
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_rename(
    self,
    old_name: Optional[str] = None,
    address: Optional[str] = None,
    new_name: str = "",
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Rename a function via a PATCH to the Ghidra HTTP API.

    Args:
        old_name: Current function name
        address: Function address in hex format
        new_name: New name for the function
        port: Ghidra instance port (optional)

    Returns:
        Operation result
    """

    def _missing(message: str) -> Dict[str, Any]:
        # Shared shape for parameter-validation failures.
        return {
            "success": False,
            "error": {"code": "MISSING_PARAMETER", "message": message},
        }

    if not new_name:
        return _missing("new_name parameter is required")
    if not old_name and not address:
        return _missing("Either old_name or address parameter is required")

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    endpoint = (
        f"functions/{address}" if address else f"functions/by-name/{quote(old_name)}"
    )
    return self.simplify_response(
        self.safe_patch(port, endpoint, {"name": new_name})
    )
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_create(self, address: str, port: Optional[int] = None) -> Dict[str, Any]:
    """Ask Ghidra to define a function at *address*.

    Args:
        address: Memory address in hex format
        port: Ghidra instance port (optional)

    Returns:
        Created function information
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    return self.simplify_response(
        self.safe_post(port, "functions", {"address": address})
    )
|
||||||
|
|
||||||
|
@mcp_tool()
def functions_set_signature(
    self,
    name: Optional[str] = None,
    address: Optional[str] = None,
    signature: str = "",
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Apply a new prototype to a function via a PUT request.

    Args:
        name: Function name
        address: Function address in hex format
        signature: New function signature (e.g., "int foo(char* arg1, int arg2)")
        port: Ghidra instance port (optional)

    Returns:
        Operation result
    """

    def _missing(message: str) -> Dict[str, Any]:
        # Shared shape for parameter-validation failures.
        return {
            "success": False,
            "error": {"code": "MISSING_PARAMETER", "message": message},
        }

    if not signature:
        return _missing("signature parameter is required")
    if not name and not address:
        return _missing("Either name or address parameter is required")

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    endpoint = (
        f"functions/{address}/signature"
        if address
        else f"functions/by-name/{quote(name)}/signature"
    )
    return self.simplify_response(
        self.safe_put(port, endpoint, {"signature": signature})
    )
|
||||||
|
|
||||||
|
# Resources
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/functions")
def resource_functions_list(self, port: Optional[int] = None) -> Dict[str, Any]:
    """MCP Resource: List functions (capped).

    Returns at most the configured resource cap; clients needing the full
    set should use the functions_list() tool instead.

    Args:
        port: Ghidra instance port

    Returns:
        List of functions (capped at 1000 by default)
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"error": str(e)}

    config = get_config()
    cap = config.resource_caps.get("functions", 1000)

    response = self.safe_get(port, "functions", {"limit": cap})
    simplified = self.simplify_response(response)

    if not simplified.get("success", True):
        return simplified

    functions = simplified.get("result", [])
    if not isinstance(functions, list):
        functions = []

    capped = len(functions) >= cap
    return {
        "functions": functions[:cap],
        "count": len(functions),
        "capped_at": cap if capped else None,
        # Fix: the hint string carried a stray f-string prefix with no
        # placeholders (lint F541); it is a plain literal.
        "_hint": "Use functions_list() tool for full pagination" if capped else None,
    }
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/function/decompile/address/{address}")
def resource_decompiled_by_address(
    self, port: Optional[int] = None, address: Optional[str] = None
) -> str:
    """MCP Resource: decompiled code for the function at *address*.

    Args:
        port: Ghidra instance port
        address: Function address

    Returns:
        Decompiled code as text (or an "Error: ..." string on failure)
    """
    if not address:
        return "Error: address is required"

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return f"Error: {e}"

    simplified = self.simplify_response(
        self.safe_get(port, f"functions/{address}/decompile")
    )
    if not simplified.get("success", True):
        # Resources return plain text, so flatten the error payload.
        return f"Error: {simplified.get('error', {}).get('message', 'Unknown error')}"

    body = simplified.get("result", {})
    return body.get("decompiled_text", body.get("ccode", "No decompiled code available"))
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/function/decompile/name/{name}")
def resource_decompiled_by_name(
    self, port: Optional[int] = None, name: Optional[str] = None
) -> str:
    """MCP Resource: decompiled code for the function named *name*.

    Args:
        port: Ghidra instance port
        name: Function name

    Returns:
        Decompiled code as text (or an "Error: ..." string on failure)
    """
    if not name:
        return "Error: name is required"

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return f"Error: {e}"

    simplified = self.simplify_response(
        self.safe_get(port, f"functions/by-name/{quote(name)}/decompile")
    )
    if not simplified.get("success", True):
        # Resources return plain text, so flatten the error payload.
        return f"Error: {simplified.get('error', {}).get('message', 'Unknown error')}"

    body = simplified.get("result", {})
    return body.get("decompiled_text", body.get("ccode", "No decompiled code available"))
|
||||||
298
src/ghydramcp/mixins/instances.py
Normal file
298
src/ghydramcp/mixins/instances.py
Normal file
@ -0,0 +1,298 @@
|
|||||||
|
"""Instance management mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for discovering, registering, and managing Ghidra instances.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import time
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool, mcp_resource
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
from ..core.http_client import safe_get
|
||||||
|
|
||||||
|
|
||||||
|
class InstancesMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for Ghidra instance management.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Discovering Ghidra instances
|
||||||
|
- Registering/unregistering instances
|
||||||
|
- Setting current working instance
|
||||||
|
- Listing available instances
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _discover_instances(
    self,
    port_range: range,
    host: Optional[str] = None,
    timeout: float = 0.5,
) -> Dict[int, Dict[str, Any]]:
    """Discover Ghidra instances by scanning ports.

    Probes each port's HTTP root; a JSON body carrying "_links" or
    "api_version" is treated as a Ghidra HATEOAS API and recorded in the
    shared registry as well as the returned dict.

    Args:
        port_range: Range of ports to scan
        host: Host to scan (defaults to config)
        timeout: Connection timeout per port

    Returns:
        Dict of discovered instances keyed by port
    """
    import requests

    config = get_config()
    if host is None:
        host = config.ghidra_host

    discovered: Dict[int, Dict[str, Any]] = {}

    for port in port_range:
        try:
            response = requests.get(f"http://{host}:{port}/", timeout=timeout)
            if not response.ok:
                continue
            data = response.json()
        except (requests.exceptions.RequestException, ValueError):
            # Fix: the original stacked a narrow Timeout/ConnectionError
            # handler that was made dead by a following bare
            # `except Exception: pass`, which also swallowed real bugs in
            # the registration code inside the same try. Only probe
            # failures (port closed, timeout, non-JSON body) are
            # best-effort skipped now; registration runs outside the try.
            continue

        # Verify it's a Ghidra HATEOAS API (guard against non-dict JSON).
        if isinstance(data, dict) and ("_links" in data or "api_version" in data):
            instance_info = {
                "url": f"http://{host}:{port}",
                "project": data.get("project", ""),
                "file": data.get("file", ""),
                "api_version": data.get("api_version"),
                "discovered_at": time.time(),
            }
            discovered[port] = instance_info

            # Also register it in the shared instance registry.
            with self._instances_lock:
                self._instances[port] = instance_info

    return discovered
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_list(self) -> Dict[str, Any]:
    """List all active Ghidra instances.

    This is the primary instance tool: it performs a quick auto-discovery
    scan on the default host before reporting the registry contents.

    Returns:
        Dict containing 'instances' list with all available Ghidra instances
    """
    # Refresh the registry with a fast scan before listing.
    self._discover_instances(get_config().quick_discovery_range, timeout=0.5)

    with self._instances_lock:
        entries = [
            {
                "port": port,
                "url": info["url"],
                "project": info.get("project", ""),
                "file": info.get("file", ""),
            }
            for port, info in self._instances.items()
        ]

    return {"instances": entries}
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_discover(self, host: Optional[str] = None) -> Dict[str, Any]:
    """Scan a host's full port range for Ghidra instances.

    Use this ONLY to probe a different host; for normal usage
    instances_list() already auto-discovers on the default host.

    Args:
        host: Host to scan for Ghidra instances (default: configured host)

    Returns:
        Dict containing 'instances' list with all available instances
    """
    self._discover_instances(get_config().full_discovery_range, host=host, timeout=0.5)

    with self._instances_lock:
        entries = [
            {
                "port": port,
                "url": info["url"],
                "project": info.get("project", ""),
                "file": info.get("file", ""),
            }
            for port, info in self._instances.items()
        ]

    return {"instances": entries}
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_register(self, port: int, url: Optional[str] = None) -> str:
    """Register a new Ghidra instance.

    Args:
        port: Port number of the Ghidra instance
        url: Optional URL if different from default

    Returns:
        Confirmation message or error
    """
    # Thin delegate to the shared registry helper on the base mixin.
    return self.register_instance(port, url)
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_unregister(self, port: int) -> str:
    """Remove a Ghidra instance from the registry.

    Args:
        port: Port number of the instance to unregister

    Returns:
        Confirmation message
    """
    # Thin delegate to the shared registry helper on the base mixin.
    return self.unregister_instance(port)
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_use(self, port: int) -> str:
    """Set the current working Ghidra instance.

    All subsequent commands will use this instance by default.

    Args:
        port: Port number of the instance to use

    Returns:
        Confirmation message with instance details
    """
    with self._instances_lock:
        if port not in self._instances:
            # Try to register it first
            # NOTE(review): register_instance() is invoked while holding
            # _instances_lock; if that helper acquires the same
            # (non-reentrant) lock internally this deadlocks -- confirm
            # against the base mixin.
            result = self.register_instance(port)
            # NOTE(review): failure detection by substring match on the
            # human-readable message is fragile; verify it stays in sync
            # with register_instance's wording.
            if "Failed" in result or "Error" in result:
                return result

    self.set_current_port(port)

    info = self.get_instance_info(port)
    if info:
        return (
            f"Now using Ghidra instance on port {port}\n"
            f"Project: {info.get('project', 'N/A')}\n"
            f"File: {info.get('file', 'N/A')}"
        )
    return f"Now using Ghidra instance on port {port}"
|
||||||
|
|
||||||
|
@mcp_tool()
def instances_current(self) -> Dict[str, Any]:
    """Get information about the current working instance.

    Returns:
        Dict with current instance information or error message
    """
    port = self.get_current_port()
    if port is None:
        # Consistency fix: every other access to _instances in this mixin
        # is guarded by _instances_lock; snapshot the keys under the lock
        # here as well instead of reading the dict unguarded.
        with self._instances_lock:
            available = list(self._instances.keys())
        return {
            "error": "No current instance set. Use instances_use(port) first.",
            "available_instances": available,
        }

    info = self.get_instance_info(port)
    if info:
        return {
            "port": port,
            "url": info["url"],
            "project": info.get("project", ""),
            "file": info.get("file", ""),
        }

    return {"port": port, "status": "registered but no details available"}
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instances")
def resource_instances_list(self) -> Dict[str, Any]:
    """MCP Resource: enumerate every known Ghidra instance.

    Runs a quick discovery pass first so freshly started instances show
    up without an explicit registration step.
    """
    cfg = get_config()
    # Refresh the registry before answering.
    self._discover_instances(cfg.quick_discovery_range, timeout=0.3)

    with self._instances_lock:
        listing = [
            {
                "port": instance_port,
                "project": meta.get("project", ""),
                "file": meta.get("file", ""),
            }
            for instance_port, meta in self._instances.items()
        ]

    return {
        "instances": listing,
        "count": len(listing),
        "current_port": self.get_current_port(),
    }
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}")
def resource_instance_info(self, port: int) -> Dict[str, Any]:
    """MCP Resource: detailed information about one Ghidra instance.

    Args:
        port: Port number of the instance.

    Returns:
        Simplified instance/program details, or an error payload when
        the port cannot be resolved or the request fails.
    """
    try:
        resolved = self.get_instance_port(port)
    except ValueError as exc:
        return {"error": str(exc)}

    # An empty endpoint path queries the instance root for its metadata.
    raw = self.safe_get(resolved, "")
    if not raw.get("success", True):
        return raw
    return self.simplify_response(raw)
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/summary")
def resource_program_summary(self, port: Optional[int] = None) -> Dict[str, Any]:
    """MCP Resource: Get a summary of the program loaded in a Ghidra instance.

    Args:
        port: Ghidra instance port (optional, uses current if not specified)

    Returns:
        Program summary with metadata plus function and string counts.
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"error": str(e)}

    # Get program info from the instance root endpoint.
    response = self.safe_get(port, "")
    if not response.get("success", True):
        return response

    simplified = self.simplify_response(response)

    # Hoist the nested result payload once; guard against a non-dict
    # "result" value so the .get() calls below cannot raise AttributeError.
    result = simplified.get("result", {})
    if not isinstance(result, dict):
        result = {}

    # Function count: limit=1 keeps the payload small; "size" reports the
    # total collection size (falls back to 0 on failure).
    funcs_response = self.safe_get(port, "functions", {"limit": 1})
    func_count = funcs_response.get("size", 0) if funcs_response.get("success", True) else 0

    # String count, same pattern as above.
    strings_response = self.safe_get(port, "data/strings", {"limit": 1})
    string_count = strings_response.get("size", 0) if strings_response.get("success", True) else 0

    return {
        "program_name": result.get("program_name", simplified.get("file", "")),
        "language": result.get("language", ""),
        "processor": result.get("processor", ""),
        "format": result.get("format", ""),
        "function_count": func_count,
        "string_count": string_count,
        "port": port,
    }
|
||||||
129
src/ghydramcp/mixins/memory.py
Normal file
129
src/ghydramcp/mixins/memory.py
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
"""Memory mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for memory read/write operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
|
||||||
|
|
||||||
|
class MemoryMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for memory operations.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Reading memory bytes
|
||||||
|
- Writing memory bytes (use with caution)
|
||||||
|
"""
|
||||||
|
|
||||||
|
@mcp_tool()
def memory_read(
    self,
    address: str,
    length: int = 16,
    format: str = "hex",
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Read a span of bytes from program memory.

    Args:
        address: Memory address in hex format
        length: Number of bytes to read (default: 16)
        format: Output format - "hex", "base64", or "string" (default: "hex")
        port: Ghidra instance port (optional)

    Returns:
        Memory contents in the requested format, or an error payload.
    """
    if not address:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Address parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    query = {"address": address, "length": length, "format": format}
    simplified = self.simplify_response(self.safe_get(port, "memory", query))

    payload = simplified.get("result")
    if not isinstance(payload, dict):
        # Unexpected shape: hand back the simplified response untouched.
        return simplified

    out: Dict[str, Any] = {
        "success": True,
        "address": payload.get("address", address),
        "length": payload.get("bytesRead", length),
        "format": format,
    }
    # Forward whichever byte representations the backend included.
    for key in ("hexBytes", "rawBytes"):
        if key in payload:
            out[key] = payload[key]
    return out
|
||||||
|
|
||||||
|
@mcp_tool()
def memory_write(
    self,
    address: str,
    bytes_data: str,
    format: str = "hex",
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Write bytes to memory (use with caution).

    Args:
        address: Memory address in hex format
        bytes_data: Data to write (format depends on 'format' parameter)
        format: Input format - "hex", "base64", or "string" (default: "hex")
        port: Ghidra instance port (optional)

    Returns:
        Operation result from the Ghidra instance.
    """
    # Validate required parameters before touching the instance.
    missing = None
    if not address:
        missing = "Address parameter is required"
    elif not bytes_data:
        missing = "Bytes parameter is required"
    if missing:
        return {
            "success": False,
            "error": {"code": "MISSING_PARAMETER", "message": missing},
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    body = {"bytes": bytes_data, "format": format}
    return self.simplify_response(
        self.safe_patch(port, f"programs/current/memory/{address}", body)
    )
|
||||||
419
src/ghydramcp/mixins/structs.py
Normal file
419
src/ghydramcp/mixins/structs.py
Normal file
@ -0,0 +1,419 @@
|
|||||||
|
"""Structs mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for struct data type operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool, mcp_resource
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
class StructsMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for struct operations.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Listing and searching structs
|
||||||
|
- Getting struct details with field pagination
|
||||||
|
- Creating and modifying structs
|
||||||
|
- Managing struct fields
|
||||||
|
"""
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_list(
    self,
    category: Optional[str] = None,
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """List all struct data types with cursor-based pagination.

    Args:
        category: Filter by category path (e.g. "/winapi")
        port: Ghidra instance port (optional)
        page_size: Items per page (default: 50, max: 500)
        grep: Regex pattern to filter struct names
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all results without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Structs with pagination metadata.
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    cfg = get_config()

    # Fetch a large window up-front; grep/pagination happen locally.
    request = {"offset": 0, "limit": 10000}
    if category:
        request["category"] = category

    simplified = self.simplify_response(self.safe_get(port, "structs", request))
    if not simplified.get("success", True):
        return simplified

    structs = simplified.get("result", [])
    if not isinstance(structs, list):
        structs = []

    return self.paginate_response(
        data=structs,
        query_params={
            "tool": "structs_list",
            "port": port,
            "category": category,
            "grep": grep,
        },
        tool_name="structs_list",
        session_id=self._get_session_id(ctx),
        page_size=min(page_size, cfg.max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_get(
    self,
    name: str,
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """Get detailed information about a struct with field pagination.

    Args:
        name: Struct name
        port: Ghidra instance port (optional)
        page_size: Fields per page (default: 50, max: 500)
        grep: Regex pattern to filter fields
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all fields without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Struct details with paginated fields
    """
    if not name:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Struct name parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as e:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(e)}}

    config = get_config()
    session_id = self._get_session_id(ctx)

    params = {"name": name}
    response = self.safe_get(port, "structs", params)
    simplified = self.simplify_response(response)

    if not simplified.get("success", True):
        return simplified

    # The payload may or may not be nested under "result".
    result = simplified.get("result", simplified)

    # Extract struct info and fields
    # Split the payload: the "fields" list is paginated separately,
    # everything else is struct-level metadata.
    struct_info = {}
    fields = []

    if isinstance(result, dict):
        for key, value in result.items():
            if key == "fields" and isinstance(value, list):
                fields = value
            else:
                struct_info[key] = value

    # If few fields and no grep, return as-is
    # (pagination overhead is not worth it for tiny structs)
    if len(fields) <= 10 and not grep:
        return simplified

    query_params = {
        "tool": "structs_get",
        "port": port,
        "name": name,
    }

    # Paginate fields
    paginated = self.paginate_response(
        data=fields,
        query_params=query_params,
        tool_name="structs_get",
        session_id=session_id,
        page_size=min(page_size, config.max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )

    # Merge struct metadata with paginated fields
    if paginated.get("success"):
        # Fall back across the key spellings different backends emit
        # (size/length, category/categoryPath).
        paginated["struct_name"] = struct_info.get("name", name)
        paginated["struct_size"] = struct_info.get("size", struct_info.get("length"))
        paginated["struct_category"] = struct_info.get("category", struct_info.get("categoryPath"))
        paginated["struct_description"] = struct_info.get("description")
        # Rename the generic "result" key to the domain-specific "fields".
        paginated["fields"] = paginated.pop("result", [])

        if "_message" in paginated:
            # Reword the generic pagination message to match this tool.
            paginated["_message"] = paginated["_message"].replace("items", "fields")

    return paginated
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_create(
    self,
    name: str,
    category: Optional[str] = None,
    description: Optional[str] = None,
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Create a new struct data type.

    Args:
        name: Name for the new struct
        category: Category path (e.g. "/custom")
        description: Optional description
        port: Ghidra instance port (optional)

    Returns:
        Created struct information.
    """
    if not name:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Struct name parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    body = {"name": name}
    # Only forward optional attributes the caller actually supplied.
    if category:
        body["category"] = category
    if description:
        body["description"] = description

    return self.simplify_response(self.safe_post(port, "structs/create", body))
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_add_field(
    self,
    struct_name: str,
    field_name: str,
    field_type: str,
    offset: Optional[int] = None,
    comment: Optional[str] = None,
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Add a field to an existing struct.

    Args:
        struct_name: Name of the struct to modify
        field_name: Name for the new field
        field_type: Data type for the field
        offset: Specific offset (appends if not specified)
        comment: Optional field comment
        port: Ghidra instance port (optional)

    Returns:
        Operation result.
    """
    if not (struct_name and field_name and field_type):
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "struct_name, field_name, and field_type are required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    body = {
        "struct": struct_name,
        "fieldName": field_name,
        "fieldType": field_type,
    }
    # offset=0 is a valid placement, so test for None explicitly.
    if offset is not None:
        body["offset"] = offset
    if comment:
        body["comment"] = comment

    return self.simplify_response(self.safe_post(port, "structs/addfield", body))
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_update_field(
    self,
    struct_name: str,
    field_name: Optional[str] = None,
    field_offset: Optional[int] = None,
    new_name: Optional[str] = None,
    new_type: Optional[str] = None,
    new_comment: Optional[str] = None,
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Update an existing field in a struct.

    Args:
        struct_name: Name of the struct to modify
        field_name: Name of the field to update (OR field_offset)
        field_offset: Offset of the field to update (OR field_name)
        new_name: New name for the field
        new_type: New data type for the field
        new_comment: New comment for the field
        port: Ghidra instance port (optional)

    Returns:
        Operation result.
    """

    def _param_error(message: str) -> Dict[str, Any]:
        # Shared shape for every parameter-validation failure below.
        return {
            "success": False,
            "error": {"code": "MISSING_PARAMETER", "message": message},
        }

    if not struct_name:
        return _param_error("struct_name parameter is required")
    if not field_name and field_offset is None:
        return _param_error("Either field_name or field_offset must be provided")
    if not new_name and not new_type and new_comment is None:
        return _param_error("At least one of new_name, new_type, or new_comment required")

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    body = {"struct": struct_name}
    if field_name:
        body["fieldName"] = field_name
    if field_offset is not None:
        body["fieldOffset"] = field_offset
    if new_name:
        body["newName"] = new_name
    if new_type:
        body["newType"] = new_type
    if new_comment is not None:
        # None means "leave unchanged"; an empty string clears the comment.
        body["newComment"] = new_comment

    return self.simplify_response(self.safe_post(port, "structs/updatefield", body))
|
||||||
|
|
||||||
|
@mcp_tool()
def structs_delete(
    self,
    name: str,
    port: Optional[int] = None,
) -> Dict[str, Any]:
    """Delete a struct data type.

    Args:
        name: Name of the struct to delete
        port: Ghidra instance port (optional)

    Returns:
        Operation result.
    """
    if not name:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Struct name parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    return self.simplify_response(
        self.safe_post(port, "structs/delete", {"name": name})
    )
|
||||||
|
|
||||||
|
# Resources
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/structs")
def resource_structs_list(self, port: Optional[int] = None) -> Dict[str, Any]:
    """MCP Resource: List structs (capped).

    Args:
        port: Ghidra instance port

    Returns:
        List of structs (capped at 1000 by default configuration).
    """
    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"error": str(exc)}

    cap = get_config().resource_caps.get("structs", 1000)

    simplified = self.simplify_response(
        self.safe_get(port, "structs", {"limit": cap})
    )
    if not simplified.get("success", True):
        return simplified

    structs = simplified.get("result", [])
    if not isinstance(structs, list):
        structs = []

    # When the cap was hit, point callers at the paginating tool instead.
    truncated = len(structs) >= cap
    return {
        "structs": structs[:cap],
        "count": len(structs),
        "capped_at": cap if truncated else None,
        "_hint": "Use structs_list() tool for full pagination" if truncated else None,
    }
|
||||||
196
src/ghydramcp/mixins/xrefs.py
Normal file
196
src/ghydramcp/mixins/xrefs.py
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
"""Cross-references mixin for GhydraMCP.
|
||||||
|
|
||||||
|
Provides tools for cross-reference (xref) operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
from fastmcp import Context
|
||||||
|
from fastmcp.contrib.mcp_mixin import mcp_tool, mcp_resource
|
||||||
|
|
||||||
|
from .base import GhydraMixinBase
|
||||||
|
from ..config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
class XrefsMixin(GhydraMixinBase):
|
||||||
|
"""Mixin for cross-reference operations.
|
||||||
|
|
||||||
|
Provides tools for:
|
||||||
|
- Listing references to an address
|
||||||
|
- Listing references from an address
|
||||||
|
- Filtering by reference type
|
||||||
|
"""
|
||||||
|
|
||||||
|
@mcp_tool()
def xrefs_list(
    self,
    to_addr: Optional[str] = None,
    from_addr: Optional[str] = None,
    type: Optional[str] = None,
    port: Optional[int] = None,
    page_size: int = 50,
    grep: Optional[str] = None,
    grep_ignorecase: bool = True,
    return_all: bool = False,
    ctx: Optional[Context] = None,
) -> Dict[str, Any]:
    """List cross-references with filtering and pagination.

    Args:
        to_addr: Filter references to this address (hex)
        from_addr: Filter references from this address (hex)
        type: Filter by reference type ("CALL", "READ", "WRITE", etc.)
        port: Ghidra instance port (optional)
        page_size: Items per page (default: 50, max: 500)
        grep: Regex pattern to filter results
        grep_ignorecase: Case-insensitive grep (default: True)
        return_all: Return all results without pagination
        ctx: FastMCP context (auto-injected)

    Returns:
        Cross-references with pagination metadata.
    """
    if not to_addr and not from_addr:
        return {
            "success": False,
            "error": {
                "code": "MISSING_PARAMETER",
                "message": "Either to_addr or from_addr parameter is required",
            },
        }

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"success": False, "error": {"code": "NO_INSTANCE", "message": str(exc)}}

    cfg = get_config()

    # Pull a large window once; grep/pagination happen locally.
    request = {"offset": 0, "limit": 10000}
    for key, value in (("to_addr", to_addr), ("from_addr", from_addr), ("type", type)):
        if value:
            request[key] = value

    simplified = self.simplify_response(self.safe_get(port, "xrefs", request))
    if not simplified.get("success", True):
        return simplified

    xrefs = simplified.get("result", [])
    if not isinstance(xrefs, list):
        xrefs = []

    return self.paginate_response(
        data=xrefs,
        query_params={
            "tool": "xrefs_list",
            "port": port,
            "to_addr": to_addr,
            "from_addr": from_addr,
            "type": type,
            "grep": grep,
        },
        tool_name="xrefs_list",
        session_id=self._get_session_id(ctx),
        page_size=min(page_size, cfg.max_page_size),
        grep=grep,
        grep_ignorecase=grep_ignorecase,
        return_all=return_all,
    )
|
||||||
|
|
||||||
|
# Resources
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/xrefs/to/{address}")
def resource_xrefs_to(
    self,
    port: Optional[int] = None,
    address: Optional[str] = None,
) -> Dict[str, Any]:
    """MCP Resource: Get references to an address (capped).

    Args:
        port: Ghidra instance port
        address: Target address

    Returns:
        References to the address (capped at 1000 by default configuration).
    """
    if not address:
        return {"error": "Address is required"}

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"error": str(exc)}

    cap = get_config().resource_caps.get("xrefs", 1000)

    simplified = self.simplify_response(
        self.safe_get(port, "xrefs", {"to_addr": address, "limit": cap})
    )
    if not simplified.get("success", True):
        return simplified

    refs = simplified.get("result", [])
    if not isinstance(refs, list):
        refs = []

    # When the cap was hit, point callers at the paginating tool instead.
    truncated = len(refs) >= cap
    return {
        "address": address,
        "xrefs_to": refs[:cap],
        "count": len(refs),
        "capped_at": cap if truncated else None,
        "_hint": "Use xrefs_list(to_addr=...) for full pagination" if truncated else None,
    }
|
||||||
|
|
||||||
|
@mcp_resource(uri="ghidra://instance/{port}/xrefs/from/{address}")
def resource_xrefs_from(
    self,
    port: Optional[int] = None,
    address: Optional[str] = None,
) -> Dict[str, Any]:
    """MCP Resource: Get references from an address (capped).

    Args:
        port: Ghidra instance port
        address: Source address

    Returns:
        References from the address (capped at 1000 by default configuration).
    """
    if not address:
        return {"error": "Address is required"}

    try:
        port = self.get_instance_port(port)
    except ValueError as exc:
        return {"error": str(exc)}

    cap = get_config().resource_caps.get("xrefs", 1000)

    simplified = self.simplify_response(
        self.safe_get(port, "xrefs", {"from_addr": address, "limit": cap})
    )
    if not simplified.get("success", True):
        return simplified

    refs = simplified.get("result", [])
    if not isinstance(refs, list):
        refs = []

    # When the cap was hit, point callers at the paginating tool instead.
    truncated = len(refs) >= cap
    return {
        "address": address,
        "xrefs_from": refs[:cap],
        "count": len(refs),
        "capped_at": cap if truncated else None,
        "_hint": "Use xrefs_list(from_addr=...) for full pagination" if truncated else None,
    }
|
||||||
183
src/ghydramcp/server.py
Normal file
183
src/ghydramcp/server.py
Normal file
@ -0,0 +1,183 @@
|
|||||||
|
"""GhydraMCP Server - FastMCP server composing all mixins.
|
||||||
|
|
||||||
|
This module creates and configures the FastMCP server by composing
|
||||||
|
all domain-specific mixins into a single MCP server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import signal
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastmcp import FastMCP
|
||||||
|
|
||||||
|
from .config import get_config, set_config, GhydraConfig
|
||||||
|
from .mixins import (
|
||||||
|
InstancesMixin,
|
||||||
|
FunctionsMixin,
|
||||||
|
DataMixin,
|
||||||
|
StructsMixin,
|
||||||
|
AnalysisMixin,
|
||||||
|
MemoryMixin,
|
||||||
|
XrefsMixin,
|
||||||
|
CursorsMixin,
|
||||||
|
DockerMixin,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def create_server(
    name: str = "GhydraMCP",
    config: Optional[GhydraConfig] = None,
) -> FastMCP:
    """Build the GhydraMCP FastMCP server from all domain mixins.

    Args:
        name: Server name
        config: Optional configuration override; installed globally when given.

    Returns:
        Configured FastMCP server instance with every mixin registered.
    """
    if config:
        set_config(config)

    server = FastMCP(name)

    # One instance per domain mixin; each contributes its own tools,
    # resources, and prompts when registered below. Registration order
    # matches the original wiring.
    mixins = (
        InstancesMixin(),
        FunctionsMixin(),
        DataMixin(),
        StructsMixin(),
        AnalysisMixin(),
        MemoryMixin(),
        XrefsMixin(),
        CursorsMixin(),
        DockerMixin(),
    )
    for mixin in mixins:
        mixin.register_all(server)

    return server
|
||||||
|
|
||||||
|
|
||||||
|
def _periodic_discovery(interval: int = 30):
    """Background loop that re-scans common ports for new Ghidra instances.

    Args:
        interval: Seconds to sleep between discovery passes.
    """
    # Imported lazily to avoid import cycles at module load time.
    from .mixins.base import GhydraMixinBase
    from .core.http_client import safe_get

    config = get_config()

    while True:
        time.sleep(interval)
        try:
            # Quick scan of the configured common ports.
            for candidate in config.quick_discovery_range:
                try:
                    reply = safe_get(candidate, "")
                    if not reply.get("success", False):
                        continue
                    with GhydraMixinBase._instances_lock:
                        # Only add newly seen ports; never overwrite entries.
                        if candidate in GhydraMixinBase._instances:
                            continue
                        GhydraMixinBase._instances[candidate] = {
                            "url": f"http://{config.ghidra_host}:{candidate}",
                            "project": reply.get("project", ""),
                            "file": reply.get("file", ""),
                            "discovered_at": time.time(),
                        }
                except Exception:
                    # Best-effort probe: unreachable ports are expected.
                    pass
        except Exception:
            # Never let a discovery hiccup kill the background thread.
            pass
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_sigint(signum, frame):
|
||||||
|
"""Handle SIGINT gracefully."""
|
||||||
|
print("\nShutting down GhydraMCP...", file=sys.stderr)
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
|
def _package_version() -> str:
    """Return the installed ghydramcp version, or a static fallback.

    The fallback covers running from a source checkout where the package
    metadata is not installed.
    """
    try:
        from importlib.metadata import version
        return version("ghydramcp")
    except Exception:
        return "2025.12.1"


def _print_banner(docker_available: bool) -> None:
    """Print the startup banner (version, tagline, Docker status) to stderr."""
    print(f"🔬 GhydraMCP v{_package_version()}", file=sys.stderr)
    print(" AI-assisted reverse engineering bridge for Ghidra", file=sys.stderr)
    if docker_available:
        print(" 🐳 Docker available (use docker_* tools for container management)", file=sys.stderr)
    else:
        print(" ⚠ Docker not found (container management disabled)", file=sys.stderr)


def _initial_discovery(config) -> int:
    """Scan the quick-discovery port range once, registering live instances.

    Args:
        config: Server configuration providing ``ghidra_host`` and
            ``quick_discovery_range``.

    Returns:
        The number of Ghidra instances found.
    """
    from .mixins.base import GhydraMixinBase
    from .core.http_client import safe_get

    found = 0
    for port in config.quick_discovery_range:
        try:
            response = safe_get(port, "")
            if response.get("success", False):
                # Take the lock for consistency with _periodic_discovery,
                # even though the background thread has not started yet.
                with GhydraMixinBase._instances_lock:
                    GhydraMixinBase._instances[port] = {
                        "url": f"http://{config.ghidra_host}:{port}",
                        "project": response.get("project", ""),
                        "file": response.get("file", ""),
                        "discovered_at": time.time(),
                    }
                found += 1
                print(f" ✓ Found instance on port {port}", file=sys.stderr)
        except Exception:
            # Best-effort: a closed port or bad reply just means "not here".
            pass
    return found


def main():
    """Main entry point for the GhydraMCP server.

    Prints a startup banner, creates the MCP server, performs an initial
    scan for running Ghidra instances, starts the background discovery
    thread, installs the SIGINT handler, and finally runs the server over
    stdio (this call blocks until shutdown).
    """
    import shutil

    # Check Docker availability (enables/disables the docker_* tool family).
    _print_banner(shutil.which("docker") is not None)

    config = get_config()

    # Create and configure the server
    mcp = create_server()

    # Initial instance discovery
    print(f" Discovering Ghidra instances on {config.ghidra_host}...", file=sys.stderr)
    found = _initial_discovery(config)
    if found == 0:
        print(" ⚠ No Ghidra instances found (they can be discovered later)", file=sys.stderr)
    else:
        print(f" Found {found} Ghidra instance(s)", file=sys.stderr)

    # Start background discovery thread (daemon: dies with the process).
    discovery_thread = threading.Thread(
        target=_periodic_discovery,
        daemon=True,
        name="GhydraMCP-Discovery",
    )
    discovery_thread.start()

    # Set up signal handler for a clean Ctrl-C shutdown.
    signal.signal(signal.SIGINT, _handle_sigint)

    print(" Starting MCP server...", file=sys.stderr)

    # Run the server (blocks)
    mcp.run(transport="stdio")
|
||||||
|
|
||||||
|
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
||||||
Loading…
x
Reference in New Issue
Block a user