fix: resolve import issues for PyPI platform tools
- Made feedparser import optional in discovery.py with graceful fallback
- Fixed GitHubClient import to use the correct GitHubAPIClient class name
- Made server import optional in __init__.py to allow tool imports without the fastmcp dependency
- Added a warning when feedparser is not available for RSS functionality
- All tool modules are now importable without external dependencies

This allows the MCP server to start even when optional dependencies are missing, providing graceful degradation of functionality.
parent 431abcbbe6 · commit 03366b5cdd
@@ -8,6 +8,10 @@ __version__ = "0.1.0"
 __author__ = "Hal"
 __email__ = "hal.long@outlook.com"
 
-from pypi_query_mcp.server import mcp
-
-__all__ = ["mcp", "__version__"]
+try:
+    from pypi_query_mcp.server import mcp
+    __all__ = ["mcp", "__version__"]
+except ImportError:
+    # Server dependencies not available (fastmcp, etc.)
+    # Tools can still be imported individually
+    __all__ = ["__version__"]
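
With this change, importing the package degrades gracefully when fastmcp (or another server dependency) is missing. A minimal usage sketch, assuming the package name shown in the hunk above; the printed values are illustrative, not taken from this commit:

# Hypothetical usage: with fastmcp not installed, the import below still
# succeeds and the package exposes only the version metadata.
import pypi_query_mcp

print(pypi_query_mcp.__version__)  # "0.1.0"
print(pypi_query_mcp.__all__)      # ["__version__"] when the server import failed
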
@@ -17,7 +17,7 @@ from ..core.exceptions import (
     PyPIError,
 )
 from ..core.pypi_client import PyPIClient
-from ..core.github_client import GitHubClient
+from ..core.github_client import GitHubAPIClient
 
 logger = logging.getLogger(__name__)
 
@@ -417,7 +417,7 @@ async def _analyze_github_community_sentiment(package_name: str) -> Dict[str, Any]:
         return {"status": "no_github_repository"}
 
     # Use GitHub client to get community data
-    async with GitHubClient() as github_client:
+    async with GitHubAPIClient() as github_client:
         # Get recent issues for sentiment analysis
         issues_data = await github_client.get_repository_issues(
             github_info["owner"],
@@ -530,7 +530,7 @@ async def _get_community_health_metrics(package_name: str) -> Dict[str, Any]:
     github_info = await _find_github_repository(package_name)
 
     if github_info.get("repository_url"):
-        async with GitHubClient() as github_client:
+        async with GitHubAPIClient() as github_client:
             # Get community health data
             community_profile = await github_client.get_community_profile(
                 github_info["owner"],
@@ -877,7 +877,7 @@ async def _analyze_github_maintainer_info(package_name: str) -> Dict[str, Any]:
     if not github_info.get("repository_url"):
         return {"status": "no_github_repository"}
 
-    async with GitHubClient() as github_client:
+    async with GitHubAPIClient() as github_client:
         # Get repository information
         repo_data = await github_client.get_repository_info(
             github_info["owner"],
@@ -921,7 +921,7 @@ async def _get_support_channels(package_name: str) -> Dict[str, Any]:
         support_channels["issue_tracker"] = f"{github_info['repository_url']}/issues"
 
         # Check for documentation links
-        async with GitHubClient() as github_client:
+        async with GitHubAPIClient() as github_client:
             repo_data = await github_client.get_repository_info(
                 github_info["owner"],
                 github_info["repo"]
@@ -977,7 +977,7 @@ async def _get_contribution_guidelines(package_name: str) -> Dict[str, Any]:
     if not github_info.get("repository_url"):
         return {"status": "no_repository"}
 
-    async with GitHubClient() as github_client:
+    async with GitHubAPIClient() as github_client:
         # Check for common contribution files
         contribution_files = await github_client.get_community_files(
             github_info["owner"],

@@ -9,7 +9,15 @@ from typing import Any, Dict, List, Optional, Set
 from urllib.parse import urlencode
 
 import httpx
 
-from feedparser import parse as parse_feed
+try:
+    from feedparser import parse as parse_feed
+    HAS_FEEDPARSER = True
+except ImportError:
+    HAS_FEEDPARSER = False
+    def parse_feed(url_or_content):
+        """Fallback when feedparser is not available."""
+        return {"entries": []}
+
 from ..core.exceptions import InvalidPackageNameError, NetworkError, SearchError
 from ..core.pypi_client import PyPIClient
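
When feedparser is absent, the stub parse_feed above ignores its argument and returns an empty entry list, so code that iterates over feed entries simply finds no releases. A minimal sketch of that behaviour; the feed URL is an assumption, not taken from this commit:

# Hypothetical: fallback behaviour when feedparser is not installed.
feed = parse_feed("https://pypi.org/rss/updates.xml")  # assumed PyPI feed URL
print(HAS_FEEDPARSER)   # False
print(feed["entries"])  # [] -- downstream loops over entries do nothing
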
@@ -577,6 +585,18 @@ async def _fetch_recent_releases_from_rss(hours: int) -> List[Dict[str, Any]]:
         response.raise_for_status()
 
         # Parse RSS feed
+        if not HAS_FEEDPARSER:
+            logger.warning("feedparser not available - RSS monitoring limited")
+            return {
+                "new_releases": [],
+                "time_period": f"last {hours} hours",
+                "note": "RSS parsing unavailable - feedparser dependency missing",
+                "fallback_used": True,
+                "total_found": 0,
+                "category": category,
+                "timestamp": datetime.utcnow().isoformat()
+            }
+
         feed = parse_feed(response.content)
         cutoff_time = datetime.utcnow() - timedelta(hours=hours)
 