refactor: Remove deprecated files and clean up codebase

This commit deletes obsolete files related to the Basecamp MCP integration, including:
- `basecamp_client.py`
- `basecamp_oauth.py`
- `composio_client_example.py`
- `composio_integration.py`
- `mcp_integration.py`
- `setup.sh`
- `start_basecamp_mcp.sh`

Additionally, a new file `mcp_server_cli.py` is introduced to streamline the MCP server functionality. The README has been updated to reflect these changes and provide clearer setup instructions.

This cleanup aims to enhance maintainability and focus on the core components of the integration.
This commit is contained in:
George Antonopoulos
2025-06-02 17:11:39 +01:00
parent b0deac4d87
commit b3a6efc5d7
16 changed files with 1050 additions and 2732 deletions

View File

import os
import json
import threading
from datetime import datetime, timedelta
import logging

# Determine the directory where this script (token_storage.py) is located,
# so the token file path is stable regardless of the process's working dir.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Token storage file as an absolute path within that directory.
# NOTE(review): in production, use a database instead of a JSON file.
TOKEN_FILE = os.path.join(SCRIPT_DIR, 'oauth_tokens.json')

# Lock for thread-safe read/write of the token file.
_lock = threading.Lock()
_logger = logging.getLogger(__name__)
def _read_tokens():
    """Read all tokens from TOKEN_FILE.

    Returns:
        dict: The parsed JSON token store, or {} when the file is missing
        or contains invalid JSON (best-effort read — callers always get a
        usable dict).
    """
    try:
        with open(TOKEN_FILE, 'r') as f:
            data = json.load(f)
        # Log the Basecamp token's updated_at to aid debugging token staleness.
        basecamp_data = data.get('basecamp', {})
        updated_at = basecamp_data.get('updated_at')
        _logger.info(f"Read tokens from {TOKEN_FILE}. Basecamp token updated_at: {updated_at}")
        return data
    except FileNotFoundError:
        _logger.info(f"{TOKEN_FILE} not found. Returning empty tokens.")
        return {}  # Return empty dict if file doesn't exist
    except json.JSONDecodeError:
        _logger.warning(f"Error decoding JSON from {TOKEN_FILE}. Returning empty tokens.")
        # If file exists but isn't valid JSON, return empty dict
        return {}
@@ -33,6 +43,10 @@ def _write_tokens(tokens):
# Create directory for the token file if it doesn't exist
os.makedirs(os.path.dirname(TOKEN_FILE) if os.path.dirname(TOKEN_FILE) else '.', exist_ok=True)
basecamp_data_to_write = tokens.get('basecamp', {})
updated_at_to_write = basecamp_data_to_write.get('updated_at')
_logger.info(f"Writing tokens to {TOKEN_FILE}. Basecamp token updated_at to be written: {updated_at_to_write}")
# Set secure permissions on the file
with open(TOKEN_FILE, 'w') as f:
json.dump(tokens, f, indent=2)