Merge pull request #15 from mamedov/feature/comments-pagination
Add pagination support to get_comments
@@ -1,4 +1,5 @@
 import os
+import re

 import requests
 from dotenv import load_dotenv
@@ -405,21 +406,48 @@ class BasecampClient:
             raise Exception(f"Failed to get schedule: {str(e)}")

     # Comments methods
-    def get_comments(self, project_id, recording_id):
+    def get_comments(self, project_id, recording_id, page=1):
         """
-        Get all comments for a recording (todos, message, etc.).
-
+        Get comments for a recording (todos, message, etc.).
+
         Args:
-            project_id (int): Project/bucket ID.
+            project_id (int): Project/bucket ID the recording belongs to.
             recording_id (int): ID of the recording (todos, message, etc.)
-
+            page (int): Page number for pagination (default: 1).
+                Basecamp uses geared pagination: page 1 has 15 results,
+                page 2 has 30, page 3 has 50, page 4+ has 100.
+
         Returns:
-            list: Comments for the recording
+            dict: Contains 'comments' list and pagination metadata:
+                - comments: list of comments
+                - total_count: total number of comments (from X-Total-Count header)
+                - next_page: next page number if available, None otherwise
         """
+        if page < 1:
+            raise ValueError("page must be >= 1")
         endpoint = f"buckets/{project_id}/recordings/{recording_id}/comments.json"
-        response = self.get(endpoint)
+        response = self.get(endpoint, params={"page": page})
         if response.status_code == 200:
-            return response.json()
+            # Parse pagination headers
+            total_count = response.headers.get('X-Total-Count')
+            total_count = int(total_count) if total_count else None
+
+            # Parse Link header for next page
+            next_page = None
+            link_header = response.headers.get('Link', '')
+            # Split by comma to handle multiple links (e.g., rel="prev", rel="next")
+            for link in link_header.split(','):
+                if 'rel="next"' in link:
+                    match = re.search(r'page=(\d+)', link)
+                    if match:
+                        next_page = int(match.group(1))
+                    break
+
+            return {
+                "comments": response.json(),
+                "total_count": total_count,
+                "next_page": next_page
+            }
         else:
             raise Exception(f"Failed to get comments: {response.status_code} - {response.text}")
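With this return shape, a caller can walk a long comment thread by following next_page until it is None. A minimal paging sketch, not taken from the patch, assuming a configured BasecampClient instance named client and placeholder IDs:

def fetch_all_comments(client, project_id, recording_id):
    """Collect every comment by following next_page across pages."""
    all_comments = []
    page = 1
    while page is not None:
        result = client.get_comments(project_id, recording_id, page)
        all_comments.extend(result["comments"])
        page = result["next_page"]  # None once the Link header stops advertising rel="next"
    return all_comments

# Placeholder IDs for illustration only.
# comments = fetch_all_comments(client, 12345678, 87654321)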
@@ -500,23 +500,28 @@ async def global_search(query: str) -> Dict[str, Any]:
     }

 @mcp.tool()
-async def get_comments(recording_id: str, project_id: str) -> Dict[str, Any]:
+async def get_comments(recording_id: str, project_id: str, page: int = 1) -> Dict[str, Any]:
     """Get comments for a Basecamp item.

     Args:
         recording_id: The item ID
         project_id: The project ID
+        page: Page number for pagination (default: 1). Basecamp uses geared pagination:
+            page 1 has 15 results, page 2 has 30, page 3 has 50, page 4+ has 100.
     """
     client = _get_basecamp_client()
     if not client:
         return _get_auth_error_response()

     try:
-        comments = await _run_sync(client.get_comments, project_id, recording_id)
+        result = await _run_sync(client.get_comments, project_id, recording_id, page)
         return {
             "status": "success",
-            "comments": comments,
-            "count": len(comments)
+            "comments": result["comments"],
+            "count": len(result["comments"]),
+            "page": page,
+            "total_count": result["total_count"],
+            "next_page": result["next_page"]
         }
     except Exception as e:
         logger.error(f"Error getting comments: {e}")
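The next_page value surfaced in this response comes from the client's Link-header parsing above. A standalone illustration of that logic, using a made-up header value in the usual rel="next"/rel="prev" format:

import re

# Illustrative header only; real values come from the Basecamp API response.
link_header = (
    '<https://3.basecampapi.com/1234567/buckets/1/recordings/2/comments.json?page=3>; rel="next", '
    '<https://3.basecampapi.com/1234567/buckets/1/recordings/2/comments.json?page=1>; rel="prev"'
)

next_page = None
for link in link_header.split(','):
    if 'rel="next"' in link:
        match = re.search(r'page=(\d+)', link)
        if match:
            next_page = int(match.group(1))
        break

print(next_page)  # 3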
@@ -193,7 +193,8 @@ class MCPServer:
                     "type": "object",
                     "properties": {
                         "recording_id": {"type": "string", "description": "The item ID"},
-                        "project_id": {"type": "string", "description": "The project ID"}
+                        "project_id": {"type": "string", "description": "The project ID"},
+                        "page": {"type": "integer", "description": "Page number for pagination (default: 1). Basecamp uses geared pagination: page 1 has 15 results, page 2 has 30, page 3 has 50, page 4+ has 100.", "default": 1}
                     },
                     "required": ["recording_id", "project_id"]
                 }
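The geared page sizes in the description above (15, then 30, then 50, then 100 per page) mean cumulative coverage grows as 15, 45, 95, 195, and so on. A small sketch of that arithmetic, derived only from the sizes stated in the description:

GEARED_SIZES = [15, 30, 50, 100]  # page 4 and beyond use 100

def pages_needed(total_count: int) -> int:
    """How many pages it takes to cover total_count comments."""
    remaining = total_count
    page = 0
    while remaining > 0:
        size = GEARED_SIZES[min(page, len(GEARED_SIZES) - 1)]
        remaining -= size
        page += 1
    return max(page, 1)

assert pages_needed(10) == 1    # fits on page 1 (15 results)
assert pages_needed(45) == 2    # 15 + 30
assert pages_needed(120) == 4   # 15 + 30 + 50 + 100 covers up to 195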
@@ -1025,11 +1026,15 @@ class MCPServer:
         elif tool_name == "get_comments":
             recording_id = arguments.get("recording_id")
             project_id = arguments.get("project_id")
-            comments = client.get_comments(project_id, recording_id)
+            page = arguments.get("page", 1)
+            result = client.get_comments(project_id, recording_id, page)
             return {
                 "status": "success",
-                "comments": comments,
-                "count": len(comments)
+                "comments": result["comments"],
+                "count": len(result["comments"]),
+                "page": page,
+                "total_count": result["total_count"],
+                "next_page": result["next_page"]
             }

         elif tool_name == "create_comment":
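The dispatcher reads the new argument with arguments.get("page", 1), so page stays optional for existing callers. An illustrative arguments payload for this tool call, with placeholder IDs:

arguments = {
    "recording_id": "1069479123",  # placeholder item ID
    "project_id": "987654",        # placeholder project ID
    "page": 2,                     # optional; omitting it falls back to 1
}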
@@ -452,7 +452,7 @@ class BasecampSearch:
             logger.error(f"Error searching schedule entries: {str(e)}")
             return []

-    def search_comments(self, query=None, recording_id=None, bucket_id=None):
+    def search_comments(self, query=None, recording_id=None, bucket_id=None, page=1):
         """
         Search for comments across resources or for a specific resource.
@@ -460,22 +460,35 @@
             query (str, optional): Search term to filter comments
             recording_id (int, optional): ID of the recording (todo, message, etc.) to search in
             bucket_id (int, optional): Project/bucket ID
+            page (int, optional): Page number for pagination (default: 1)

         Returns:
-            list: Matching comments
+            dict: Contains 'comments' list (filtered if query provided) and pagination metadata:
+                - comments: list of matching comments
+                - total_count: total number of comments (from API)
+                - next_page: next page number if available, None otherwise
         """
         try:
             # If both recording_id and bucket_id are provided, get comments for that specific recording
             if recording_id and bucket_id:
-                comments = self.client.get_comments(recording_id, bucket_id)
+                result = self.client.get_comments(bucket_id, recording_id, page)
+                comments = result["comments"]
+                pagination = {
+                    "total_count": result["total_count"],
+                    "next_page": result["next_page"]
+                }
             # Otherwise we can't search across all comments as there's no endpoint for that
             else:
                 logger.warning("Cannot search all comments across Basecamp - both recording_id and bucket_id are required")
-                return [{
-                    "content": "To search comments, you need to specify both a recording ID (todo, message, etc.) and a bucket ID. Comments cannot be searched globally in Basecamp.",
-                    "api_limitation": True,
-                    "title": "Comment Search Limitation"
-                }]
+                return {
+                    "comments": [{
+                        "content": "To search comments, you need to specify both a recording ID (todo, message, etc.) and a bucket ID. Comments cannot be searched globally in Basecamp.",
+                        "api_limitation": True,
+                        "title": "Comment Search Limitation"
+                    }],
+                    "total_count": None,
+                    "next_page": None
+                }

             # Filter by query if provided
             if query and comments:
@@ -499,12 +512,18 @@ class BasecampSearch:
                     if content_matched:
                         filtered_comments.append(comment)

-                return filtered_comments
+                return {
+                    "comments": filtered_comments,
+                    **pagination
+                }

-            return comments
+            return {
+                "comments": comments,
+                **pagination
+            }
         except Exception as e:
             logger.error(f"Error searching comments: {str(e)}")
-            return []
+            return {"comments": [], "total_count": None, "next_page": None}

     def search_campfire_lines(self, query=None, project_id=None, campfire_id=None):
         """
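A short usage sketch for the updated search_comments, not taken from the patch, assuming a BasecampSearch instance named search and placeholder IDs:

# Placeholder IDs and query for illustration only.
result = search.search_comments(query="deploy", recording_id=87654321, bucket_id=12345678)

for comment in result["comments"]:
    if comment.get("api_limitation"):
        print(comment["title"], "-", comment["content"])
    else:
        print(comment.get("content", ""))

# Fetch the next page of the same thread if the API advertised one.
if result["next_page"] is not None:
    more = search.search_comments(query="deploy", recording_id=87654321,
                                  bucket_id=12345678, page=result["next_page"])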