From 4ac78a996cb4ec4e1af161e390de4af4645bca76 Mon Sep 17 00:00:00 2001 From: Raduan77 Date: Thu, 28 Nov 2024 09:51:03 +0100 Subject: [PATCH 01/43] code for search in github --- src/github/index.ts | 542 ++++++++++++++++++++++++++-------------- src/github/package.json | 2 + src/github/schemas.ts | 351 ++++++++++++++++++++------ 3 files changed, 631 insertions(+), 264 deletions(-) diff --git a/src/github/index.ts b/src/github/index.ts index 0676a34c..bddd9f46 100644 --- a/src/github/index.ts +++ b/src/github/index.ts @@ -41,19 +41,32 @@ import { CreateIssueSchema, CreatePullRequestSchema, ForkRepositorySchema, - CreateBranchSchema -} from './schemas.js'; -import { z } from 'zod'; -import { zodToJsonSchema } from 'zod-to-json-schema'; - -const server = new Server({ - name: "github-mcp-server", - version: "0.1.0", -}, { - capabilities: { - tools: {} + CreateBranchSchema, + SearchCodeSchema, + SearchIssuesSchema, + SearchUsersSchema, + SearchCodeResponseSchema, + SearchIssuesResponseSchema, + SearchUsersResponseSchema, + type SearchCodeResponse, + type SearchIssuesResponse, + type SearchUsersResponse, +} from "./schemas.js"; +import { zodToJsonSchema } from "zod-to-json-schema"; +import { z } from "zod"; +import type { CallToolRequest } from "@modelcontextprotocol/sdk/types.js"; + +const server = new Server( + { + name: "github-mcp-server", + version: "0.1.0", + }, + { + capabilities: { + tools: {}, + }, } -}); +); const GITHUB_PERSONAL_ACCESS_TOKEN = process.env.GITHUB_PERSONAL_ACCESS_TOKEN; @@ -67,17 +80,17 @@ async function forkRepository( repo: string, organization?: string ): Promise { - const url = organization + const url = organization ? `https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}` : `https://api.github.com/repos/${owner}/${repo}/forks`; const response = await fetch(url, { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, }); if (!response.ok) { @@ -93,21 +106,21 @@ async function createBranch( options: z.infer ): Promise { const fullRef = `refs/heads/${options.ref}`; - + const response = await fetch( `https://api.github.com/repos/${owner}/${repo}/git/refs`, { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify({ ref: fullRef, - sha: options.sha - }) + sha: options.sha, + }), } ); @@ -126,10 +139,10 @@ async function getDefaultBranchSHA( `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main`, { headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, } ); @@ -138,15 +151,17 @@ async function getDefaultBranchSHA( `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master`, { headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": 
"github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, } ); if (!masterResponse.ok) { - throw new Error("Could not find default branch (tried 'main' and 'master')"); + throw new Error( + "Could not find default branch (tried 'main' and 'master')" + ); } const data = GitHubReferenceSchema.parse(await masterResponse.json()); @@ -170,10 +185,10 @@ async function getFileContents( const response = await fetch(url, { headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, }); if (!response.ok) { @@ -184,7 +199,7 @@ async function getFileContents( // If it's a file, decode the content if (!Array.isArray(data) && data.content) { - data.content = Buffer.from(data.content, 'base64').toString('utf8'); + data.content = Buffer.from(data.content, "base64").toString("utf8"); } return data; @@ -200,12 +215,12 @@ async function createIssue( { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, - body: JSON.stringify(options) + body: JSON.stringify(options), } ); @@ -226,12 +241,12 @@ async function createPullRequest( { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, - body: JSON.stringify(options) + body: JSON.stringify(options), } ); @@ -251,7 +266,7 @@ async function createOrUpdateFile( branch: string, sha?: string ): Promise { - const encodedContent = Buffer.from(content).toString('base64'); + const encodedContent = Buffer.from(content).toString("base64"); let currentSha = sha; if (!currentSha) { @@ -261,28 +276,30 @@ async function createOrUpdateFile( currentSha = existingFile.sha; } } catch (error) { - console.error('Note: File does not exist in branch, will create new file'); + console.error( + "Note: File does not exist in branch, will create new file" + ); } } const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; - + const body = { message, content: encodedContent, branch, - ...(currentSha ? { sha: currentSha } : {}) + ...(currentSha ? 
{ sha: currentSha } : {}), }; const response = await fetch(url, { method: "PUT", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, - body: JSON.stringify(body) + body: JSON.stringify(body), }); if (!response.ok) { @@ -298,11 +315,11 @@ async function createTree( files: FileOperation[], baseTree?: string ): Promise { - const tree = files.map(file => ({ + const tree = files.map((file) => ({ path: file.path, - mode: '100644' as const, - type: 'blob' as const, - content: file.content + mode: "100644" as const, + type: "blob" as const, + content: file.content, })); const response = await fetch( @@ -310,15 +327,15 @@ async function createTree( { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify({ tree, - base_tree: baseTree - }) + base_tree: baseTree, + }), } ); @@ -341,16 +358,16 @@ async function createCommit( { method: "POST", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify({ message, tree, - parents - }) + parents, + }), } ); @@ -372,15 +389,15 @@ async function updateReference( { method: "PATCH", headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", "User-Agent": "github-mcp-server", - "Content-Type": "application/json" + "Content-Type": "application/json", }, body: JSON.stringify({ sha, - force: true - }) + force: true, + }), } ); @@ -402,10 +419,10 @@ async function pushFiles( `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`, { headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, } ); @@ -417,7 +434,9 @@ async function pushFiles( const commitSha = ref.object.sha; const tree = await createTree(owner, repo, files, commitSha); - const commit = await createCommit(owner, repo, message, tree.sha, [commitSha]); + const commit = await createCommit(owner, repo, message, tree.sha, [ + commitSha, + ]); return await updateReference(owner, repo, `heads/${branch}`, commit.sha); } @@ -433,10 +452,10 @@ async function searchRepositories( const response = await fetch(url.toString(), { headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, }); if 
(!response.ok) {
 @@ -452,12 +471,12 @@ async function createRepository(
   const response = await fetch("https://api.github.com/user/repos", {
     method: "POST",
     headers: {
-      "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
-      "Accept": "application/vnd.github.v3+json",
+      Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+      Accept: "application/vnd.github.v3+json",
       "User-Agent": "github-mcp-server",
-      "Content-Type": "application/json"
+      "Content-Type": "application/json",
     },
-    body: JSON.stringify(options)
+    body: JSON.stringify(options),
   });
 
   if (!response.ok) {
@@ -467,172 +486,307 @@ async function createRepository(
   return GitHubRepositorySchema.parse(await response.json());
 }
 
+async function searchCode(
+  params: z.infer<typeof SearchCodeSchema>
+): Promise<SearchCodeResponse> {
+  const url = new URL("https://api.github.com/search/code");
+  Object.entries(params).forEach(([key, value]) => {
+    if (value !== undefined && value !== null) {
+      url.searchParams.append(key, value.toString());
+    }
+  });
+
+  const response = await fetch(url.toString(), {
+    headers: {
+      Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+      Accept: "application/vnd.github.v3+json",
+      "User-Agent": "github-mcp-server",
+    },
+  });
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return SearchCodeResponseSchema.parse(await response.json());
+}
+
+async function searchIssues(
+  params: z.infer<typeof SearchIssuesSchema>
+): Promise<SearchIssuesResponse> {
+  const url = new URL("https://api.github.com/search/issues");
+  Object.entries(params).forEach(([key, value]) => {
+    if (value !== undefined && value !== null) {
+      url.searchParams.append(key, value.toString());
+    }
+  });
+
+  const response = await fetch(url.toString(), {
+    headers: {
+      Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+      Accept: "application/vnd.github.v3+json",
+      "User-Agent": "github-mcp-server",
+    },
+  });
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return SearchIssuesResponseSchema.parse(await response.json());
+}
+
+async function searchUsers(
+  params: z.infer<typeof SearchUsersSchema>
+): Promise<SearchUsersResponse> {
+  const url = new URL("https://api.github.com/search/users");
+  Object.entries(params).forEach(([key, value]) => {
+    if (value !== undefined && value !== null) {
+      url.searchParams.append(key, value.toString());
+    }
+  });
+
+  const response = await fetch(url.toString(), {
+    headers: {
+      Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+      Accept: "application/vnd.github.v3+json",
+      "User-Agent": "github-mcp-server",
+    },
+  });
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return SearchUsersResponseSchema.parse(await response.json());
+}
+
 server.setRequestHandler(ListToolsRequestSchema, async () => {
   return {
     tools: [
       {
         name: "create_or_update_file",
         description: "Create or update a single file in a GitHub repository",
-        inputSchema: zodToJsonSchema(CreateOrUpdateFileSchema)
+        inputSchema: zodToJsonSchema(CreateOrUpdateFileSchema),
       },
       {
         name: "search_repositories",
         description: "Search for GitHub repositories",
-        inputSchema: zodToJsonSchema(SearchRepositoriesSchema)
+        inputSchema: zodToJsonSchema(SearchRepositoriesSchema),
      },
      {
        name: "create_repository",
        description: "Create a new GitHub repository in your account",
-        inputSchema: zodToJsonSchema(CreateRepositorySchema)
+        inputSchema: zodToJsonSchema(CreateRepositorySchema),
      },
      {
        name: "get_file_contents",
-        description: "Get the contents of a file or directory from a GitHub repository",
-        inputSchema: zodToJsonSchema(GetFileContentsSchema)
+        
description: + "Get the contents of a file or directory from a GitHub repository", + inputSchema: zodToJsonSchema(GetFileContentsSchema), }, { name: "push_files", - description: "Push multiple files to a GitHub repository in a single commit", - inputSchema: zodToJsonSchema(PushFilesSchema) + description: + "Push multiple files to a GitHub repository in a single commit", + inputSchema: zodToJsonSchema(PushFilesSchema), }, { name: "create_issue", description: "Create a new issue in a GitHub repository", - inputSchema: zodToJsonSchema(CreateIssueSchema) + inputSchema: zodToJsonSchema(CreateIssueSchema), }, { name: "create_pull_request", description: "Create a new pull request in a GitHub repository", - inputSchema: zodToJsonSchema(CreatePullRequestSchema) + inputSchema: zodToJsonSchema(CreatePullRequestSchema), }, { name: "fork_repository", - description: "Fork a GitHub repository to your account or specified organization", - inputSchema: zodToJsonSchema(ForkRepositorySchema) + description: + "Fork a GitHub repository to your account or specified organization", + inputSchema: zodToJsonSchema(ForkRepositorySchema), }, { name: "create_branch", description: "Create a new branch in a GitHub repository", - inputSchema: zodToJsonSchema(CreateBranchSchema) - } - ] + inputSchema: zodToJsonSchema(CreateBranchSchema), + }, + { + name: "search_code", + description: "Search for code across GitHub repositories", + inputSchema: zodToJsonSchema(SearchCodeSchema), + }, + { + name: "search_issues", + description: + "Search for issues and pull requests across GitHub repositories", + inputSchema: zodToJsonSchema(SearchIssuesSchema), + }, + { + name: "search_users", + description: "Search for users on GitHub", + inputSchema: zodToJsonSchema(SearchUsersSchema), + }, + ], }; }); -server.setRequestHandler(CallToolRequestSchema, async (request) => { - try { - if (!request.params.arguments) { - throw new Error("Arguments are required"); - } - - switch (request.params.name) { - case "fork_repository": { - const args = ForkRepositorySchema.parse(request.params.arguments); - const fork = await forkRepository(args.owner, args.repo, args.organization); - return { toolResult: fork }; +server.setRequestHandler( + CallToolRequestSchema, + async (request: CallToolRequest) => { + try { + if (!request.params.arguments) { + throw new Error("Arguments are required"); } - case "create_branch": { - const args = CreateBranchSchema.parse(request.params.arguments); - let sha: string; - if (args.from_branch) { - const response = await fetch( - `https://api.github.com/repos/${args.owner}/${args.repo}/git/refs/heads/${args.from_branch}`, - { - headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" + switch (request.params.name) { + case "fork_repository": { + const args = ForkRepositorySchema.parse(request.params.arguments); + const fork = await forkRepository( + args.owner, + args.repo, + args.organization + ); + return { toolResult: fork }; + } + + case "create_branch": { + const args = CreateBranchSchema.parse(request.params.arguments); + let sha: string; + if (args.from_branch) { + const response = await fetch( + `https://api.github.com/repos/${args.owner}/${args.repo}/git/refs/heads/${args.from_branch}`, + { + headers: { + Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + Accept: "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + }, } + ); + + if (!response.ok) { + throw new Error(`Source branch 
'${args.from_branch}' not found`); } - ); - if (!response.ok) { - throw new Error(`Source branch '${args.from_branch}' not found`); + const data = GitHubReferenceSchema.parse(await response.json()); + sha = data.object.sha; + } else { + sha = await getDefaultBranchSHA(args.owner, args.repo); } - const data = GitHubReferenceSchema.parse(await response.json()); - sha = data.object.sha; - } else { - sha = await getDefaultBranchSHA(args.owner, args.repo); + const branch = await createBranch(args.owner, args.repo, { + ref: args.branch, + sha, + }); + + return { toolResult: branch }; } - const branch = await createBranch(args.owner, args.repo, { - ref: args.branch, - sha - }); + case "search_repositories": { + const args = SearchRepositoriesSchema.parse(request.params.arguments); + const results = await searchRepositories( + args.query, + args.page, + args.perPage + ); + return { toolResult: results }; + } - return { toolResult: branch }; - } + case "create_repository": { + const args = CreateRepositorySchema.parse(request.params.arguments); + const repository = await createRepository(args); + return { toolResult: repository }; + } - case "search_repositories": { - const args = SearchRepositoriesSchema.parse(request.params.arguments); - const results = await searchRepositories(args.query, args.page, args.perPage); - return { toolResult: results }; - } + case "get_file_contents": { + const args = GetFileContentsSchema.parse(request.params.arguments); + const contents = await getFileContents( + args.owner, + args.repo, + args.path, + args.branch + ); + return { toolResult: contents }; + } - case "create_repository": { - const args = CreateRepositorySchema.parse(request.params.arguments); - const repository = await createRepository(args); - return { toolResult: repository }; - } + case "create_or_update_file": { + const args = CreateOrUpdateFileSchema.parse(request.params.arguments); + const result = await createOrUpdateFile( + args.owner, + args.repo, + args.path, + args.content, + args.message, + args.branch, + args.sha + ); + return { toolResult: result }; + } - case "get_file_contents": { - const args = GetFileContentsSchema.parse(request.params.arguments); - const contents = await getFileContents(args.owner, args.repo, args.path, args.branch); - return { toolResult: contents }; - } + case "push_files": { + const args = PushFilesSchema.parse(request.params.arguments); + const result = await pushFiles( + args.owner, + args.repo, + args.branch, + args.files, + args.message + ); + return { toolResult: result }; + } - case "create_or_update_file": { - const args = CreateOrUpdateFileSchema.parse(request.params.arguments); - const result = await createOrUpdateFile( - args.owner, - args.repo, - args.path, - args.content, - args.message, - args.branch, - args.sha - ); - return { toolResult: result }; - } + case "create_issue": { + const args = CreateIssueSchema.parse(request.params.arguments); + const { owner, repo, ...options } = args; + const issue = await createIssue(owner, repo, options); + return { toolResult: issue }; + } - case "push_files": { - const args = PushFilesSchema.parse(request.params.arguments); - const result = await pushFiles( - args.owner, - args.repo, - args.branch, - args.files, - args.message - ); - return { toolResult: result }; - } + case "create_pull_request": { + const args = CreatePullRequestSchema.parse(request.params.arguments); + const { owner, repo, ...options } = args; + const pullRequest = await createPullRequest(owner, repo, options); + return { toolResult: 
pullRequest }; + } - case "create_issue": { - const args = CreateIssueSchema.parse(request.params.arguments); - const { owner, repo, ...options } = args; - const issue = await createIssue(owner, repo, options); - return { toolResult: issue }; - } + case "search_code": { + const args = SearchCodeSchema.parse(request.params.arguments); + const results = await searchCode(args); + return { toolResult: results }; + } - case "create_pull_request": { - const args = CreatePullRequestSchema.parse(request.params.arguments); - const { owner, repo, ...options } = args; - const pullRequest = await createPullRequest(owner, repo, options); - return { toolResult: pullRequest }; - } + case "search_issues": { + const args = SearchIssuesSchema.parse(request.params.arguments); + const results = await searchIssues(args); + return { toolResult: results }; + } - default: - throw new Error(`Unknown tool: ${request.params.name}`); - } - } catch (error) { - if (error instanceof z.ZodError) { - throw new Error(`Invalid arguments: ${error.errors.map(e => `${e.path.join('.')}: ${e.message}`).join(', ')}`); + case "search_users": { + const args = SearchUsersSchema.parse(request.params.arguments); + const results = await searchUsers(args); + return { toolResult: results }; + } + + default: + throw new Error(`Unknown tool: ${request.params.name}`); + } + } catch (error) { + if (error instanceof z.ZodError) { + throw new Error( + `Invalid arguments: ${error.errors + .map( + (e: z.ZodError["errors"][number]) => + `${e.path.join(".")}: ${e.message}` + ) + .join(", ")}` + ); + } + throw error; } - throw error; } -}); +); async function runServer() { const transport = new StdioServerTransport(); @@ -643,4 +797,4 @@ async function runServer() { runServer().catch((error) => { console.error("Fatal error in main():", error); process.exit(1); -}); \ No newline at end of file +}); diff --git a/src/github/package.json b/src/github/package.json index bc7710e4..25c52b72 100644 --- a/src/github/package.json +++ b/src/github/package.json @@ -20,8 +20,10 @@ }, "dependencies": { "@modelcontextprotocol/sdk": "0.6.0", + "@types/node": "^20.11.0", "@types/node-fetch": "^2.6.12", "node-fetch": "^3.3.2", + "zod": "^3.22.4", "zod-to-json-schema": "^3.23.5" }, "devDependencies": { diff --git a/src/github/schemas.ts b/src/github/schemas.ts index 213458eb..7acb0ab4 100644 --- a/src/github/schemas.ts +++ b/src/github/schemas.ts @@ -1,10 +1,10 @@ -import { z } from 'zod'; +import { z } from "zod"; // Base schemas for common types export const GitHubAuthorSchema = z.object({ name: z.string(), email: z.string(), - date: z.string() + date: z.string(), }); // Repository related schemas @@ -15,7 +15,7 @@ export const GitHubOwnerSchema = z.object({ avatar_url: z.string(), url: z.string(), html_url: z.string(), - type: z.string() + type: z.string(), }); export const GitHubRepositorySchema = z.object({ @@ -35,7 +35,7 @@ export const GitHubRepositorySchema = z.object({ git_url: z.string(), ssh_url: z.string(), clone_url: z.string(), - default_branch: z.string() + default_branch: z.string(), }); // File content schemas @@ -50,7 +50,7 @@ export const GitHubFileContentSchema = z.object({ url: z.string(), git_url: z.string(), html_url: z.string(), - download_url: z.string() + download_url: z.string(), }); export const GitHubDirectoryContentSchema = z.object({ @@ -62,35 +62,35 @@ export const GitHubDirectoryContentSchema = z.object({ url: z.string(), git_url: z.string(), html_url: z.string(), - download_url: z.string().nullable() + download_url: 
z.string().nullable(), }); export const GitHubContentSchema = z.union([ GitHubFileContentSchema, - z.array(GitHubDirectoryContentSchema) + z.array(GitHubDirectoryContentSchema), ]); // Operation schemas export const FileOperationSchema = z.object({ path: z.string(), - content: z.string() + content: z.string(), }); // Tree and commit schemas export const GitHubTreeEntrySchema = z.object({ path: z.string(), - mode: z.enum(['100644', '100755', '040000', '160000', '120000']), - type: z.enum(['blob', 'tree', 'commit']), + mode: z.enum(["100644", "100755", "040000", "160000", "120000"]), + type: z.enum(["blob", "tree", "commit"]), size: z.number().optional(), sha: z.string(), - url: z.string() + url: z.string(), }); export const GitHubTreeSchema = z.object({ sha: z.string(), url: z.string(), tree: z.array(GitHubTreeEntrySchema), - truncated: z.boolean() + truncated: z.boolean(), }); export const GitHubCommitSchema = z.object({ @@ -102,12 +102,14 @@ export const GitHubCommitSchema = z.object({ message: z.string(), tree: z.object({ sha: z.string(), - url: z.string() + url: z.string(), }), - parents: z.array(z.object({ - sha: z.string(), - url: z.string() - })) + parents: z.array( + z.object({ + sha: z.string(), + url: z.string(), + }) + ), }); // Reference schema @@ -118,8 +120,8 @@ export const GitHubReferenceSchema = z.object({ object: z.object({ sha: z.string(), type: z.string(), - url: z.string() - }) + url: z.string(), + }), }); // Input schemas for operations @@ -127,7 +129,7 @@ export const CreateRepositoryOptionsSchema = z.object({ name: z.string(), description: z.string().optional(), private: z.boolean().optional(), - auto_init: z.boolean().optional() + auto_init: z.boolean().optional(), }); export const CreateIssueOptionsSchema = z.object({ @@ -135,7 +137,7 @@ export const CreateIssueOptionsSchema = z.object({ body: z.string().optional(), assignees: z.array(z.string()).optional(), milestone: z.number().optional(), - labels: z.array(z.string()).optional() + labels: z.array(z.string()).optional(), }); export const CreatePullRequestOptionsSchema = z.object({ @@ -144,12 +146,12 @@ export const CreatePullRequestOptionsSchema = z.object({ head: z.string(), base: z.string(), maintainer_can_modify: z.boolean().optional(), - draft: z.boolean().optional() + draft: z.boolean().optional(), }); export const CreateBranchOptionsSchema = z.object({ ref: z.string(), - sha: z.string() + sha: z.string(), }); // Response schemas for operations @@ -164,21 +166,23 @@ export const GitHubCreateUpdateFileResponseSchema = z.object({ committer: GitHubAuthorSchema, message: z.string(), tree: z.object({ - sha: z.string(), - url: z.string() - }), - parents: z.array(z.object({ sha: z.string(), url: z.string(), - html_url: z.string() - })) - }) + }), + parents: z.array( + z.object({ + sha: z.string(), + url: z.string(), + html_url: z.string(), + }) + ), + }), }); export const GitHubSearchResponseSchema = z.object({ total_count: z.number(), incomplete_results: z.boolean(), - items: z.array(GitHubRepositorySchema) + items: z.array(GitHubRepositorySchema), }); // Fork related schemas @@ -188,14 +192,14 @@ export const GitHubForkParentSchema = z.object({ owner: z.object({ login: z.string(), id: z.number(), - avatar_url: z.string() + avatar_url: z.string(), }), - html_url: z.string() + html_url: z.string(), }); export const GitHubForkSchema = GitHubRepositorySchema.extend({ parent: GitHubForkParentSchema, - source: GitHubForkParentSchema + source: GitHubForkParentSchema, }); // Issue related schemas @@ -206,7 +210,7 @@ 
export const GitHubLabelSchema = z.object({ name: z.string(), color: z.string(), default: z.boolean(), - description: z.string().optional() + description: z.string().optional(), }); export const GitHubIssueAssigneeSchema = z.object({ @@ -214,7 +218,7 @@ export const GitHubIssueAssigneeSchema = z.object({ id: z.number(), avatar_url: z.string(), url: z.string(), - html_url: z.string() + html_url: z.string(), }); export const GitHubMilestoneSchema = z.object({ @@ -226,7 +230,7 @@ export const GitHubMilestoneSchema = z.object({ number: z.number(), title: z.string(), description: z.string(), - state: z.string() + state: z.string(), }); export const GitHubIssueSchema = z.object({ @@ -251,7 +255,7 @@ export const GitHubIssueSchema = z.object({ created_at: z.string(), updated_at: z.string(), closed_at: z.string().nullable(), - body: z.string() + body: z.string(), }); // Pull Request related schemas @@ -260,7 +264,7 @@ export const GitHubPullRequestHeadSchema = z.object({ ref: z.string(), sha: z.string(), user: GitHubIssueAssigneeSchema, - repo: GitHubRepositorySchema + repo: GitHubRepositorySchema, }); export const GitHubPullRequestSchema = z.object({ @@ -285,12 +289,12 @@ export const GitHubPullRequestSchema = z.object({ assignee: GitHubIssueAssigneeSchema.nullable(), assignees: z.array(GitHubIssueAssigneeSchema), head: GitHubPullRequestHeadSchema, - base: GitHubPullRequestHeadSchema + base: GitHubPullRequestHeadSchema, }); const RepoParamsSchema = z.object({ owner: z.string().describe("Repository owner (username or organization)"), - repo: z.string().describe("Repository name") + repo: z.string().describe("Repository name"), }); export const CreateOrUpdateFileSchema = RepoParamsSchema.extend({ @@ -298,81 +302,288 @@ export const CreateOrUpdateFileSchema = RepoParamsSchema.extend({ content: z.string().describe("Content of the file"), message: z.string().describe("Commit message"), branch: z.string().describe("Branch to create/update the file in"), - sha: z.string().optional() - .describe("SHA of the file being replaced (required when updating existing files)") + sha: z + .string() + .optional() + .describe( + "SHA of the file being replaced (required when updating existing files)" + ), }); export const SearchRepositoriesSchema = z.object({ query: z.string().describe("Search query (see GitHub search syntax)"), - page: z.number().optional().describe("Page number for pagination (default: 1)"), - perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)") + page: z + .number() + .optional() + .describe("Page number for pagination (default: 1)"), + perPage: z + .number() + .optional() + .describe("Number of results per page (default: 30, max: 100)"), }); export const CreateRepositorySchema = z.object({ name: z.string().describe("Repository name"), description: z.string().optional().describe("Repository description"), - private: z.boolean().optional().describe("Whether the repository should be private"), - autoInit: z.boolean().optional().describe("Initialize with README.md") + private: z + .boolean() + .optional() + .describe("Whether the repository should be private"), + autoInit: z.boolean().optional().describe("Initialize with README.md"), }); export const GetFileContentsSchema = RepoParamsSchema.extend({ path: z.string().describe("Path to the file or directory"), - branch: z.string().optional().describe("Branch to get contents from") + branch: z.string().optional().describe("Branch to get contents from"), }); export const PushFilesSchema = 
RepoParamsSchema.extend({ branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"), - files: z.array(z.object({ - path: z.string().describe("Path where to create the file"), - content: z.string().describe("Content of the file") - })).describe("Array of files to push"), - message: z.string().describe("Commit message") + files: z + .array( + z.object({ + path: z.string().describe("Path where to create the file"), + content: z.string().describe("Content of the file"), + }) + ) + .describe("Array of files to push"), + message: z.string().describe("Commit message"), }); export const CreateIssueSchema = RepoParamsSchema.extend({ title: z.string().describe("Issue title"), body: z.string().optional().describe("Issue body/description"), - assignees: z.array(z.string()).optional().describe("Array of usernames to assign"), + assignees: z + .array(z.string()) + .optional() + .describe("Array of usernames to assign"), labels: z.array(z.string()).optional().describe("Array of label names"), - milestone: z.number().optional().describe("Milestone number to assign") + milestone: z.number().optional().describe("Milestone number to assign"), }); export const CreatePullRequestSchema = RepoParamsSchema.extend({ title: z.string().describe("Pull request title"), body: z.string().optional().describe("Pull request body/description"), - head: z.string().describe("The name of the branch where your changes are implemented"), - base: z.string().describe("The name of the branch you want the changes pulled into"), - draft: z.boolean().optional().describe("Whether to create the pull request as a draft"), - maintainer_can_modify: z.boolean().optional() - .describe("Whether maintainers can modify the pull request") + head: z + .string() + .describe("The name of the branch where your changes are implemented"), + base: z + .string() + .describe("The name of the branch you want the changes pulled into"), + draft: z + .boolean() + .optional() + .describe("Whether to create the pull request as a draft"), + maintainer_can_modify: z + .boolean() + .optional() + .describe("Whether maintainers can modify the pull request"), }); export const ForkRepositorySchema = RepoParamsSchema.extend({ - organization: z.string().optional() - .describe("Optional: organization to fork to (defaults to your personal account)") + organization: z + .string() + .optional() + .describe( + "Optional: organization to fork to (defaults to your personal account)" + ), }); export const CreateBranchSchema = RepoParamsSchema.extend({ branch: z.string().describe("Name for the new branch"), - from_branch: z.string().optional() - .describe("Optional: source branch to create from (defaults to the repository's default branch)") + from_branch: z + .string() + .optional() + .describe( + "Optional: source branch to create from (defaults to the repository's default branch)" + ), +}); + +// Search Response Schemas +export const SearchCodeItemSchema = z.object({ + name: z.string(), + path: z.string(), + sha: z.string(), + url: z.string(), + git_url: z.string(), + html_url: z.string(), + repository: GitHubRepositorySchema, + score: z.number(), +}); + +export const SearchCodeResponseSchema = z.object({ + total_count: z.number(), + incomplete_results: z.boolean(), + items: z.array(SearchCodeItemSchema), +}); + +export const SearchIssueItemSchema = z.object({ + url: z.string(), + repository_url: z.string(), + labels_url: z.string(), + comments_url: z.string(), + events_url: z.string(), + html_url: z.string(), + id: z.number(), + node_id: z.string(), + 
number: z.number(),
+  title: z.string(),
+  user: GitHubIssueAssigneeSchema,
+  labels: z.array(GitHubLabelSchema),
+  state: z.string(),
+  locked: z.boolean(),
+  assignee: GitHubIssueAssigneeSchema.nullable(),
+  assignees: z.array(GitHubIssueAssigneeSchema),
+  comments: z.number(),
+  created_at: z.string(),
+  updated_at: z.string(),
+  closed_at: z.string().nullable(),
+  body: z.string(),
+  score: z.number(),
+  pull_request: z
+    .object({
+      url: z.string(),
+      html_url: z.string(),
+      diff_url: z.string(),
+      patch_url: z.string(),
+    })
+    .optional(),
+});
+
+export const SearchIssuesResponseSchema = z.object({
+  total_count: z.number(),
+  incomplete_results: z.boolean(),
+  items: z.array(SearchIssueItemSchema),
+});
+
+export const SearchUserItemSchema = z.object({
+  login: z.string(),
+  id: z.number(),
+  node_id: z.string(),
+  avatar_url: z.string(),
+  gravatar_id: z.string(),
+  url: z.string(),
+  html_url: z.string(),
+  followers_url: z.string(),
+  following_url: z.string(),
+  gists_url: z.string(),
+  starred_url: z.string(),
+  subscriptions_url: z.string(),
+  organizations_url: z.string(),
+  repos_url: z.string(),
+  events_url: z.string(),
+  received_events_url: z.string(),
+  type: z.string(),
+  site_admin: z.boolean(),
+  score: z.number(),
+});
+
+export const SearchUsersResponseSchema = z.object({
+  total_count: z.number(),
+  incomplete_results: z.boolean(),
+  items: z.array(SearchUserItemSchema),
+});
+
+// Search Input Schemas
+export const SearchCodeSchema = z.object({
+  q: z.string().describe("Search query (see GitHub code search syntax)"),
+  sort: z
+    .enum(["", "indexed"])
+    .optional()
+    .describe("Sort field (only 'indexed' is supported)"),
+  order: z
+    .enum(["asc", "desc"])
+    .optional()
+    .describe("Sort order (asc or desc)"),
+  per_page: z
+    .number()
+    .min(1)
+    .max(100)
+    .optional()
+    .describe("Results per page (max 100)"),
+  page: z.number().min(1).optional().describe("Page number"),
+});
+
+export const SearchIssuesSchema = z.object({
+  q: z.string().describe("Search query (see GitHub issues search syntax)"),
+  sort: z
+    .enum([
+      "comments",
+      "reactions",
+      "reactions-+1",
+      "reactions--1",
+      "reactions-smile",
+      "reactions-thinking_face",
+      "reactions-heart",
+      "reactions-tada",
+      "interactions",
+      "created",
+      "updated",
+    ])
+    .optional()
+    .describe("Sort field"),
+  order: z
+    .enum(["asc", "desc"])
+    .optional()
+    .describe("Sort order (asc or desc)"),
+  per_page: z
+    .number()
+    .min(1)
+    .max(100)
+    .optional()
+    .describe("Results per page (max 100)"),
+  page: z.number().min(1).optional().describe("Page number"),
+});
+
+export const SearchUsersSchema = z.object({
+  q: z.string().describe("Search query (see GitHub users search syntax)"),
+  sort: z
+    .enum(["followers", "repositories", "joined"])
+    .optional()
+    .describe("Sort field"),
+  order: z
+    .enum(["asc", "desc"])
+    .optional()
+    .describe("Sort order (asc or desc)"),
+  per_page: z
+    .number()
+    .min(1)
+    .max(100)
+    .optional()
+    .describe("Results per page (max 100)"),
+  page: z.number().min(1).optional().describe("Page number"),
+});
+
 // Export types
 export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>;
 export type GitHubFork = z.infer<typeof GitHubForkSchema>;
 export type GitHubIssue = z.infer<typeof GitHubIssueSchema>;
-export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
+export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;
+export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
 export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>;
-export type GitHubDirectoryContent = z.infer<typeof GitHubDirectoryContentSchema>;
+export type GitHubDirectoryContent = z.infer<
+  typeof GitHubDirectoryContentSchema
+>;
 export type GitHubContent = z.infer<typeof GitHubContentSchema>;
 export type FileOperation = z.infer<typeof FileOperationSchema>;
 export type GitHubTree = z.infer<typeof GitHubTreeSchema>;
 export type GitHubCommit = z.infer<typeof GitHubCommitSchema>;
 export type GitHubReference = z.infer<typeof GitHubReferenceSchema>;
-export type CreateRepositoryOptions = z.infer<typeof CreateRepositoryOptionsSchema>;
+export type CreateRepositoryOptions = z.infer<
+  typeof CreateRepositoryOptionsSchema
+>;
 export type CreateIssueOptions = z.infer<typeof CreateIssueOptionsSchema>;
-export type CreatePullRequestOptions = z.infer<typeof CreatePullRequestOptionsSchema>;
+export type CreatePullRequestOptions = z.infer<
+  typeof CreatePullRequestOptionsSchema
+>;
 export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
-export type GitHubCreateUpdateFileResponse = z.infer<typeof GitHubCreateUpdateFileResponseSchema>;
-export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
\ No newline at end of file
+export type GitHubCreateUpdateFileResponse = z.infer<
+  typeof GitHubCreateUpdateFileResponseSchema
+>;
+export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
+export type SearchCodeItem = z.infer<typeof SearchCodeItemSchema>;
+export type SearchCodeResponse = z.infer<typeof SearchCodeResponseSchema>;
+export type SearchIssueItem = z.infer<typeof SearchIssueItemSchema>;
+export type SearchIssuesResponse = z.infer<typeof SearchIssuesResponseSchema>;
+export type SearchUserItem = z.infer<typeof SearchUserItemSchema>;
+export type SearchUsersResponse = z.infer<typeof SearchUsersResponseSchema>;

From c6a2597fcacead84436c6b4f3f266c13c9a5ff0a Mon Sep 17 00:00:00 2001
From: Raduan77
Date: Thu, 28 Nov 2024 09:54:14 +0100
Subject: [PATCH 02/43] bump docs

---
 src/github/README.md    |  57 ++++++++++-
 src/github/package.json |   2 +-
 src/github/schemas.ts   | 204 ++++++++++++++++++++++++++--------------
 3 files changed, 192 insertions(+), 71 deletions(-)

diff --git a/src/github/README.md b/src/github/README.md
index cfd268a8..b5b0bfa6 100644
--- a/src/github/README.md
+++ b/src/github/README.md
@@ -1,6 +1,6 @@
 # GitHub MCP Server
 
-MCP Server for the GitHub API, enabling file operations, repository management, and more.
+MCP Server for the GitHub API, enabling file operations, repository management, search functionality, and more.
 
 ### Features
 
@@ -8,6 +8,7 @@ MCP Server for the GitHub API, enabling file operations, repository management,
 - **Comprehensive Error Handling**: Clear error messages for common issues
 - **Git History Preservation**: Operations maintain proper Git history without force pushing
 - **Batch Operations**: Support for both single-file and multi-file operations
+- **Advanced Search**: Support for searching code, issues/PRs, and users
 
 ## Tools
 
@@ -102,6 +103,60 @@ MCP Server for the GitHub API, enabling file operations, repository management,
      - `from_branch` (optional string): Source branch (defaults to repo default)
    - Returns: Created branch reference
 
+10. `search_code`
+   - Search for code across GitHub repositories
+   - Inputs:
+     - `q` (string): Search query using GitHub code search syntax
+     - `sort` (optional string): Sort field ('indexed' only)
+     - `order` (optional string): Sort order ('asc' or 'desc')
+     - `per_page` (optional number): Results per page (max 100)
+     - `page` (optional number): Page number
+   - Returns: Code search results with repository context
+
+11. `search_issues`
+   - Search for issues and pull requests
+   - Inputs:
+     - `q` (string): Search query using GitHub issues search syntax
+     - `sort` (optional string): Sort field (comments, reactions, created, etc.)
+     - `order` (optional string): Sort order ('asc' or 'desc')
+     - `per_page` (optional number): Results per page (max 100)
+     - `page` (optional number): Page number
+   - Returns: Issue and pull request search results
+
+12. 
`search_users` + - Search for GitHub users + - Inputs: + - `q` (string): Search query using GitHub users search syntax + - `sort` (optional string): Sort field (followers, repositories, joined) + - `order` (optional string): Sort order ('asc' or 'desc') + - `per_page` (optional number): Results per page (max 100) + - `page` (optional number): Page number + - Returns: User search results + +## Search Query Syntax + +### Code Search +- `language:javascript`: Search by programming language +- `repo:owner/name`: Search in specific repository +- `path:app/src`: Search in specific path +- `extension:js`: Search by file extension +- Example: `q: "import express" language:typescript path:src/` + +### Issues Search +- `is:issue` or `is:pr`: Filter by type +- `is:open` or `is:closed`: Filter by state +- `label:bug`: Search by label +- `author:username`: Search by author +- Example: `q: "memory leak" is:issue is:open label:bug` + +### Users Search +- `type:user` or `type:org`: Filter by account type +- `followers:>1000`: Filter by followers +- `location:London`: Search by location +- Example: `q: "fullstack developer" location:London followers:>100` + +For detailed search syntax, see [GitHub's searching documentation](https://docs.github.com/en/search-github/searching-on-github). + ## Setup ### Personal Access Token diff --git a/src/github/package.json b/src/github/package.json index 25c52b72..e15e486d 100644 --- a/src/github/package.json +++ b/src/github/package.json @@ -1,6 +1,6 @@ { "name": "@modelcontextprotocol/server-github", - "version": "0.5.1", + "version": "0.6.0", "description": "MCP server for using the GitHub API", "license": "MIT", "author": "Anthropic, PBC (https://anthropic.com)", diff --git a/src/github/schemas.ts b/src/github/schemas.ts index 7acb0ab4..f6b98727 100644 --- a/src/github/schemas.ts +++ b/src/github/schemas.ts @@ -399,98 +399,148 @@ export const CreateBranchSchema = RepoParamsSchema.extend({ ), }); -// Search Response Schemas +/** + * Response schema for a code search result item + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code + */ export const SearchCodeItemSchema = z.object({ - name: z.string(), - path: z.string(), - sha: z.string(), - url: z.string(), - git_url: z.string(), - html_url: z.string(), - repository: GitHubRepositorySchema, - score: z.number(), + name: z.string().describe("The name of the file"), + path: z.string().describe("The path to the file in the repository"), + sha: z.string().describe("The SHA hash of the file"), + url: z.string().describe("The API URL for this file"), + git_url: z.string().describe("The Git URL for this file"), + html_url: z.string().describe("The HTML URL to view this file on GitHub"), + repository: GitHubRepositorySchema.describe( + "The repository where this file was found" + ), + score: z.number().describe("The search result score"), }); +/** + * Response schema for code search results + */ export const SearchCodeResponseSchema = z.object({ - total_count: z.number(), - incomplete_results: z.boolean(), - items: z.array(SearchCodeItemSchema), + total_count: z.number().describe("Total number of matching results"), + incomplete_results: z + .boolean() + .describe("Whether the results are incomplete"), + items: z.array(SearchCodeItemSchema).describe("The search results"), }); +/** + * Response schema for an issue search result item + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests + */ export const SearchIssueItemSchema = z.object({ 
- url: z.string(), - repository_url: z.string(), - labels_url: z.string(), - comments_url: z.string(), - events_url: z.string(), - html_url: z.string(), - id: z.number(), - node_id: z.string(), - number: z.number(), - title: z.string(), - user: GitHubIssueAssigneeSchema, - labels: z.array(GitHubLabelSchema), - state: z.string(), - locked: z.boolean(), - assignee: GitHubIssueAssigneeSchema.nullable(), - assignees: z.array(GitHubIssueAssigneeSchema), - comments: z.number(), - created_at: z.string(), - updated_at: z.string(), - closed_at: z.string().nullable(), - body: z.string(), - score: z.number(), + url: z.string().describe("The API URL for this issue"), + repository_url: z + .string() + .describe("The API URL for the repository where this issue was found"), + labels_url: z.string().describe("The API URL for the labels of this issue"), + comments_url: z.string().describe("The API URL for comments of this issue"), + events_url: z.string().describe("The API URL for events of this issue"), + html_url: z.string().describe("The HTML URL to view this issue on GitHub"), + id: z.number().describe("The ID of this issue"), + node_id: z.string().describe("The Node ID of this issue"), + number: z.number().describe("The number of this issue"), + title: z.string().describe("The title of this issue"), + user: GitHubIssueAssigneeSchema.describe("The user who created this issue"), + labels: z.array(GitHubLabelSchema).describe("The labels of this issue"), + state: z.string().describe("The state of this issue"), + locked: z.boolean().describe("Whether this issue is locked"), + assignee: GitHubIssueAssigneeSchema.nullable().describe( + "The assignee of this issue" + ), + assignees: z + .array(GitHubIssueAssigneeSchema) + .describe("The assignees of this issue"), + comments: z.number().describe("The number of comments on this issue"), + created_at: z.string().describe("The creation time of this issue"), + updated_at: z.string().describe("The last update time of this issue"), + closed_at: z.string().nullable().describe("The closure time of this issue"), + body: z.string().describe("The body of this issue"), + score: z.number().describe("The search result score"), pull_request: z .object({ - url: z.string(), - html_url: z.string(), - diff_url: z.string(), - patch_url: z.string(), + url: z.string().describe("The API URL for this pull request"), + html_url: z.string().describe("The HTML URL to view this pull request"), + diff_url: z.string().describe("The URL to view the diff"), + patch_url: z.string().describe("The URL to view the patch"), }) - .optional(), + .optional() + .describe("Pull request details if this is a PR"), }); +/** + * Response schema for issue search results + */ export const SearchIssuesResponseSchema = z.object({ - total_count: z.number(), - incomplete_results: z.boolean(), - items: z.array(SearchIssueItemSchema), + total_count: z.number().describe("Total number of matching results"), + incomplete_results: z + .boolean() + .describe("Whether the results are incomplete"), + items: z.array(SearchIssueItemSchema).describe("The search results"), }); +/** + * Response schema for a user search result item + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users + */ export const SearchUserItemSchema = z.object({ - login: z.string(), - id: z.number(), - node_id: z.string(), - avatar_url: z.string(), - gravatar_id: z.string(), - url: z.string(), - html_url: z.string(), - followers_url: z.string(), - following_url: z.string(), - gists_url: z.string(), - starred_url: 
z.string(), - subscriptions_url: z.string(), - organizations_url: z.string(), - repos_url: z.string(), - events_url: z.string(), - received_events_url: z.string(), - type: z.string(), - site_admin: z.boolean(), - score: z.number(), + login: z.string().describe("The username of the user"), + id: z.number().describe("The ID of the user"), + node_id: z.string().describe("The Node ID of the user"), + avatar_url: z.string().describe("The avatar URL of the user"), + gravatar_id: z.string().describe("The Gravatar ID of the user"), + url: z.string().describe("The API URL for this user"), + html_url: z.string().describe("The HTML URL to view this user on GitHub"), + followers_url: z.string().describe("The API URL for followers of this user"), + following_url: z.string().describe("The API URL for following of this user"), + gists_url: z.string().describe("The API URL for gists of this user"), + starred_url: z + .string() + .describe("The API URL for starred repositories of this user"), + subscriptions_url: z + .string() + .describe("The API URL for subscriptions of this user"), + organizations_url: z + .string() + .describe("The API URL for organizations of this user"), + repos_url: z.string().describe("The API URL for repositories of this user"), + events_url: z.string().describe("The API URL for events of this user"), + received_events_url: z + .string() + .describe("The API URL for received events of this user"), + type: z.string().describe("The type of this user"), + site_admin: z.boolean().describe("Whether this user is a site administrator"), + score: z.number().describe("The search result score"), }); +/** + * Response schema for user search results + */ export const SearchUsersResponseSchema = z.object({ - total_count: z.number(), - incomplete_results: z.boolean(), - items: z.array(SearchUserItemSchema), + total_count: z.number().describe("Total number of matching results"), + incomplete_results: z + .boolean() + .describe("Whether the results are incomplete"), + items: z.array(SearchUserItemSchema).describe("The search results"), }); -// Search Input Schemas +/** + * Input schema for code search + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code--parameters + */ export const SearchCodeSchema = z.object({ - q: z.string().describe("Search query (see GitHub code search syntax)"), + q: z + .string() + .describe( + "Search query. See GitHub code search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-code" + ), sort: z .enum(["", "indexed"]) .optional() - .describe("Sort field (only 'indexed' is supported)"), + .describe("Sort field. Only 'indexed' is supported"), order: z .enum(["asc", "desc"]) .optional() @@ -504,8 +554,16 @@ export const SearchCodeSchema = z.object({ page: z.number().min(1).optional().describe("Page number"), }); +/** + * Input schema for issues search + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests--parameters + */ export const SearchIssuesSchema = z.object({ - q: z.string().describe("Search query (see GitHub issues search syntax)"), + q: z + .string() + .describe( + "Search query. 
See GitHub issues search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-issues-and-pull-requests" + ), sort: z .enum([ "comments", @@ -535,8 +593,16 @@ export const SearchIssuesSchema = z.object({ page: z.number().min(1).optional().describe("Page number"), }); +/** + * Input schema for users search + * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users--parameters + */ export const SearchUsersSchema = z.object({ - q: z.string().describe("Search query (see GitHub users search syntax)"), + q: z + .string() + .describe( + "Search query. See GitHub users search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-users" + ), sort: z .enum(["followers", "repositories", "joined"]) .optional() From 08015830a68f4dfa2cc56e40cdd82538ea85322c Mon Sep 17 00:00:00 2001 From: Jerad Bitner Date: Thu, 28 Nov 2024 13:59:37 -0800 Subject: [PATCH 03/43] feat: add issue management functionalities for github - Implemented `listIssues`, `updateIssue`, and `addIssueComment` functions to manage GitHub issues. - Introduced corresponding schemas: `ListIssuesOptionsSchema`, `UpdateIssueOptionsSchema`, and `IssueCommentSchema`. - Updated server request handlers to support new functionalities. - Enhanced README with documentation for new features. --- src/github/README.md | 37 ++++++++++++ src/github/index.ts | 133 +++++++++++++++++++++++++++++++++++++++++- src/github/schemas.ts | 33 +++++++++++ 3 files changed, 202 insertions(+), 1 deletion(-) diff --git a/src/github/README.md b/src/github/README.md index cfd268a8..a3ce0a13 100644 --- a/src/github/README.md +++ b/src/github/README.md @@ -102,6 +102,43 @@ MCP Server for the GitHub API, enabling file operations, repository management, - `from_branch` (optional string): Source branch (defaults to repo default) - Returns: Created branch reference +10. `list_issues` + - List and filter repository issues + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `state` (optional string): Filter by state ('open', 'closed', 'all') + - `labels` (optional string[]): Filter by labels + - `sort` (optional string): Sort by ('created', 'updated', 'comments') + - `direction` (optional string): Sort direction ('asc', 'desc') + - `since` (optional string): Filter by date (ISO 8601 timestamp) + - `page` (optional number): Page number + - `per_page` (optional number): Results per page + - Returns: Array of issue details + +11. `update_issue` + - Update an existing issue + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `issue_number` (number): Issue number to update + - `title` (optional string): New title + - `body` (optional string): New description + - `state` (optional string): New state ('open' or 'closed') + - `labels` (optional string[]): New labels + - `assignees` (optional string[]): New assignees + - `milestone` (optional number): New milestone number + - Returns: Updated issue details + +12. 
`add_issue_comment`
+   - Add a comment to an issue
+   - Inputs:
+     - `owner` (string): Repository owner
+     - `repo` (string): Repository name
+     - `issue_number` (number): Issue number to comment on
+     - `body` (string): Comment text
+   - Returns: Created comment details
+
 ## Setup
 
 ### Personal Access Token
diff --git a/src/github/index.ts b/src/github/index.ts
index 0676a34c..ab691e38 100644
--- a/src/github/index.ts
+++ b/src/github/index.ts
@@ -41,7 +41,10 @@ import {
   CreateIssueSchema,
   CreatePullRequestSchema,
   ForkRepositorySchema,
-  CreateBranchSchema
+  CreateBranchSchema,
+  ListIssuesOptionsSchema,
+  UpdateIssueOptionsSchema,
+  IssueCommentSchema
 } from './schemas.js';
 import { z } from 'zod';
 import { zodToJsonSchema } from 'zod-to-json-schema';
@@ -467,6 +470,98 @@ async function createRepository(
   return GitHubRepositorySchema.parse(await response.json());
 }
 
+async function listIssues(
+  owner: string,
+  repo: string,
+  options: z.infer<typeof ListIssuesOptionsSchema>
+): Promise<GitHubIssue[]> {
+  const url = new URL(`https://api.github.com/repos/${owner}/${repo}/issues`);
+  
+  // Add query parameters
+  if (options.state) url.searchParams.append('state', options.state);
+  if (options.labels) url.searchParams.append('labels', options.labels.join(','));
+  if (options.sort) url.searchParams.append('sort', options.sort);
+  if (options.direction) url.searchParams.append('direction', options.direction);
+  if (options.since) url.searchParams.append('since', options.since);
+  if (options.page) url.searchParams.append('page', options.page.toString());
+  if (options.per_page) url.searchParams.append('per_page', options.per_page.toString());
+
+  const response = await fetch(url.toString(), {
+    headers: {
+      "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+      "Accept": "application/vnd.github.v3+json",
+      "User-Agent": "github-mcp-server"
+    }
+  });
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return z.array(GitHubIssueSchema).parse(await response.json());
+}
+
+async function updateIssue(
+  owner: string,
+  repo: string,
+  issueNumber: number,
+  options: z.infer<typeof UpdateIssueOptionsSchema>
+): Promise<GitHubIssue> {
+  const response = await fetch(
+    `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`,
+    {
+      method: "PATCH",
+      headers: {
+        "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+        "Accept": "application/vnd.github.v3+json",
+        "User-Agent": "github-mcp-server",
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({
+        title: options.title,
+        body: options.body,
+        state: options.state,
+        labels: options.labels,
+        assignees: options.assignees,
+        milestone: options.milestone
+      })
+    }
+  );
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return GitHubIssueSchema.parse(await response.json());
+}
+
+async function addIssueComment(
+  owner: string,
+  repo: string,
+  issueNumber: number,
+  body: string
+): Promise<z.infer<typeof IssueCommentSchema>> {
+  const response = await fetch(
+    `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}/comments`,
+    {
+      method: "POST",
+      headers: {
+        "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`,
+        "Accept": "application/vnd.github.v3+json",
+        "User-Agent": "github-mcp-server",
+        "Content-Type": "application/json"
+      },
+      body: JSON.stringify({ body })
+    }
+  );
+
+  if (!response.ok) {
+    throw new Error(`GitHub API error: ${response.statusText}`);
+  }
+
+  return IssueCommentSchema.parse(await response.json());
+}
+
 server.setRequestHandler(ListToolsRequestSchema, async () => {
   return {
     tools: [
@@ -514,6 +609,21 @@ 
server.setRequestHandler(ListToolsRequestSchema, async () => { name: "create_branch", description: "Create a new branch in a GitHub repository", inputSchema: zodToJsonSchema(CreateBranchSchema) + }, + { + name: "list_issues", + description: "List issues in a GitHub repository with filtering options", + inputSchema: zodToJsonSchema(ListIssuesOptionsSchema) + }, + { + name: "update_issue", + description: "Update an existing issue in a GitHub repository", + inputSchema: zodToJsonSchema(UpdateIssueOptionsSchema) + }, + { + name: "add_issue_comment", + description: "Add a comment to an existing issue", + inputSchema: zodToJsonSchema(IssueCommentSchema) } ] }; @@ -623,6 +733,27 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { return { toolResult: pullRequest }; } + case "list_issues": { + const args = ListIssuesOptionsSchema.parse(request.params.arguments); + const { owner, repo, ...options } = args; + const issues = await listIssues(owner, repo, options); + return { toolResult: issues }; + } + + case "update_issue": { + const args = UpdateIssueOptionsSchema.parse(request.params.arguments); + const { owner, repo, issue_number, ...options } = args; + const issue = await updateIssue(owner, repo, issue_number, options); + return { toolResult: issue }; + } + + case "add_issue_comment": { + const args = IssueCommentSchema.parse(request.params.arguments); + const { owner, repo, issue_number, body } = args; + const comment = await addIssueComment(owner, repo, issue_number, body); + return { toolResult: comment }; + } + default: throw new Error(`Unknown tool: ${request.params.name}`); } diff --git a/src/github/schemas.ts b/src/github/schemas.ts index 213458eb..e57ea6d9 100644 --- a/src/github/schemas.ts +++ b/src/github/schemas.ts @@ -358,6 +358,39 @@ export const CreateBranchSchema = RepoParamsSchema.extend({ .describe("Optional: source branch to create from (defaults to the repository's default branch)") }); +// Add these schema definitions for issue management + +export const ListIssuesOptionsSchema = z.object({ + owner: z.string(), + repo: z.string(), + state: z.enum(['open', 'closed', 'all']).optional(), + labels: z.array(z.string()).optional(), + sort: z.enum(['created', 'updated', 'comments']).optional(), + direction: z.enum(['asc', 'desc']).optional(), + since: z.string().optional(), // ISO 8601 timestamp + page: z.number().optional(), + per_page: z.number().optional() +}); + +export const UpdateIssueOptionsSchema = z.object({ + owner: z.string(), + repo: z.string(), + issue_number: z.number(), + title: z.string().optional(), + body: z.string().optional(), + state: z.enum(['open', 'closed']).optional(), + labels: z.array(z.string()).optional(), + assignees: z.array(z.string()).optional(), + milestone: z.number().optional() +}); + +export const IssueCommentSchema = z.object({ + owner: z.string(), + repo: z.string(), + issue_number: z.number(), + body: z.string() +}); + // Export types export type GitHubAuthor = z.infer; export type GitHubFork = z.infer; From 2b731fb70f950ad6fa4560653afdc20beeb49789 Mon Sep 17 00:00:00 2001 From: Mati Horovitz <7645314@gmail.com> Date: Sat, 30 Nov 2024 20:22:35 +0200 Subject: [PATCH 04/43] fix(fetch): fix puppeteer server to allow evaluate async functions --- src/puppeteer/index.ts | 45 ++++++++++++++++++++++++++---------------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/src/puppeteer/index.ts b/src/puppeteer/index.ts index d3aa2a30..82f7c86d 100644 --- a/src/puppeteer/index.ts +++ b/src/puppeteer/index.ts @@ -124,6 
+124,15 @@ async function ensureBrowser() { return page!; } +declare global { + interface Window { + mcpHelper: { + logs: string[], + originalConsole: Partial, + } + } +} + async function handleToolCall(name: string, args: any): Promise<{ toolResult: CallToolResult }> { const page = await ensureBrowser(); @@ -285,33 +294,35 @@ async function handleToolCall(name: string, args: any): Promise<{ toolResult: Ca case "puppeteer_evaluate": try { - const result = await page.evaluate((script) => { - const logs: string[] = []; - const originalConsole = { ...console }; + await page.evaluate(() => { + window.mcpHelper = { + logs: [], + originalConsole: { ...console }, + }; ['log', 'info', 'warn', 'error'].forEach(method => { (console as any)[method] = (...args: any[]) => { - logs.push(`[${method}] ${args.join(' ')}`); - (originalConsole as any)[method](...args); + window.mcpHelper.logs.push(`[${method}] ${args.join(' ')}`); + (window.mcpHelper.originalConsole as any)[method](...args); }; - }); - - try { - const result = eval(script); - Object.assign(console, originalConsole); - return { result, logs }; - } catch (error) { - Object.assign(console, originalConsole); - throw error; - } - }, args.script); + } ); + } ); + + const result = await page.evaluate( args.script ); + + const logs = await page.evaluate(() => { + Object.assign(console, window.mcpHelper.originalConsole); + const logs = window.mcpHelper.logs; + delete ( window.mcpHelper as any).logs; + return logs; + }); return { toolResult: { content: [ { type: "text", - text: `Execution result:\n${JSON.stringify(result.result, null, 2)}\n\nConsole output:\n${result.logs.join('\n')}`, + text: `Execution result:\n${JSON.stringify(result, null, 2)}\n\nConsole output:\n${logs.join('\n')}`, }, ], isError: false, From 68b880d96b797a0bddf0322effb29edf67afdc8d Mon Sep 17 00:00:00 2001 From: Mati Horovitz <7645314@gmail.com> Date: Sat, 30 Nov 2024 22:57:18 +0200 Subject: [PATCH 05/43] Fix cleanup --- src/puppeteer/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/puppeteer/index.ts b/src/puppeteer/index.ts index 82f7c86d..9840fc2e 100644 --- a/src/puppeteer/index.ts +++ b/src/puppeteer/index.ts @@ -313,7 +313,7 @@ async function handleToolCall(name: string, args: any): Promise<{ toolResult: Ca const logs = await page.evaluate(() => { Object.assign(console, window.mcpHelper.originalConsole); const logs = window.mcpHelper.logs; - delete ( window.mcpHelper as any).logs; + delete ( window as any).mcpHelper; return logs; }); @@ -426,4 +426,4 @@ async function runServer() { await server.connect(transport); } -runServer().catch(console.error); \ No newline at end of file +runServer().catch(console.error); From e73d831c2563a722d39b8e963d2841e43337c925 Mon Sep 17 00:00:00 2001 From: RamXX Date: Sun, 1 Dec 2024 10:58:43 -0800 Subject: [PATCH 06/43] Added new community link --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 28604b9a..060b3a8a 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Each MCP server is implemented with either the [Typescript MCP SDK](https://gith - **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. 
Workers/KV/R2/D1) - **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real using monitoring data on your Raygun account +- **[Tavily search](https://github.com/RamXX/mcp-tavily")** - An MCP server for Tavily's search & news API, with explicit site inclusions/exclusions ## 🚀 Getting Started From 579417305111343e7bfce42fcc3215bb8d6d630c Mon Sep 17 00:00:00 2001 From: Matt Ferrante Date: Sun, 1 Dec 2024 21:06:41 -0700 Subject: [PATCH 07/43] Added any-chat-completions-mcp --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 28604b9a..ebe759b6 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ Each MCP server is implemented with either the [Typescript MCP SDK](https://gith - **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. Workers/KV/R2/D1) - **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real using monitoring data on your Raygun account +- **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more. ## 🚀 Getting Started From b34948846924bcf334d586ba595062270f7501b9 Mon Sep 17 00:00:00 2001 From: Skirano Date: Mon, 2 Dec 2024 15:11:07 -0500 Subject: [PATCH 08/43] added aws kb server --- src/aws-kb-retrieval-server/README.md | 53 +++++++ src/aws-kb-retrieval-server/index.ts | 166 ++++++++++++++++++++++ src/aws-kb-retrieval-server/package.json | 30 ++++ src/aws-kb-retrieval-server/tsconfig.json | 17 +++ 4 files changed, 266 insertions(+) create mode 100644 src/aws-kb-retrieval-server/README.md create mode 100644 src/aws-kb-retrieval-server/index.ts create mode 100644 src/aws-kb-retrieval-server/package.json create mode 100644 src/aws-kb-retrieval-server/tsconfig.json diff --git a/src/aws-kb-retrieval-server/README.md b/src/aws-kb-retrieval-server/README.md new file mode 100644 index 00000000..ac2bdb43 --- /dev/null +++ b/src/aws-kb-retrieval-server/README.md @@ -0,0 +1,53 @@ +# AWS Knowledge Base Retrieval MCP Server + +An MCP server implementation for retrieving information from the AWS Knowledge Base using the Bedrock Agent Runtime. + +## Features + +- **RAG (Retrieval-Augmented Generation)**: Retrieve context from the AWS Knowledge Base based on a query and a Knowledge Base ID. +- **Supports multiple results retrieval**: Option to retrieve a customizable number of results. + +## Tools + +- **retrieve_from_aws_kb** + - Perform retrieval operations using the AWS Knowledge Base. + - Inputs: + - `query` (string): The search query for retrieval. + - `knowledgeBaseId` (string): The ID of the AWS Knowledge Base. + - `n` (number, optional): Number of results to retrieve (default: 3). + +## Configuration + +### Setting up AWS Credentials + +1. Obtain AWS access key ID, secret access key, and region from the AWS Management Console. +2. Ensure these credentials have appropriate permissions for Bedrock Agent Runtime operations. 
+ +### Usage with Claude Desktop + +Add this to your `claude_desktop_config.json`: + +```json +{ + "mcpServers": { + "aws-kb-retrieval": { + "command": "npx", + "args": [ + "-y", + "@modelcontextprotocol/server-aws-kb-retrieval" + ], + "env": { + "AWS_ACCESS_KEY_ID": "YOUR_ACCESS_KEY_HERE", + "AWS_SECRET_ACCESS_KEY": "YOUR_SECRET_ACCESS_KEY_HERE", + "AWS_REGION": "YOUR_AWS_REGION_HERE" + } + } + } +} +``` + +## License + +This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. + +This README assumes that your server package is named `@modelcontextprotocol/server-aws-kb-retrieval`. Adjust the package name and installation details if they differ in your setup. Also, ensure that your server script is correctly built and that all dependencies are properly managed in your `package.json`. diff --git a/src/aws-kb-retrieval-server/index.ts b/src/aws-kb-retrieval-server/index.ts new file mode 100644 index 00000000..f60a544e --- /dev/null +++ b/src/aws-kb-retrieval-server/index.ts @@ -0,0 +1,166 @@ +#!/usr/bin/env node +import { Server } from "@modelcontextprotocol/sdk/server/index.js"; +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; +import { + CallToolRequestSchema, + ListToolsRequestSchema, + Tool, +} from "@modelcontextprotocol/sdk/types.js"; +import { + BedrockAgentRuntimeClient, + RetrieveCommand, + RetrieveCommandInput, +} from "@aws-sdk/client-bedrock-agent-runtime"; + +// AWS client initialization +const bedrockClient = new BedrockAgentRuntimeClient({ + region: process.env.AWS_REGION, + credentials: { + accessKeyId: process.env.AWS_ACCESS_KEY_ID!, + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!, + }, +}); + +interface RAGSource { + id: string; + fileName: string; + snippet: string; + score: number; +} + +async function retrieveContext( + query: string, + knowledgeBaseId: string, + n: number = 3 +): Promise<{ + context: string; + isRagWorking: boolean; + ragSources: RAGSource[]; +}> { + try { + if (!knowledgeBaseId) { + console.error("knowledgeBaseId is not provided"); + return { + context: "", + isRagWorking: false, + ragSources: [], + }; + } + + const input: RetrieveCommandInput = { + knowledgeBaseId: knowledgeBaseId, + retrievalQuery: { text: query }, + retrievalConfiguration: { + vectorSearchConfiguration: { numberOfResults: n }, + }, + }; + + const command = new RetrieveCommand(input); + const response = await bedrockClient.send(command); + const rawResults = response?.retrievalResults || []; + const ragSources: RAGSource[] = rawResults + .filter((res) => res?.content?.text) + .map((result, index) => { + const uri = result?.location?.s3Location?.uri || ""; + const fileName = uri.split("/").pop() || `Source-${index}.txt`; + return { + id: (result.metadata?.["x-amz-bedrock-kb-chunk-id"] as string) || `chunk-${index}`, + fileName: fileName.replace(/_/g, " ").replace(".txt", ""), + snippet: result.content?.text || "", + score: (result.score as number) || 0, + }; + }) + .slice(0, 3); + + const context = rawResults + .filter((res): res is { content: { text: string } } => res?.content?.text !== undefined) + .map(res => res.content.text) + .join("\n\n"); + + return { + context, + isRagWorking: true, + ragSources, + }; + } catch (error) { + console.error("RAG Error:", error); + return { context: "", isRagWorking: false, ragSources: [] }; + } +} + +// Define the 
retrieval tool +const RETRIEVAL_TOOL: Tool = { + name: "retrieve_from_aws_kb", + description: "Performs retrieval from the AWS Knowledge Base using the provided query and Knowledge Base ID.", + inputSchema: { + type: "object", + properties: { + query: { type: "string", description: "The query to perform retrieval on" }, + knowledgeBaseId: { type: "string", description: "The ID of the AWS Knowledge Base" }, + n: { type: "number", default: 3, description: "Number of results to retrieve" }, + }, + required: ["query", "knowledgeBaseId"], + }, +}; + +// Server setup +const server = new Server( + { + name: "aws-kb-retrieval-server", + version: "0.2.0", + }, + { + capabilities: { + tools: {}, + }, + }, +); + +// Request handlers +server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [RETRIEVAL_TOOL], +})); + +server.setRequestHandler(CallToolRequestSchema, async (request) => { + const { name, arguments: args } = request.params; + + if (name === "retrieve_from_aws_kb") { + const { query, knowledgeBaseId, n = 3 } = args as Record; + try { + const result = await retrieveContext(query, knowledgeBaseId, n); + if (result.isRagWorking) { + return { + content: [ + { type: "text", text: `Context: ${result.context}` }, + { type: "text", text: `RAG Sources: ${JSON.stringify(result.ragSources)}` }, + ], + }; + } else { + return { + content: [{ type: "text", text: "Retrieval failed or returned no results." }], + }; + } + } catch (error) { + return { + content: [{ type: "text", text: `Error occurred: ${error}` }], + }; + } + } else { + return { + content: [{ type: "text", text: `Unknown tool: ${name}` }], + isError: true, + }; + } +}); + +// Server startup +async function runServer() { + const transport = new StdioServerTransport(); + await server.connect(transport); + console.error("AWS KB Retrieval Server running on stdio"); +} + +runServer().catch((error) => { + console.error("Fatal error running server:", error); + process.exit(1); +}); diff --git a/src/aws-kb-retrieval-server/package.json b/src/aws-kb-retrieval-server/package.json new file mode 100644 index 00000000..39ba7bd4 --- /dev/null +++ b/src/aws-kb-retrieval-server/package.json @@ -0,0 +1,30 @@ +{ + "name": "@modelcontextprotocol/server-aws-kb-retrieval", + "version": "0.1.0", + "description": "MCP server for AWS Knowledge Base retrieval using Bedrock Agent Runtime", + "license": "MIT", + "author": "Anthropic, PBC (https://anthropic.com)", + "homepage": "https://modelcontextprotocol.io", + "bugs": "https://github.com/modelcontextprotocol/servers/issues", + "type": "module", + "bin": { + "mcp-server-aws-kb-retrieval": "dist/index.js" + }, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc && shx chmod +x dist/*.js", + "prepare": "npm run build", + "watch": "tsc --watch" + }, + "dependencies": { + "@modelcontextprotocol/sdk": "0.5.0", + "@aws-sdk/client-bedrock-agent-runtime": "^3.0.0" + }, + "devDependencies": { + "@types/node": "^20.10.0", + "shx": "^0.3.4", + "typescript": "^5.6.2" + } +} diff --git a/src/aws-kb-retrieval-server/tsconfig.json b/src/aws-kb-retrieval-server/tsconfig.json new file mode 100644 index 00000000..98b13da0 --- /dev/null +++ b/src/aws-kb-retrieval-server/tsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "composite": true, + "incremental": true, + "tsBuildInfoFile": "./dist/.tsbuildinfo" + }, + "include": [ + "./**/*.ts" + ], + "exclude": [ + "node_modules", + "dist" + ] +} From 
1096d5cd7520c031abd545d860518706a02dfd45 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Mon, 2 Dec 2024 15:13:05 -0800 Subject: [PATCH 09/43] edit_file tool --- src/filesystem/index.ts | 234 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 234 insertions(+) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index b4c4e92d..ebe84a08 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -106,6 +106,27 @@ const WriteFileArgsSchema = z.object({ content: z.string(), }); +const EditOperation = z.object({ + startLine: z.number().int().min(1).optional(), + contextLines: z.number().int().min(0).default(3), + oldText: z.string(), + newText: z.string(), + verifyState: z.boolean().default(true), + readBeforeEdit: z.boolean().default(false), + findAnchor: z.string().optional(), + anchorOffset: z.number().int().default(0), + beforeContext: z.string().optional(), + afterContext: z.string().optional(), + contextRadius: z.number().int().min(0).default(3), + insertMode: z.enum(['replace', 'before', 'after']).default('replace'), + dryRun: z.boolean().default(false), +}); + +const EditFileArgsSchema = z.object({ + path: z.string(), + edits: z.array(EditOperation), +}); + const CreateDirectoryArgsSchema = z.object({ path: z.string(), }); @@ -202,6 +223,166 @@ async function searchFiles( return results; } +// Line ending detection and normalization utilities +function detectLineEnding(content: string): string { + // Check if the content contains CRLF + if (content.includes('\r\n')) { + return '\r\n'; + } + // Default to LF + return '\n'; +} + +function normalizeLineEndings(content: string): string { + // Convert all line endings to LF for internal processing + return content.replace(/\r\n/g, '\n'); +} + +function preserveLineEndings(newContent: string, originalLineEnding: string): string { + // Ensure all line endings match the original file + if (originalLineEnding === '\r\n') { + return newContent.replace(/\n/g, '\r\n'); + } + return newContent; +} + +// Edit preview type +interface EditPreview { + originalContent: string; + newContent: string; + lineNumber: number; + matchedAnchor?: string; + contextVerified: boolean; +} + +// File editing utilities +async function applyFileEdits(filePath: string, edits: z.infer[]): Promise { + // Read the file and detect its line endings + let currentContent = await fs.readFile(filePath, 'utf-8'); + const originalLineEnding = detectLineEnding(currentContent); + + // Normalize content for processing + currentContent = normalizeLineEndings(currentContent); + const previews: EditPreview[] = []; + let lines = currentContent.split('\n'); + + // Sort edits by line number in descending order + const sortedEdits = [...edits].sort((a, b) => { + if (a.startLine && b.startLine) { + return b.startLine - a.startLine; + } + return 0; + }); + + for (const edit of sortedEdits) { + // Normalize the edit text for comparison + const normalizedOldText = normalizeLineEndings(edit.oldText); + const normalizedNewText = normalizeLineEndings(edit.newText); + + let startIdx = edit.startLine ? 
edit.startLine - 1 : -1; + + if (edit.findAnchor) { + // Normalize anchor text and search in normalized content + const normalizedAnchor = normalizeLineEndings(edit.findAnchor); + const content = lines.join('\n'); + const anchorIdx = content.indexOf(normalizedAnchor); + if (anchorIdx === -1) { + throw new Error(`Anchor text not found: ${edit.findAnchor}`); + } + const beforeAnchor = content.substring(0, anchorIdx); + const anchorLine = beforeAnchor.split('\n').length - 1; + startIdx = anchorLine + (edit.anchorOffset || 0); + } + + if (startIdx === -1) { + throw new Error('No valid edit position found - need either startLine or findAnchor'); + } + + // Context verification with normalized line endings + let contextVerified = true; + if (edit.beforeContext || edit.afterContext) { + const radius = edit.contextRadius || 3; + const beforeText = normalizeLineEndings(lines.slice(Math.max(0, startIdx - radius), startIdx).join('\n')); + const afterText = normalizeLineEndings(lines.slice(startIdx + 1, startIdx + radius + 1).join('\n')); + + if (edit.beforeContext && !beforeText.includes(normalizeLineEndings(edit.beforeContext))) { + contextVerified = false; + } + if (edit.afterContext && !afterText.includes(normalizeLineEndings(edit.afterContext))) { + contextVerified = false; + } + + if (!contextVerified && edit.verifyState) { + throw new Error( + `Context verification failed at line ${startIdx + 1}.\n` + + `Expected before context: ${edit.beforeContext}\n` + + `Expected after context: ${edit.afterContext}\n` + + `Found before context: ${beforeText}\n` + + `Found after context: ${afterText}` + ); + } + } + + const oldLines = normalizedOldText.split('\n'); + const newLines = normalizedNewText.split('\n'); + + // Content verification with normalized line endings + if (edit.verifyState) { + const existingContent = normalizeLineEndings(lines.slice(startIdx, startIdx + oldLines.length).join('\n')); + if (existingContent !== normalizedOldText) { + throw new Error( + `Edit validation failed: Content mismatch at line ${startIdx + 1}.\n` + + `Expected:\n${edit.oldText}\n` + + `Found:\n${lines.slice(startIdx, startIdx + oldLines.length).join('\n')}` + ); + } + } + + if (edit.dryRun) { + previews.push({ + originalContent: preserveLineEndings(lines.slice(startIdx, startIdx + oldLines.length).join('\n'), originalLineEnding), + newContent: preserveLineEndings(edit.newText, originalLineEnding), + lineNumber: startIdx + 1, + matchedAnchor: edit.findAnchor, + contextVerified + }); + continue; + } + + // Apply the edit based on insertMode + switch (edit.insertMode) { + case 'before': + lines.splice(startIdx, 0, ...newLines); + break; + case 'after': + lines.splice(startIdx + oldLines.length, 0, ...newLines); + break; + default: // 'replace' + lines.splice(startIdx, oldLines.length, ...newLines); + } + + let updatedContent = lines.join('\n'); + + // Preserve original line endings when writing + updatedContent = preserveLineEndings(updatedContent, originalLineEnding); + + // Re-read file if requested + if (edit.readBeforeEdit) { + await fs.writeFile(filePath, updatedContent, 'utf-8'); + currentContent = await fs.readFile(filePath, 'utf-8'); + currentContent = normalizeLineEndings(currentContent); + lines = currentContent.split('\n'); + } + } + + if (edits.some(e => e.dryRun)) { + return previews; + } + + // Preserve original line endings in final content + return preserveLineEndings(lines.join('\n'), originalLineEnding); +} + // Tool handlers server.setRequestHandler(ListToolsRequestSchema, async () => { return { @@ 
-233,6 +414,29 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { "Handles text content with proper encoding. Only works within allowed directories.", inputSchema: zodToJsonSchema(WriteFileArgsSchema) as ToolInput, }, + { + name: "edit_file", + description: + "Make selective edits to a text file with advanced pattern matching and validation. " + + "Supports multiple edit modes:\n" + + "1. Line-based: Use startLine to specify exact positions\n" + + "2. Pattern-based: Use findAnchor to locate edit points by matching text\n" + + "3. Context-aware: Verify surrounding text with beforeContext/afterContext\n\n" + + "Features:\n" + + "- Dry run mode for previewing changes (dryRun: true)\n" + + "- Multiple insertion modes: 'replace', 'before', 'after'\n" + + "- Anchor-based positioning with offset support\n" + + "- Automatic state refresh between edits (readBeforeEdit)\n" + + "- Context verification to ensure edit safety\n\n" + + "Recommended workflow:\n" + + "1. Use dryRun to preview changes\n" + + "2. Use findAnchor for resilient positioning\n" + + "3. Enable readBeforeEdit for multi-step changes\n" + + "4. Verify context when position is critical\n\n" + + "This is safer than complete file overwrites as it verifies existing content " + + "and supports granular changes. Only works within allowed directories.", + inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, + }, { name: "create_directory", description: @@ -346,6 +550,36 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { }; } + case "edit_file": { + const parsed = EditFileArgsSchema.safeParse(args); + if (!parsed.success) { + throw new Error(`Invalid arguments for edit_file: ${parsed.error}`); + } + const validPath = await validatePath(parsed.data.path); + const result = await applyFileEdits(validPath, parsed.data.edits); + + // If it's a dry run, format the previews + if (Array.isArray(result)) { + const previewText = result.map(preview => + `Line ${preview.lineNumber}:\n` + + `${preview.matchedAnchor ? 
`Matched anchor: ${preview.matchedAnchor}\n` : ''}` + + `Context verified: ${preview.contextVerified}\n` + + `Original:\n${preview.originalContent}\n` + + `New:\n${preview.newContent}\n` + ).join('\n---\n'); + + return { + content: [{ type: "text", text: `Edit preview:\n${previewText}` }], + }; + } + + // Otherwise write the changes + await fs.writeFile(validPath, result, "utf-8"); + return { + content: [{ type: "text", text: `Successfully applied edits to ${parsed.data.path}` }], + }; + } + case "create_directory": { const parsed = CreateDirectoryArgsSchema.safeParse(args); if (!parsed.success) { From 9f2a77e044c59b48e3d6b89147c34499c4513795 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Mon, 2 Dec 2024 17:43:17 -0800 Subject: [PATCH 10/43] updated readme --- src/filesystem/README.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/src/filesystem/README.md b/src/filesystem/README.md index c2950cd5..99337d3c 100644 --- a/src/filesystem/README.md +++ b/src/filesystem/README.md @@ -36,6 +36,35 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio - `path` (string): File location - `content` (string): File content +- **edit_file** + - Make selective edits to files with advanced pattern matching + - Features: + - Multiple positioning modes: + - Line-based: Specify exact line numbers + - Pattern-based: Find positions using anchor text + - Context-aware: Verify surrounding content + - Insert modes: 'replace', 'before', or 'after' content + - Dry run preview of changes + - Cross-platform line ending support (CRLF/LF) + - Git-friendly content verification + - Inputs: + - `path` (string): File to edit + - `edits` (array): List of edit operations + - `startLine?` (number): Line number for edit (optional) + - `findAnchor?` (string): Text to locate edit position (optional) + - `anchorOffset` (number): Lines to offset from anchor (default: 0) + - `oldText` (string): Content to replace/verify + - `newText` (string): New content to insert + - `insertMode` (string): 'replace', 'before', or 'after' (default: 'replace') + - `beforeContext?` (string): Expected content before edit point (optional) + - `afterContext?` (string): Expected content after edit point (optional) + - `contextRadius` (number): Lines to check for context (default: 3) + - `verifyState` (boolean): Verify content matches before editing (default: true) + - `readBeforeEdit` (boolean): Refresh file state between edits (default: false) + - `dryRun` (boolean): Preview changes without applying them (default: false) + - Returns preview information for dry runs, otherwise applies changes + - Preserves original line endings and handles Git auto CRLF/LF + - **create_directory** - Create new directory or ensure it exists - Input: `path` (string) From a1bc14d38dc6b224b6074045c4794c353b21b19a Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Mon, 2 Dec 2024 17:48:19 -0800 Subject: [PATCH 11/43] improve line ending docs --- src/filesystem/index.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index ebe84a08..8ebae2fa 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -224,6 +224,12 @@ async function searchFiles( } // Line ending detection and normalization utilities +// These functions ensure consistent behavior across different platforms and Git configurations. 
+// They handle the following scenarios: +// - Windows CRLF (\r\n) vs Unix LF (\n) line endings +// - Git's core.autocrlf setting converting line endings +// - Mixed line endings within the same file +// This makes the edit functionality reliable regardless of the development environment. function detectLineEnding(content: string): string { // Check if the content contains CRLF if (content.includes('\r\n')) { From 431d90971722824e7c2966a4399b235f09cf3c52 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Mon, 2 Dec 2024 18:03:36 -0800 Subject: [PATCH 12/43] indentation support --- src/filesystem/index.ts | 126 +++++++++++++++++++++++----------------- 1 file changed, 73 insertions(+), 53 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 8ebae2fa..44aa9480 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -223,13 +223,34 @@ async function searchFiles( return results; } -// Line ending detection and normalization utilities -// These functions ensure consistent behavior across different platforms and Git configurations. -// They handle the following scenarios: -// - Windows CRLF (\r\n) vs Unix LF (\n) line endings -// - Git's core.autocrlf setting converting line endings -// - Mixed line endings within the same file -// This makes the edit functionality reliable regardless of the development environment. +// Content normalization utilities +// These functions handle: +// - Line ending normalization (CRLF vs LF) +// - Indentation preservation and normalization +// - Git's core.autocrlf setting +// - Mixed line endings +// This makes the edit functionality reliable across different environments and formatting styles + +function normalizeForComparison(content: string): string { + // First normalize line endings + let normalized = content.replace(/\r\n/g, '\n'); + // Remove leading/trailing whitespace from each line while preserving empty lines + normalized = normalized.split('\n') + .map(line => line.trim()) + .join('\n'); + return normalized; +} + +function preserveIndentation(newContent: string, originalContent: string): string { + const originalLines = originalContent.split(/\r?\n/); + const indentMatch = originalLines.find(line => line.trim())?.match(/^\s*/); + const baseIndent = indentMatch ? indentMatch[0] : ''; + + return newContent.split(/\r?\n/) + .map(line => line.trim() ? baseIndent + line : line) + .join(originalContent.includes('\r\n') ? 
'\r\n' : '\n'); +} + function detectLineEnding(content: string): string { // Check if the content contains CRLF if (content.includes('\r\n')) { @@ -263,14 +284,10 @@ interface EditPreview { // File editing utilities async function applyFileEdits(filePath: string, edits: z.infer[]): Promise { - // Read the file and detect its line endings + // Read the file content let currentContent = await fs.readFile(filePath, 'utf-8'); - const originalLineEnding = detectLineEnding(currentContent); - - // Normalize content for processing - currentContent = normalizeLineEndings(currentContent); const previews: EditPreview[] = []; - let lines = currentContent.split('\n'); + let lines = currentContent.split(/\r?\n/); // Sort edits by line number in descending order const sortedEdits = [...edits].sort((a, b) => { @@ -281,73 +298,76 @@ async function applyFileEdits(filePath: string, edits: z.infer Date: Mon, 2 Dec 2024 19:03:48 -0800 Subject: [PATCH 13/43] line numbering improvements --- src/filesystem/index.ts | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 44aa9480..ed6f627d 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -301,20 +301,21 @@ async function applyFileEdits(filePath: string, edits: z.infer Date: Mon, 2 Dec 2024 19:21:05 -0800 Subject: [PATCH 14/43] simplify text replacement approach for better reliability --- src/filesystem/index.ts | 203 +++++++++++++++------------------------- 1 file changed, 77 insertions(+), 126 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index ed6f627d..f88036de 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -107,19 +107,30 @@ const WriteFileArgsSchema = z.object({ }); const EditOperation = z.object({ - startLine: z.number().int().min(1).optional(), - contextLines: z.number().int().min(0).default(3), - oldText: z.string(), - newText: z.string(), - verifyState: z.boolean().default(true), - readBeforeEdit: z.boolean().default(false), - findAnchor: z.string().optional(), - anchorOffset: z.number().int().default(0), - beforeContext: z.string().optional(), - afterContext: z.string().optional(), - contextRadius: z.number().int().min(0).default(3), - insertMode: z.enum(['replace', 'before', 'after']).default('replace'), - dryRun: z.boolean().default(false), + // Primary edit specification + oldText: z.string().describe('Exact text to match, including whitespace/formatting'), + newText: z.string().describe('Replacement text with desired formatting'), + + // Location finding (one of these should be provided) + startLine: z.number().int().min(1).optional().describe('Exact line number to start edit'), + findAnchor: z.string().optional().describe('Text to search for to locate edit position'), + + // Edit behavior + insertMode: z.enum(['replace', 'before', 'after']).default('replace') + .describe('Whether to replace matched text or insert before/after it'), + verifyState: z.boolean().default(true) + .describe('Whether to verify exact text matches before editing'), + readBeforeEdit: z.boolean().default(false) + .describe('Whether to re-read file between multiple edits'), + + // Optional context verification + beforeContext: z.string().optional().describe('Text that should appear before edit point'), + afterContext: z.string().optional().describe('Text that should appear after edit point'), + contextRadius: z.number().int().min(0).default(3) + .describe('Number of lines to check for context matches'), + + // Preview 
mode + dryRun: z.boolean().default(false).describe('Preview changes without applying them'), }); const EditFileArgsSchema = z.object({ @@ -224,53 +235,14 @@ async function searchFiles( } // Content normalization utilities -// These functions handle: -// - Line ending normalization (CRLF vs LF) -// - Indentation preservation and normalization -// - Git's core.autocrlf setting -// - Mixed line endings -// This makes the edit functionality reliable across different environments and formatting styles - +// Used only for fuzzy matching of anchor text and context verification +// Does not affect the actual content replacement function normalizeForComparison(content: string): string { - // First normalize line endings - let normalized = content.replace(/\r\n/g, '\n'); - // Remove leading/trailing whitespace from each line while preserving empty lines - normalized = normalized.split('\n') + // Normalize line endings and whitespace for comparison only + return content.replace(/\r\n/g, '\n') + .split('\n') .map(line => line.trim()) .join('\n'); - return normalized; -} - -function preserveIndentation(newContent: string, originalContent: string): string { - const originalLines = originalContent.split(/\r?\n/); - const indentMatch = originalLines.find(line => line.trim())?.match(/^\s*/); - const baseIndent = indentMatch ? indentMatch[0] : ''; - - return newContent.split(/\r?\n/) - .map(line => line.trim() ? baseIndent + line : line) - .join(originalContent.includes('\r\n') ? '\r\n' : '\n'); -} - -function detectLineEnding(content: string): string { - // Check if the content contains CRLF - if (content.includes('\r\n')) { - return '\r\n'; - } - // Default to LF - return '\n'; -} - -function normalizeLineEndings(content: string): string { - // Convert all line endings to LF for internal processing - return content.replace(/\r\n/g, '\n'); -} - -function preserveLineEndings(newContent: string, originalLineEnding: string): string { - // Ensure all line endings match the original file - if (originalLineEnding === '\r\n') { - return newContent.replace(/\n/g, '\r\n'); - } - return newContent; } // Edit preview type @@ -287,48 +259,37 @@ async function applyFileEdits(filePath: string, edits: z.infer { - if (a.startLine && b.startLine) { - return b.startLine - a.startLine; - } - return 0; - }); - for (const edit of sortedEdits) { - let startIdx = edit.startLine ? edit.startLine - 1 : -1; + for (const edit of edits) { + let editContent = currentContent; + let editPosition = -1; + // Find the edit position using anchor if provided if (edit.findAnchor) { - // Use line-by-line comparison for accurate anchor matching - let foundLine = -1; + const normalizedContent = normalizeForComparison(currentContent); const normalizedAnchor = normalizeForComparison(edit.findAnchor); + const anchorPos = normalizedContent.indexOf(normalizedAnchor); - for (let i = 0; i < lines.length; i++) { - const normalizedLine = normalizeForComparison(lines[i]); - if (normalizedLine.includes(normalizedAnchor)) { - foundLine = i; - break; - } - } - if (foundLine === -1) { + if (anchorPos === -1) { throw new Error(`Edit failed - anchor text not found: ${edit.findAnchor} in ${filePath}`); } - startIdx = foundLine + (edit.anchorOffset || 0); - } - - if (startIdx === -1) { + + // Map normalized position back to original content + editPosition = currentContent.slice(0, anchorPos).split('\n').length - 1; + } else if (edit.startLine) { + editPosition = edit.startLine - 1; + } else { throw new Error(`Edit failed - no valid position found in ${filePath}. 
Operation requires either startLine or findAnchor`); } - - // Context verification with normalized comparison - let contextVerified = true; + + // Verify context if provided if (edit.beforeContext || edit.afterContext) { + const lines = currentContent.split('\n'); const radius = edit.contextRadius || 3; - const beforeText = lines.slice(Math.max(0, startIdx - radius), startIdx).join('\n'); - const afterText = lines.slice(startIdx + 1, startIdx + radius + 1).join('\n'); + const beforeText = lines.slice(Math.max(0, editPosition - radius), editPosition).join('\n'); + const afterText = lines.slice(editPosition + 1, editPosition + radius + 1).join('\n'); + let contextVerified = true; if (edit.beforeContext && !normalizeForComparison(beforeText).includes(normalizeForComparison(edit.beforeContext))) { contextVerified = false; } @@ -338,67 +299,58 @@ async function applyFileEdits(filePath: string, edits: z.infer Date: Mon, 2 Dec 2024 19:38:33 -0800 Subject: [PATCH 15/43] simplify edit_file to use git-style diffs and substring matching --- src/filesystem/index.ts | 187 ++++++++-------------------------------- 1 file changed, 38 insertions(+), 149 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index f88036de..ff113afe 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -107,30 +107,12 @@ const WriteFileArgsSchema = z.object({ }); const EditOperation = z.object({ - // Primary edit specification - oldText: z.string().describe('Exact text to match, including whitespace/formatting'), - newText: z.string().describe('Replacement text with desired formatting'), - - // Location finding (one of these should be provided) - startLine: z.number().int().min(1).optional().describe('Exact line number to start edit'), - findAnchor: z.string().optional().describe('Text to search for to locate edit position'), - - // Edit behavior - insertMode: z.enum(['replace', 'before', 'after']).default('replace') - .describe('Whether to replace matched text or insert before/after it'), - verifyState: z.boolean().default(true) - .describe('Whether to verify exact text matches before editing'), - readBeforeEdit: z.boolean().default(false) - .describe('Whether to re-read file between multiple edits'), - - // Optional context verification - beforeContext: z.string().optional().describe('Text that should appear before edit point'), - afterContext: z.string().optional().describe('Text that should appear after edit point'), - contextRadius: z.number().int().min(0).default(3) - .describe('Number of lines to check for context matches'), - - // Preview mode - dryRun: z.boolean().default(false).describe('Preview changes without applying them'), + // The text to search for + oldText: z.string().describe('Text to search for - can be a substring of the target'), + // The new text to replace with + newText: z.string().describe('Text to replace the found text with'), + // Optional: preview changes without applying them + dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') }); const EditFileArgsSchema = z.object({ @@ -234,131 +216,59 @@ async function searchFiles( return results; } -// Content normalization utilities -// Used only for fuzzy matching of anchor text and context verification -// Does not affect the actual content replacement -function normalizeForComparison(content: string): string { - // Normalize line endings and whitespace for comparison only - return content.replace(/\r\n/g, '\n') - .split('\n') - .map(line => line.trim()) - .join('\n'); -} - 
// Edit preview type interface EditPreview { - originalContent: string; - newContent: string; + original: string; + modified: string; lineNumber: number; - matchedAnchor?: string; - contextVerified: boolean; + preview: string; // Git-style diff format } // File editing utilities async function applyFileEdits(filePath: string, edits: z.infer[]): Promise { - // Read the file content - let currentContent = await fs.readFile(filePath, 'utf-8'); + let content = await fs.readFile(filePath, 'utf-8'); const previews: EditPreview[] = []; for (const edit of edits) { - let editContent = currentContent; - let editPosition = -1; - - // Find the edit position using anchor if provided - if (edit.findAnchor) { - const normalizedContent = normalizeForComparison(currentContent); - const normalizedAnchor = normalizeForComparison(edit.findAnchor); - const anchorPos = normalizedContent.indexOf(normalizedAnchor); - - if (anchorPos === -1) { - throw new Error(`Edit failed - anchor text not found: ${edit.findAnchor} in ${filePath}`); - } - - // Map normalized position back to original content - editPosition = currentContent.slice(0, anchorPos).split('\n').length - 1; - } else if (edit.startLine) { - editPosition = edit.startLine - 1; - } else { - throw new Error(`Edit failed - no valid position found in ${filePath}. Operation requires either startLine or findAnchor`); - } - - // Verify context if provided - if (edit.beforeContext || edit.afterContext) { - const lines = currentContent.split('\n'); - const radius = edit.contextRadius || 3; - const beforeText = lines.slice(Math.max(0, editPosition - radius), editPosition).join('\n'); - const afterText = lines.slice(editPosition + 1, editPosition + radius + 1).join('\n'); - - let contextVerified = true; - if (edit.beforeContext && !normalizeForComparison(beforeText).includes(normalizeForComparison(edit.beforeContext))) { - contextVerified = false; - } - if (edit.afterContext && !normalizeForComparison(afterText).includes(normalizeForComparison(edit.afterContext))) { - contextVerified = false; - } - - if (!contextVerified && edit.verifyState) { - throw new Error( - `Edit failed - context verification failed in ${filePath} at line ${editPosition + 1}\n` + - `Expected before context: ${edit.beforeContext}\n` + - `Expected after context: ${edit.afterContext}\n` + - `Found before context: ${beforeText}\n` + - `Found after context: ${afterText}\n` - ); - } - } - - // Look for exact match of oldText - const searchStr = edit.oldText; - const searchPos = currentContent.indexOf(searchStr); - - if (searchPos === -1 && edit.verifyState) { + const pos = content.indexOf(edit.oldText); + if (pos === -1) { throw new Error( - `Edit failed - content not found in ${filePath}\n` + - `Expected to find:\n${searchStr}\n` + `Search text not found in ${filePath}:\n${edit.oldText}` ); } + // Calculate line number for reporting + const lineNumber = content.slice(0, pos).split(/\r?\n/).length; + if (edit.dryRun) { + // Create git-style diff preview + const preview = [ + `@@ line ${lineNumber} @@`, + '<<<<<<< ORIGINAL', + edit.oldText, + '=======', + edit.newText, + '>>>>>>> MODIFIED' + ].join('\n'); + previews.push({ - originalContent: searchStr, - newContent: edit.newText, - lineNumber: editPosition + 1, - matchedAnchor: edit.findAnchor, - contextVerified: true + original: edit.oldText, + modified: edit.newText, + lineNumber, + preview }); continue; } - // Apply the edit based on insertMode - switch (edit.insertMode) { - case 'before': - editContent = currentContent.slice(0, searchPos) + - 
edit.newText + currentContent.slice(searchPos); - break; - case 'after': - editContent = currentContent.slice(0, searchPos + searchStr.length) + - edit.newText + currentContent.slice(searchPos + searchStr.length); - break; - default: // 'replace' - editContent = currentContent.slice(0, searchPos) + - edit.newText + currentContent.slice(searchPos + searchStr.length); - } - - // Update content for next edit - if (edit.readBeforeEdit) { - await fs.writeFile(filePath, editContent, 'utf-8'); - currentContent = await fs.readFile(filePath, 'utf-8'); - } else { - currentContent = editContent; - } + // Apply the edit + content = content.slice(0, pos) + edit.newText + content.slice(pos + edit.oldText.length); } if (edits.some(e => e.dryRun)) { return previews; } - return currentContent; + return content; } // Tool handlers @@ -395,24 +305,10 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { { name: "edit_file", description: - "Make selective edits to a text file with advanced pattern matching and validation. " + - "Supports multiple edit modes:\n" + - "1. Line-based: Use startLine to specify exact positions\n" + - "2. Pattern-based: Use findAnchor to locate edit points by matching text\n" + - "3. Context-aware: Verify surrounding text with beforeContext/afterContext\n\n" + - "Features:\n" + - "- Dry run mode for previewing changes (dryRun: true)\n" + - "- Multiple insertion modes: 'replace', 'before', 'after'\n" + - "- Anchor-based positioning with offset support\n" + - "- Automatic state refresh between edits (readBeforeEdit)\n" + - "- Context verification to ensure edit safety\n\n" + - "Recommended workflow:\n" + - "1. Use dryRun to preview changes\n" + - "2. Use findAnchor for resilient positioning\n" + - "3. Enable readBeforeEdit for multi-step changes\n" + - "4. Verify context when position is critical\n\n" + - "This is safer than complete file overwrites as it verifies existing content " + - "and supports granular changes. Only works within allowed directories.", + "Make selective edits to a text file using simple search and replace with git-style preview format. " + + "Finds text to replace using substring matching and shows changes in a familiar git-diff format. " + + "Use dry run mode to preview changes before applying them. " + + "Only works within allowed directories.", inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, }, { @@ -538,14 +434,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { // If it's a dry run, format the previews if (Array.isArray(result)) { - const previewText = result.map(preview => - `Line ${preview.lineNumber}:\n` + - `${preview.matchedAnchor ? 
`Matched anchor: ${preview.matchedAnchor}\n` : ''}` + - `Context verified: ${preview.contextVerified}\n` + - `Original:\n${preview.originalContent}\n` + - `New:\n${preview.newContent}\n` - ).join('\n---\n'); - + const previewText = result.map(preview => preview.preview).join('\n\n'); return { content: [{ type: "text", text: `Edit preview:\n${previewText}` }], }; From da0ef8106740069b77876843f22570aa870a80df Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Tue, 3 Dec 2024 07:21:59 -0800 Subject: [PATCH 16/43] update readme --- src/filesystem/README.md | 32 +++++++++----------------------- 1 file changed, 9 insertions(+), 23 deletions(-) diff --git a/src/filesystem/README.md b/src/filesystem/README.md index 99337d3c..f0e34cd0 100644 --- a/src/filesystem/README.md +++ b/src/filesystem/README.md @@ -37,33 +37,19 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio - `content` (string): File content - **edit_file** - - Make selective edits to files with advanced pattern matching + - Make selective edits using search and replace - Features: - - Multiple positioning modes: - - Line-based: Specify exact line numbers - - Pattern-based: Find positions using anchor text - - Context-aware: Verify surrounding content - - Insert modes: 'replace', 'before', or 'after' content - - Dry run preview of changes - - Cross-platform line ending support (CRLF/LF) - - Git-friendly content verification + - Simple substring matching for finding text + - Git-style preview format for changes + - Preview changes with dry run mode + - Preserves original file formatting and indentation - Inputs: - `path` (string): File to edit - `edits` (array): List of edit operations - - `startLine?` (number): Line number for edit (optional) - - `findAnchor?` (string): Text to locate edit position (optional) - - `anchorOffset` (number): Lines to offset from anchor (default: 0) - - `oldText` (string): Content to replace/verify - - `newText` (string): New content to insert - - `insertMode` (string): 'replace', 'before', or 'after' (default: 'replace') - - `beforeContext?` (string): Expected content before edit point (optional) - - `afterContext?` (string): Expected content after edit point (optional) - - `contextRadius` (number): Lines to check for context (default: 3) - - `verifyState` (boolean): Verify content matches before editing (default: true) - - `readBeforeEdit` (boolean): Refresh file state between edits (default: false) - - `dryRun` (boolean): Preview changes without applying them (default: false) + - `oldText` (string): Text to search for (can be substring) + - `newText` (string): Text to replace with + - `dryRun` (boolean): Preview changes without applying (default: false) - Returns preview information for dry runs, otherwise applies changes - - Preserves original line endings and handles Git auto CRLF/LF - **create_directory** - Create new directory or ensure it exists @@ -127,4 +113,4 @@ Add this to your `claude_desktop_config.json`: ## License -This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. +This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
\ No newline at end of file From de4976903408f72e7c1a7f2af442526bf4a63642 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Tue, 3 Dec 2024 08:51:01 -0800 Subject: [PATCH 17/43] limitations added to readme --- src/filesystem/README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/filesystem/README.md b/src/filesystem/README.md index f0e34cd0..31995d8f 100644 --- a/src/filesystem/README.md +++ b/src/filesystem/README.md @@ -42,7 +42,11 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio - Simple substring matching for finding text - Git-style preview format for changes - Preview changes with dry run mode - - Preserves original file formatting and indentation + - Preserves consistent indentation patterns + - Limitations: + - Intended for content changes, not code formatting + - Mixed tabs/spaces can cause pattern matching issues + - Use code formatters (e.g., Prettier, ESLint) before content edits - Inputs: - `path` (string): File to edit - `edits` (array): List of edit operations From 506eabab398353a587f28dd5c022810ab3fd53dc Mon Sep 17 00:00:00 2001 From: Jerad Bitner Date: Tue, 3 Dec 2024 13:21:00 -0800 Subject: [PATCH 18/43] fix: update listIssues and updateIssue function signatures --- src/github/index.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/github/index.ts b/src/github/index.ts index f3c35e86..d861cbbb 100644 --- a/src/github/index.ts +++ b/src/github/index.ts @@ -473,7 +473,7 @@ async function createRepository( async function listIssues( owner: string, repo: string, - options: z.infer + options: Omit, 'owner' | 'repo'> ): Promise { const url = new URL(`https://api.github.com/repos/${owner}/${repo}/issues`); @@ -505,7 +505,7 @@ async function updateIssue( owner: string, repo: string, issueNumber: number, - options: z.infer + options: Omit, 'owner' | 'repo' | 'issue_number'> ): Promise { const response = await fetch( `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`, From dc9f56720feb4edc15d6dbd7d51b8c4ddd6ec92c Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Tue, 3 Dec 2024 16:45:04 -0800 Subject: [PATCH 19/43] same compare for edit and dry run --- src/filesystem/index.ts | 62 +++++++++++++++++------------------------ 1 file changed, 26 insertions(+), 36 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index ff113afe..bf433596 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -111,13 +111,13 @@ const EditOperation = z.object({ oldText: z.string().describe('Text to search for - can be a substring of the target'), // The new text to replace with newText: z.string().describe('Text to replace the found text with'), - // Optional: preview changes without applying them - dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') }); const EditFileArgsSchema = z.object({ path: z.string(), edits: z.array(EditOperation), + // Optional: preview changes without applying them + dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') }); const CreateDirectoryArgsSchema = z.object({ @@ -225,7 +225,7 @@ interface EditPreview { } // File editing utilities -async function applyFileEdits(filePath: string, edits: z.infer[]): Promise { +async function applyFileEdits(filePath: string, edits: Array<{oldText: string, newText: string}>, dryRun: boolean = false): Promise { let content = await fs.readFile(filePath, 'utf-8'); const previews: EditPreview[] = []; @@ -237,38 +237,29 @@ 
async function applyFileEdits(filePath: string, edits: z.infer>>>>>> MODIFIED' + ].join('\n'); - if (edit.dryRun) { - // Create git-style diff preview - const preview = [ - `@@ line ${lineNumber} @@`, - '<<<<<<< ORIGINAL', - edit.oldText, - '=======', - edit.newText, - '>>>>>>> MODIFIED' - ].join('\n'); - - previews.push({ - original: edit.oldText, - modified: edit.newText, - lineNumber, - preview - }); - continue; - } + previews.push({ + original: edit.oldText, + modified: edit.newText, + lineNumber, + preview + }); - // Apply the edit - content = content.slice(0, pos) + edit.newText + content.slice(pos + edit.oldText.length); - } - - if (edits.some(e => e.dryRun)) { - return previews; + if (!dryRun) { + content = content.slice(0, pos) + edit.newText + content.slice(pos + edit.oldText.length); + } } - return content; + return dryRun ? previews : content; } // Tool handlers @@ -430,18 +421,17 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { throw new Error(`Invalid arguments for edit_file: ${parsed.error}`); } const validPath = await validatePath(parsed.data.path); - const result = await applyFileEdits(validPath, parsed.data.edits); + const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun); // If it's a dry run, format the previews - if (Array.isArray(result)) { - const previewText = result.map(preview => preview.preview).join('\n\n'); + if (parsed.data.dryRun) { + const previewText = (result as EditPreview[]).map(preview => preview.preview).join('\n\n'); return { content: [{ type: "text", text: `Edit preview:\n${previewText}` }], }; } - - // Otherwise write the changes - await fs.writeFile(validPath, result, "utf-8"); + + await fs.writeFile(validPath, result as string, "utf-8"); return { content: [{ type: "text", text: `Successfully applied edits to ${parsed.data.path}` }], }; From 44f68efbdcdf8edaf30594f6319810d2a12d030c Mon Sep 17 00:00:00 2001 From: Himanshu Ladia Date: Wed, 4 Dec 2024 14:58:47 +0530 Subject: [PATCH 20/43] add capability to list commits of a branch --- src/github/index.ts | 48 ++++++++++++++++++++++++++++++++++++++++++- src/github/schemas.ts | 9 ++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) diff --git a/src/github/index.ts b/src/github/index.ts index 800bce83..06cdc7d4 100644 --- a/src/github/index.ts +++ b/src/github/index.ts @@ -41,7 +41,8 @@ import { CreateIssueSchema, CreatePullRequestSchema, ForkRepositorySchema, - CreateBranchSchema + CreateBranchSchema, + ListCommitsSchema } from './schemas.js'; import { z } from 'zod'; import { zodToJsonSchema } from 'zod-to-json-schema'; @@ -467,6 +468,40 @@ async function createRepository( return GitHubRepositorySchema.parse(await response.json()); } +async function listCommits( + owner: string, + repo: string, + page: number = 1, + perPage: number = 30, + sha?: string, +): Promise { + const url = new URL(`https://api.github.com/repos/${owner}/${repo}/commits`); + url.searchParams.append("page", page.toString()); + url.searchParams.append("per_page", perPage.toString()); + if (sha) { + url.searchParams.append("sha", sha); + } + + const response = await fetch( + url.toString(), + { + method: "GET", + headers: { + "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, + "Accept": "application/vnd.github.v3+json", + "User-Agent": "github-mcp-server", + "Content-Type": "application/json" + }, + } + ); + + if (!response.ok) { + throw new Error(`GitHub API error: ${response.statusText}`); + } + + return GitHubCommitSchema.array().parse(await response.json()); +} + 
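(For illustration only: a minimal usage sketch, not part of the patch above. It assumes the module's `GITHUB_PERSONAL_ACCESS_TOKEN` is set and uses placeholder owner/repo values; `listCommits` and the commit schema are the ones added in this commit.)

```typescript
// Hypothetical example: list the five most recent commits on "main" for a
// placeholder repository and print an abbreviated SHA with each commit's
// first message line. The field names used here (sha, commit.message)
// follow the commit schema parsed by listCommits above.
async function demoListCommits(): Promise<void> {
  const commits = await listCommits("octocat", "hello-world", 1, 5, "main");
  for (const c of commits) {
    console.log(`${c.sha.slice(0, 7)} ${c.commit.message.split("\n")[0]}`);
  }
}
```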
server.setRequestHandler(ListToolsRequestSchema, async () => {
   return {
     tools: [
@@ -514,6 +549,11 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
         name: "create_branch",
         description: "Create a new branch in a GitHub repository",
         inputSchema: zodToJsonSchema(CreateBranchSchema)
+      },
+      {
+        name: "list_commits",
+        description: "Get a list of commits of a branch in a GitHub repository",
+        inputSchema: zodToJsonSchema(ListCommitsSchema)
       }
     ]
   };
@@ -623,6 +663,12 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         return { content: [{ type: "text", text: JSON.stringify(pullRequest, null, 2) }] };
       }
 
+      case "list_commits": {
+        const args = ListCommitsSchema.parse(request.params.arguments);
+        const results = await listCommits(args.owner, args.repo, args.page, args.perPage, args.sha);
+        return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }] };
+      }
+
       default:
         throw new Error(`Unknown tool: ${request.params.name}`);
     }
diff --git a/src/github/schemas.ts b/src/github/schemas.ts
index 213458eb..defc2569 100644
--- a/src/github/schemas.ts
+++ b/src/github/schemas.ts
@@ -308,6 +308,15 @@ export const SearchRepositoriesSchema = z.object({
   perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)")
 });
 
+export const ListCommitsSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  page: z.number().optional().describe("Page number for pagination (default: 1)"),
+  perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
+  sha: z.string().optional()
+    .describe("Branch name or commit SHA to start listing commits from (defaults to the repository's default branch)")
+});
+
 export const CreateRepositorySchema = z.object({
   name: z.string().describe("Repository name"),
   description: z.string().optional().describe("Repository description"),

From 18c3b9fbc76417d7a474a107847b5b8a91c9e5c1 Mon Sep 17 00:00:00 2001
From: "Dana K. Williams" 
Date: Wed, 4 Dec 2024 06:04:42 -0500
Subject: [PATCH 21/43] Add MySQL MCP server to Community Servers

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index ea3d3dfc..54cdb741 100644
--- a/README.md
+++ b/README.md
@@ -48,6 +48,7 @@ A growing set of community-developed and maintained servers demonstrates various
 - **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - This server is a server that installs other MCP servers for you.
 - **[Spotify MCP](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
 - **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
+- **[MySQL MCP](https://github.com/designcomputer/mysql_mcp_server)** - MySQL database integration with configurable access controls, schema inspection, and comprehensive security guidelines
 - **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
 
- **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities From c31f00f54fb70222dd9e8e8c4b17466151414718 Mon Sep 17 00:00:00 2001 From: Himanshu Ladia Date: Wed, 4 Dec 2024 17:29:56 +0530 Subject: [PATCH 22/43] fix contract --- src/github/index.ts | 8 +++++--- src/github/schemas.ts | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/src/github/index.ts b/src/github/index.ts index 06cdc7d4..66cae6a1 100644 --- a/src/github/index.ts +++ b/src/github/index.ts @@ -18,6 +18,7 @@ import { GitHubSearchResponseSchema, GitHubTreeSchema, GitHubCommitSchema, + GitHubListCommitsSchema, CreateRepositoryOptionsSchema, CreateIssueOptionsSchema, CreatePullRequestOptionsSchema, @@ -42,7 +43,8 @@ import { CreatePullRequestSchema, ForkRepositorySchema, CreateBranchSchema, - ListCommitsSchema + ListCommitsSchema, + GitHubListCommits } from './schemas.js'; import { z } from 'zod'; import { zodToJsonSchema } from 'zod-to-json-schema'; @@ -474,7 +476,7 @@ async function listCommits( page: number = 1, perPage: number = 30, sha?: string, -): Promise { +): Promise { const url = new URL(`https://api.github.com/repos/${owner}/${repo}/commits`); url.searchParams.append("page", page.toString()); url.searchParams.append("per_page", perPage.toString()); @@ -499,7 +501,7 @@ async function listCommits( throw new Error(`GitHub API error: ${response.statusText}`); } - return GitHubCommitSchema.array().parse(await response.json()); + return GitHubListCommitsSchema.parse(await response.json()); } server.setRequestHandler(ListToolsRequestSchema, async () => { diff --git a/src/github/schemas.ts b/src/github/schemas.ts index defc2569..ad9f30c8 100644 --- a/src/github/schemas.ts +++ b/src/github/schemas.ts @@ -93,6 +93,25 @@ export const GitHubTreeSchema = z.object({ truncated: z.boolean() }); +export const GitHubListCommitsSchema = z.array(z.object({ + sha: z.string(), + node_id: z.string(), + commit: z.object({ + author: GitHubAuthorSchema, + committer: GitHubAuthorSchema, + message: z.string(), + tree: z.object({ + sha: z.string(), + url: z.string() + }), + url: z.string(), + comment_count: z.number(), + }), + url: z.string(), + html_url: z.string(), + comments_url: z.string() +})); + export const GitHubCommitSchema = z.object({ sha: z.string(), node_id: z.string(), @@ -378,6 +397,7 @@ export type GitHubContent = z.infer; export type FileOperation = z.infer; export type GitHubTree = z.infer; export type GitHubCommit = z.infer; +export type GitHubListCommits = z.infer; export type GitHubReference = z.infer; export type CreateRepositoryOptions = z.infer; export type CreateIssueOptions = z.infer; From 215574f511fa05d5bf9393bbcfe2b9df03eb0d40 Mon Sep 17 00:00:00 2001 From: Himanshu Ladia Date: Wed, 4 Dec 2024 18:16:23 +0530 Subject: [PATCH 23/43] update read me --- src/github/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/github/README.md b/src/github/README.md index cfd268a8..de29874c 100644 --- a/src/github/README.md +++ b/src/github/README.md @@ -102,6 +102,16 @@ MCP Server for the GitHub API, enabling file operations, repository management, - `from_branch` (optional string): Source branch (defaults to repo default) - Returns: Created branch reference +10. 
`list_commits`
+   - Gets commits of a branch in a repository
+   - Inputs:
+     - `owner` (string): Repository owner
+     - `repo` (string): Repository name
+     - `page` (optional number): Page number for pagination (default: 1)
+     - `perPage` (optional number): Number of records per page (default: 30, max: 100)
+     - `sha` (optional string): Branch name or commit SHA to start listing commits from
+   - Returns: List of commits
+
 ## Setup
 
 ### Personal Access Token

From 3dc616b5ece33c4b18cf60ac9995cef4466da7d1 Mon Sep 17 00:00:00 2001
From: "Dana K. Williams" 
Date: Wed, 4 Dec 2024 08:56:31 -0500
Subject: [PATCH 24/43] Update MySQL server description for consistency

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 54cdb741..967d05a0 100644
--- a/README.md
+++ b/README.md
@@ -48,7 +48,7 @@ A growing set of community-developed and maintained servers demonstrates various
 - **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - This server is a server that installs other MCP servers for you.
 - **[Spotify MCP](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
 - **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
-- **[MySQL MCP](https://github.com/designcomputer/mysql_mcp_server)** - MySQL database integration with configurable access controls, schema inspection, and comprehensive security guidelines
+- **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** - MySQL database integration with configurable access controls and schema inspection
 - **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
 - **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities

From 1ec75e87fc6ab1e843e866ac4fffa8cd4cd535af Mon Sep 17 00:00:00 2001
From: Marc Goodner 
Date: Wed, 4 Dec 2024 07:27:02 -0800
Subject: [PATCH 25/43] improve multiline text editing reliability by tracking
 positions and applying edits in reverse order

---
 src/filesystem/index.ts | 55 ++++++++++++++++++++++++++++++-----------
 1 file changed, 40 insertions(+), 15 deletions(-)

diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts
index bf433596..e736b1c1 100644
--- a/src/filesystem/index.ts
+++ b/src/filesystem/index.ts
@@ -216,6 +216,29 @@ async function searchFiles(
   return results;
 }
 
+interface Position {
+  start: number;
+  end: number;
+  lineNumber: number;
+}
+
+function findTextPosition(content: string, searchText: string): Position {
+  // Handle different line endings
+  const normalized = content.replace(/\r\n/g, '\n');
+  const searchNormalized = searchText.replace(/\r\n/g, '\n');
+
+  const pos = normalized.indexOf(searchNormalized);
+  if (pos === -1) {
+    throw new Error(`Text not found:\n${searchText}`);
+  }
+
+  return {
+    start: pos,
+    end: pos + searchText.length,
+    lineNumber: normalized.slice(0, pos).split('\n').length
+  };
+}
+
 // Edit preview type
 interface EditPreview {
   original: string;
@@ -225,37 +248,39 @@ interface EditPreview {
 }
 
 // File editing utilities
-async function applyFileEdits(filePath: string, edits: Array<{oldText: string, newText: string}>, dryRun: boolean = false): Promise {
+async function applyFileEdits(filePath: string, edits: Array<{oldText: string, newText: string}>, dryRun = false): Promise {
   let content = await 
fs.readFile(filePath, 'utf-8'); const previews: EditPreview[] = []; - for (const edit of edits) { - const pos = content.indexOf(edit.oldText); - if (pos === -1) { - throw new Error( - `Search text not found in ${filePath}:\n${edit.oldText}` - ); - } - - const lineNumber = content.slice(0, pos).split(/\r?\n/).length; + // Find all positions first + const positions = edits.map(edit => ({ + edit, + position: findTextPosition(content, edit.oldText) + })); + + // Sort by position in reverse order + positions.sort((a, b) => b.position.start - a.position.start); + + // Apply edits from end to start + for (const {edit, position} of positions) { const preview = [ - `@@ line ${lineNumber} @@`, + `@@ line ${position.lineNumber} @@`, '<<<<<<< ORIGINAL', edit.oldText, '=======', edit.newText, - '>>>>>>> MODIFIED' + '>>>>>>> MODIFIED' ].join('\n'); previews.push({ original: edit.oldText, modified: edit.newText, - lineNumber, + lineNumber: position.lineNumber, preview }); - + if (!dryRun) { - content = content.slice(0, pos) + edit.newText + content.slice(pos + edit.oldText.length); + content = content.slice(0, position.start) + edit.newText + content.slice(position.end); } } From bb7925fe11e3500dad6fc1d4baf5e20925fc2358 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Wed, 4 Dec 2024 07:35:12 -0800 Subject: [PATCH 26/43] improve whitespace sensitivity in multiline text matching --- src/filesystem/index.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index e736b1c1..12dde55d 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -231,10 +231,14 @@ function findTextPosition(content: string, searchText: string): Position { if (pos === -1) { throw new Error(`Text not found:\n${searchText}`); } + + // Map back to original content position + const originalPos = content.slice(0, pos).replace(/[ \t]+/g, ' ').length; + const originalEnd = originalPos + searchText.length; return { start: pos, - end: pos + searchText.length, + end: originalEnd, lineNumber: normalized.slice(0, pos).split('\n').length }; } From 07b47bc25bab3f90021aa81c7581d6e1b254a94d Mon Sep 17 00:00:00 2001 From: Skirano Date: Wed, 4 Dec 2024 12:57:39 -0500 Subject: [PATCH 27/43] fixed version and updated the server list in the main Readme --- README.md | 7 +++++-- src/aws-kb-retrieval-server/package.json | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 28604b9a..a7fc2dfc 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,9 @@ Each MCP server is implemented with either the [Typescript MCP SDK](https://gith - **[Brave Search](src/brave-search)** - Web and local search using Brave's Search API - **[Google Maps](src/google-maps)** - Location services, directions, and place details - **[Fetch](src/fetch)** - Web content fetching and conversion for efficient LLM usage +- **[Sequential Thinking](src/sequential-thinking)** - Dynamic and reflective problem-solving through thought sequences +- **[EverArt](src/everart)** - AI image generation using various models +- **[AWS KB Retrieval](src/aws-kb-retrieval)** - Retrieval from AWS Knowledge Base using Bedrock Agent Runtime ## 🌎 Community Servers @@ -29,14 +32,14 @@ Each MCP server is implemented with either the [Typescript MCP SDK](https://gith ## 🚀 Getting Started ### Using MCP Servers in this Repository -Typescript-based servers in this repository can be used directly with `npx`. +Typescript-based servers in this repository can be used directly with `npx`. 
For example, this will start the [Memory](src/memory) server: ```sh npx -y @modelcontextprotocol/server-memory ``` -Python-based servers in this repository can be used directly with [`uvx`](https://docs.astral.sh/uv/concepts/tools/) or [`pip`](https://pypi.org/project/pip/). `uvx` is recommended for ease of use and setup. +Python-based servers in this repository can be used directly with [`uvx`](https://docs.astral.sh/uv/concepts/tools/) or [`pip`](https://pypi.org/project/pip/). `uvx` is recommended for ease of use and setup. For example, this will start the [Git](src/git) server: ```sh diff --git a/src/aws-kb-retrieval-server/package.json b/src/aws-kb-retrieval-server/package.json index 39ba7bd4..d763e080 100644 --- a/src/aws-kb-retrieval-server/package.json +++ b/src/aws-kb-retrieval-server/package.json @@ -1,6 +1,6 @@ { "name": "@modelcontextprotocol/server-aws-kb-retrieval", - "version": "0.1.0", + "version": "0.2.0", "description": "MCP server for AWS Knowledge Base retrieval using Bedrock Agent Runtime", "license": "MIT", "author": "Anthropic, PBC (https://anthropic.com)", From 69a676b6fad06a7e1260fed8b80a9732fb861354 Mon Sep 17 00:00:00 2001 From: Simon Benedict <102378134+SimonB97@users.noreply.github.com> Date: Wed, 4 Dec 2024 19:47:45 +0100 Subject: [PATCH 28/43] Update README.md Add entry for Windows CLI MCP Server to the README.md file. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1c735776..1c9b45a3 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,7 @@ A growing set of community-developed and maintained servers demonstrates various - **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM - **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/) - **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more. +- **[Windows CLI](https://github.com/SimonB97/win-cli-mcp-server)** - MCP server for secure command-line interactions on Windows systems, enabling controlled access to PowerShell, CMD, and Git Bash shells. ## 📚 Resources From 5c816e3dfa6bbac93b35b134cf6fa8d3f542b4df Mon Sep 17 00:00:00 2001 From: Jeremy Hadfield Date: Wed, 4 Dec 2024 12:26:42 -0700 Subject: [PATCH 29/43] Add Linear community server --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1c735776..f6c62837 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,7 @@ A growing set of community-developed and maintained servers demonstrates various - **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery. - **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities - **[Todoist](https://github.com/abhiz123/todoist-mcp-server)** - Interact with Todoist to manage your tasks. +- **[Linear](https://github.com/jerhadf/linear-mcp-server)** - Allows LLM to interact with Linear's API for project management, including searching, creating, and updating issues. 
- **[Playwright MCP](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright - **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM - **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/) From 64f6c7679466182064d306f99c1c0af9e91c830d Mon Sep 17 00:00:00 2001 From: Shane Date: Wed, 4 Dec 2024 19:00:47 -0500 Subject: [PATCH 30/43] Add OpenRPC community server --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1c735776..2abbcfff 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,7 @@ A growing set of community-developed and maintained servers demonstrates various - **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM - **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/) - **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more. +- **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org). ## 📚 Resources From 02ff589f5803c8412eb50d4e80691acd5979d35d Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Wed, 4 Dec 2024 19:21:04 -0800 Subject: [PATCH 31/43] user aider inspired diff approach --- src/filesystem/index.ts | 194 +++++++++++++++++++++++++++------------- 1 file changed, 130 insertions(+), 64 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 12dde55d..3306f315 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -216,79 +216,146 @@ async function searchFiles( return results; } -interface Position { - start: number; - end: number; +interface DiffLine { + type: 'context' | 'addition' | 'deletion'; + content: string; lineNumber: number; } -function findTextPosition(content: string, searchText: string): Position { - // Handle different line endings - const normalized = content.replace(/\r\n/g, '\n'); - const searchNormalized = searchText.replace(/\r\n/g, '\n'); +function createUnifiedDiff(originalLines: string[], newLines: string[], contextSize: number = 3): string { + const differ = new Array(); + let lineNumber = 1; - const pos = normalized.indexOf(searchNormalized); - if (pos === -1) { - throw new Error(`Text not found:\n${searchText}`); + // Helper to add context lines + function addContext(lines: string[], start: number, count: number) { + for (let i = 0; i < count && start + i < lines.length; i++) { + differ.push({ + type: 'context', + content: lines[start + i], + lineNumber: start + i + 1 + }); + } } - // Map back to original content position - const originalPos = content.slice(0, pos).replace(/[ \t]+/g, ' ').length; - const originalEnd = originalPos + searchText.length; + // Find the differences using longest common subsequence + const changes: Array<{type: 'context' | 'addition' | 'deletion', line: string, index: number}> = []; + let i = 0, j = 0; - return { - start: pos, - end: originalEnd, - lineNumber: normalized.slice(0, pos).split('\n').length - }; -} + while (i < originalLines.length || j < 
newLines.length) { + if (i < originalLines.length && j < newLines.length && originalLines[i] === newLines[j]) { + changes.push({type: 'context', line: originalLines[i], index: i}); + i++; + j++; + } else { + if (i < originalLines.length) { + changes.push({type: 'deletion', line: originalLines[i], index: i}); + i++; + } + if (j < newLines.length) { + changes.push({type: 'addition', line: newLines[j], index: j}); + j++; + } + } + } -// Edit preview type -interface EditPreview { - original: string; - modified: string; - lineNumber: number; - preview: string; // Git-style diff format + // Group changes into hunks with context + let currentHunk: DiffLine[] = []; + let hunks: DiffLine[][] = []; + let lastChangeIndex = -1; + + for (let i = 0; i < changes.length; i++) { + const change = changes[i]; + + if (change.type !== 'context' || + (lastChangeIndex >= 0 && i - lastChangeIndex <= contextSize * 2)) { + if (change.type !== 'context') { + lastChangeIndex = i; + } + currentHunk.push({ + type: change.type, + content: change.line, + lineNumber: change.index + 1 + }); + } else { + if (currentHunk.length > 0) { + hunks.push(currentHunk); + currentHunk = []; + } + } + } + + if (currentHunk.length > 0) { + hunks.push(currentHunk); + } + + // Format the diff output + let diffOutput = ''; + + for (const hunk of hunks) { + const startLine = hunk[0].lineNumber; + const endLine = hunk[hunk.length - 1].lineNumber; + + diffOutput += `@@ -${startLine},${endLine} @@\n`; + + for (const line of hunk) { + const prefix = line.type === 'addition' ? '+' : + line.type === 'deletion' ? '-' : ' '; + diffOutput += `${prefix}${line.content}\n`; + } + + diffOutput += '\n'; + } + + return diffOutput; } // File editing utilities -async function applyFileEdits(filePath: string, edits: Array<{oldText: string, newText: string}>, dryRun = false): Promise { +async function applyFileEdits( + filePath: string, + edits: Array<{oldText: string, newText: string}>, + dryRun = false +): Promise { let content = await fs.readFile(filePath, 'utf-8'); - const previews: EditPreview[] = []; - - // Find all positions first - const positions = edits.map(edit => ({ - edit, - position: findTextPosition(content, edit.oldText) - })); + const originalLines = content.split('\n'); + let modifiedContent = content; - // Sort by position in reverse order - positions.sort((a, b) => b.position.start - a.position.start); - - // Apply edits from end to start - for (const {edit, position} of positions) { - const preview = [ - `@@ line ${position.lineNumber} @@`, - '<<<<<<< ORIGINAL', - edit.oldText, - '=======', - edit.newText, - '>>>>>>> MODIFIED' - ].join('\n'); + // First, validate all edits can be applied + const positions = edits.map(edit => { + const pos = modifiedContent.indexOf(edit.oldText); + if (pos === -1) { + throw new Error(`Text not found:\n${edit.oldText}`); + } + return { + edit, + position: pos, + length: edit.oldText.length + }; + }); + + // Sort positions in reverse order to apply from end to start + positions.sort((a, b) => b.position - a.position); + + if (dryRun) { + // For dry run, create a unified diff preview + for (const {edit, position} of positions) { + modifiedContent = + modifiedContent.slice(0, position) + + edit.newText + + modifiedContent.slice(position + edit.oldText.length); + } - previews.push({ - original: edit.oldText, - modified: edit.newText, - lineNumber: position.lineNumber, - preview - }); - - if (!dryRun) { - content = content.slice(0, position.start) + edit.newText + content.slice(position.end); + const 
modifiedLines = modifiedContent.split('\n'); + return createUnifiedDiff(originalLines, modifiedLines); + } else { + // Apply the edits + for (const {edit, position} of positions) { + modifiedContent = + modifiedContent.slice(0, position) + + edit.newText + + modifiedContent.slice(position + edit.oldText.length); } + return modifiedContent; } - - return dryRun ? previews : content; } // Tool handlers @@ -325,9 +392,9 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { { name: "edit_file", description: - "Make selective edits to a text file using simple search and replace with git-style preview format. " + - "Finds text to replace using substring matching and shows changes in a familiar git-diff format. " + - "Use dry run mode to preview changes before applying them. " + + "Make selective edits to a text file using search and replace with unified diff previews. " + + "Shows changes in standard unified diff format with context lines, similar to git diff. " + + "Use dry run mode to preview changes in patch format before applying them. " + "Only works within allowed directories.", inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, }, @@ -452,15 +519,14 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { const validPath = await validatePath(parsed.data.path); const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun); - // If it's a dry run, format the previews + // If it's a dry run, show the unified diff if (parsed.data.dryRun) { - const previewText = (result as EditPreview[]).map(preview => preview.preview).join('\n\n'); return { - content: [{ type: "text", text: `Edit preview:\n${previewText}` }], + content: [{ type: "text", text: `Edit preview:\n${result}` }], }; } - await fs.writeFile(validPath, result as string, "utf-8"); + await fs.writeFile(validPath, result, "utf-8"); return { content: [{ type: "text", text: `Successfully applied edits to ${parsed.data.path}` }], }; From b6e052946a091cb053ff08c418dd65d0498926e5 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Wed, 4 Dec 2024 20:13:58 -0800 Subject: [PATCH 32/43] use the diff package, aider inspired search --- src/filesystem/index.ts | 319 +++++++++++++++++++++++------------- src/filesystem/package.json | 4 +- 2 files changed, 210 insertions(+), 113 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 3306f315..88f68ab3 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -12,6 +12,7 @@ import path from "path"; import os from 'os'; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; +import { diffLines, createTwoFilesPatch } from 'diff'; // Command line argument parsing const args = process.argv.slice(2); @@ -216,146 +217,238 @@ async function searchFiles( return results; } -interface DiffLine { - type: 'context' | 'addition' | 'deletion'; - content: string; - lineNumber: number; +// file editing and diffing utilities +function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string { + return createTwoFilesPatch( + filepath, + filepath, + originalContent, + newContent, + 'original', + 'modified' + ); } -function createUnifiedDiff(originalLines: string[], newLines: string[], contextSize: number = 3): string { - const differ = new Array(); - let lineNumber = 1; - - // Helper to add context lines - function addContext(lines: string[], start: number, count: number) { - for (let i = 0; i < count && start + i < lines.length; i++) { - differ.push({ - type: 
'context', - content: lines[start + i], - lineNumber: start + i + 1 - }); - } +// Utility functions for text normalization and matching +function normalizeLineEndings(text: string): string { + return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n'); +} + +function normalizeWhitespace(text: string, preserveIndentation: boolean = true): string { + if (!preserveIndentation) { + // Collapse all whitespace to single spaces if not preserving indentation + return text.replace(/\s+/g, ' '); } + + // Preserve line structure but normalize inline whitespace + return text.split('\n').map(line => { + // Preserve leading whitespace + const indent = line.match(/^[\s\t]*/)?.[0] || ''; + // Normalize rest of line + const content = line.slice(indent.length).trim().replace(/\s+/g, ' '); + return indent + content; + }).join('\n'); +} + +interface EditOptions { + preserveIndentation?: boolean; + normalizeWhitespace?: boolean; + partialMatch?: boolean; +} + +interface EditMatch { + start: number; + end: number; + confidence: number; +} - // Find the differences using longest common subsequence - const changes: Array<{type: 'context' | 'addition' | 'deletion', line: string, index: number}> = []; - let i = 0, j = 0; +function findBestMatch(content: string, searchText: string, options: EditOptions): EditMatch | null { + const normalizedContent = normalizeLineEndings(content); + const normalizedSearch = normalizeLineEndings(searchText); - while (i < originalLines.length || j < newLines.length) { - if (i < originalLines.length && j < newLines.length && originalLines[i] === newLines[j]) { - changes.push({type: 'context', line: originalLines[i], index: i}); - i++; - j++; - } else { - if (i < originalLines.length) { - changes.push({type: 'deletion', line: originalLines[i], index: i}); - i++; - } - if (j < newLines.length) { - changes.push({type: 'addition', line: newLines[j], index: j}); - j++; - } + // Try exact match first + const exactPos = normalizedContent.indexOf(normalizedSearch); + if (exactPos !== -1) { + return { + start: exactPos, + end: exactPos + searchText.length, + confidence: 1.0 + }; + } + + // If whitespace normalization is enabled, try that next + if (options.normalizeWhitespace) { + const normContent = normalizeWhitespace(normalizedContent, options.preserveIndentation); + const normSearch = normalizeWhitespace(normalizedSearch, options.preserveIndentation); + const normPos = normContent.indexOf(normSearch); + + if (normPos !== -1) { + // Find the corresponding position in original text + const beforeMatch = normContent.slice(0, normPos); + const originalPos = findOriginalPosition(content, beforeMatch); + return { + start: originalPos, + end: originalPos + searchText.length, + confidence: 0.9 + }; } } - - // Group changes into hunks with context - let currentHunk: DiffLine[] = []; - let hunks: DiffLine[][] = []; - let lastChangeIndex = -1; - - for (let i = 0; i < changes.length; i++) { - const change = changes[i]; + + // If partial matching is enabled, try to find the best partial match + if (options.partialMatch) { + const lines = normalizedContent.split('\n'); + const searchLines = normalizedSearch.split('\n'); + + let bestMatch: EditMatch | null = null; + let bestScore = 0; - if (change.type !== 'context' || - (lastChangeIndex >= 0 && i - lastChangeIndex <= contextSize * 2)) { - if (change.type !== 'context') { - lastChangeIndex = i; + // Sliding window search through the content + for (let i = 0; i < lines.length - searchLines.length + 1; i++) { + let matchScore = 0; + let matchLength = 0; + + 
for (let j = 0; j < searchLines.length; j++) { + const contentLine = options.normalizeWhitespace + ? normalizeWhitespace(lines[i + j], options.preserveIndentation) + : lines[i + j]; + const searchLine = options.normalizeWhitespace + ? normalizeWhitespace(searchLines[j], options.preserveIndentation) + : searchLines[j]; + + const similarity = calculateSimilarity(contentLine, searchLine); + matchScore += similarity; + matchLength += lines[i + j].length + 1; // +1 for newline } - currentHunk.push({ - type: change.type, - content: change.line, - lineNumber: change.index + 1 - }); - } else { - if (currentHunk.length > 0) { - hunks.push(currentHunk); - currentHunk = []; + + const averageScore = matchScore / searchLines.length; + if (averageScore > bestScore && averageScore > 0.7) { // Threshold for minimum match quality + bestScore = averageScore; + const start = lines.slice(0, i).reduce((acc, line) => acc + line.length + 1, 0); + bestMatch = { + start, + end: start + matchLength, + confidence: averageScore + }; } } + + return bestMatch; } - if (currentHunk.length > 0) { - hunks.push(currentHunk); - } + return null; +} - // Format the diff output - let diffOutput = ''; +function calculateSimilarity(str1: string, str2: string): number { + const len1 = str1.length; + const len2 = str2.length; + const matrix: number[][] = Array(len1 + 1).fill(null).map(() => Array(len2 + 1).fill(0)); - for (const hunk of hunks) { - const startLine = hunk[0].lineNumber; - const endLine = hunk[hunk.length - 1].lineNumber; - - diffOutput += `@@ -${startLine},${endLine} @@\n`; - - for (const line of hunk) { - const prefix = line.type === 'addition' ? '+' : - line.type === 'deletion' ? '-' : ' '; - diffOutput += `${prefix}${line.content}\n`; + for (let i = 0; i <= len1; i++) matrix[i][0] = i; + for (let j = 0; j <= len2; j++) matrix[0][j] = j; + + for (let i = 1; i <= len1; i++) { + for (let j = 1; j <= len2; j++) { + const cost = str1[i - 1] === str2[j - 1] ? 0 : 1; + matrix[i][j] = Math.min( + matrix[i - 1][j] + 1, + matrix[i][j - 1] + 1, + matrix[i - 1][j - 1] + cost + ); } - - diffOutput += '\n'; } + + const maxLength = Math.max(len1, len2); + return maxLength === 0 ? 
1 : (maxLength - matrix[len1][len2]) / maxLength; +} - return diffOutput; +function findOriginalPosition(original: string, normalizedPrefix: string): number { + let origPos = 0; + let normPos = 0; + + while (normPos < normalizedPrefix.length && origPos < original.length) { + if (normalizeWhitespace(original[origPos], true) === normalizedPrefix[normPos]) { + normPos++; + } + origPos++; + } + + return origPos; } -// File editing utilities async function applyFileEdits( - filePath: string, - edits: Array<{oldText: string, newText: string}>, - dryRun = false -): Promise { - let content = await fs.readFile(filePath, 'utf-8'); - const originalLines = content.split('\n'); + filePath: string, + edits: Array<{oldText: string, newText: string}>, + dryRun = false, + options: EditOptions = { + preserveIndentation: true, + normalizeWhitespace: true, + partialMatch: true + } +): Promise { + const content = await fs.readFile(filePath, 'utf-8'); let modifiedContent = content; + const failedEdits: Array<{edit: typeof edits[0], error: string}> = []; + const successfulEdits: Array<{edit: typeof edits[0], match: EditMatch}> = []; - // First, validate all edits can be applied - const positions = edits.map(edit => { - const pos = modifiedContent.indexOf(edit.oldText); - if (pos === -1) { - throw new Error(`Text not found:\n${edit.oldText}`); + // Sort edits by position (if found) to apply them in order + for (const edit of edits) { + const match = findBestMatch(modifiedContent, edit.oldText, options); + + if (!match) { + failedEdits.push({ + edit, + error: 'No suitable match found' + }); + continue; } - return { - edit, - position: pos, - length: edit.oldText.length - }; - }); - - // Sort positions in reverse order to apply from end to start - positions.sort((a, b) => b.position - a.position); - + + // For low confidence matches in non-dry-run mode, we might want to throw + if (!dryRun && match.confidence < 0.8) { + failedEdits.push({ + edit, + error: `Match confidence too low: ${(match.confidence * 100).toFixed(1)}%` + }); + continue; + } + + successfulEdits.push({ edit, match }); + } + + // Sort successful edits by position (reverse order to maintain positions) + successfulEdits.sort((a, b) => b.match.start - a.match.start); + + // Apply successful edits + for (const { edit, match } of successfulEdits) { + modifiedContent = + modifiedContent.slice(0, match.start) + + edit.newText + + modifiedContent.slice(match.end); + } + if (dryRun) { - // For dry run, create a unified diff preview - for (const {edit, position} of positions) { - modifiedContent = - modifiedContent.slice(0, position) + - edit.newText + - modifiedContent.slice(position + edit.oldText.length); + let report = createUnifiedDiff(content, modifiedContent, filePath); + + if (failedEdits.length > 0) { + report += '\nFailed edits:\n' + failedEdits.map(({ edit, error }) => + `- Error: ${error}\n Old text: ${edit.oldText.split('\n')[0]}...\n` + ).join('\n'); } - const modifiedLines = modifiedContent.split('\n'); - return createUnifiedDiff(originalLines, modifiedLines); - } else { - // Apply the edits - for (const {edit, position} of positions) { - modifiedContent = - modifiedContent.slice(0, position) + - edit.newText + - modifiedContent.slice(position + edit.oldText.length); + if (successfulEdits.length > 0) { + report += '\nSuccessful edits:\n' + successfulEdits.map(({ edit, match }) => + `- Match confidence: ${(match.confidence * 100).toFixed(1)}%\n Position: ${match.start}-${match.end}\n` + ).join('\n'); } - return modifiedContent; + + return 
report; } + + if (failedEdits.length > 0) { + const errors = failedEdits.map(({ error }) => error).join('\n'); + throw new Error(`Some edits failed:\n${errors}`); + } + + return modifiedContent; } // Tool handlers @@ -392,8 +485,10 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { { name: "edit_file", description: - "Make selective edits to a text file using search and replace with unified diff previews. " + + "Make selective edits to a text file using line-based pattern matching and replacement. " + + "Handles both single-line and multi-line edits, with smart positioning to handle multiple edits simultaneously. " + "Shows changes in standard unified diff format with context lines, similar to git diff. " + + "Provides detailed diff output for failed matches to aid debugging. " + "Use dry run mode to preview changes in patch format before applying them. " + "Only works within allowed directories.", inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, diff --git a/src/filesystem/package.json b/src/filesystem/package.json index 581ad818..8229e0d5 100644 --- a/src/filesystem/package.json +++ b/src/filesystem/package.json @@ -20,12 +20,14 @@ }, "dependencies": { "@modelcontextprotocol/sdk": "0.5.0", + "diff": "^5.1.0", "glob": "^10.3.10", "zod-to-json-schema": "^3.23.5" }, "devDependencies": { + "@types/diff": "^5.0.9", "@types/node": "^20.11.0", "shx": "^0.3.4", "typescript": "^5.3.3" } -} +} \ No newline at end of file From b04c9334bc6abef848816080c3526ff7f43cdfaf Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Wed, 4 Dec 2024 20:45:43 -0800 Subject: [PATCH 33/43] schema def issues --- src/filesystem/index.ts | 45 +++++++++++++++++++++++------------------ 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index 88f68ab3..a3ce7f04 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -114,13 +114,23 @@ const EditOperation = z.object({ newText: z.string().describe('Text to replace the found text with'), }); +const EditOptions = z.object({ + preserveIndentation: z.boolean().default(true).describe('Preserve existing indentation patterns in the file'), + normalizeWhitespace: z.boolean().default(true).describe('Normalize whitespace while preserving structure'), + partialMatch: z.boolean().default(true).describe('Enable fuzzy matching with confidence scoring') +}); + const EditFileArgsSchema = z.object({ path: z.string(), edits: z.array(EditOperation), // Optional: preview changes without applying them - dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') + dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format'), + // Optional: configure matching and formatting behavior + options: EditOptions.default({}) }); + + const CreateDirectoryArgsSchema = z.object({ path: z.string(), }); @@ -250,19 +260,13 @@ function normalizeWhitespace(text: string, preserveIndentation: boolean = true): }).join('\n'); } -interface EditOptions { - preserveIndentation?: boolean; - normalizeWhitespace?: boolean; - partialMatch?: boolean; -} - interface EditMatch { start: number; end: number; confidence: number; } -function findBestMatch(content: string, searchText: string, options: EditOptions): EditMatch | null { +function findBestMatch(content: string, searchText: string, options: z.infer): EditMatch | null { const normalizedContent = normalizeLineEndings(content); const normalizedSearch = normalizeLineEndings(searchText); @@ -379,11 +383,7 @@ 
async function applyFileEdits( filePath: string, edits: Array<{oldText: string, newText: string}>, dryRun = false, - options: EditOptions = { - preserveIndentation: true, - normalizeWhitespace: true, - partialMatch: true - } + options: z.infer = EditOptions.parse({}) ): Promise { const content = await fs.readFile(filePath, 'utf-8'); let modifiedContent = content; @@ -485,12 +485,17 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { { name: "edit_file", description: - "Make selective edits to a text file using line-based pattern matching and replacement. " + - "Handles both single-line and multi-line edits, with smart positioning to handle multiple edits simultaneously. " + - "Shows changes in standard unified diff format with context lines, similar to git diff. " + - "Provides detailed diff output for failed matches to aid debugging. " + - "Use dry run mode to preview changes in patch format before applying them. " + - "Only works within allowed directories.", + "Make selective edits to a text file using advanced pattern matching and smart formatting preservation. Features include:\n" + + "- Line-based and multi-line content matching\n" + + "- Whitespace normalization with indentation preservation\n" + + "- Fuzzy matching with confidence scoring\n" + + "- Multiple simultaneous edits with correct positioning\n" + + "- Indentation style detection and preservation\n" + + "- Detailed diff output with context in git format\n" + + "- Dry run mode for previewing changes\n" + + "- Failed match debugging with match confidence scores\n\n" + + "Configure behavior with options.preserveIndentation, options.normalizeWhitespace, and options.partialMatch. " + + "See schema for detailed option descriptions. Only works within allowed directories.", inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput, }, { @@ -612,7 +617,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { throw new Error(`Invalid arguments for edit_file: ${parsed.error}`); } const validPath = await validatePath(parsed.data.path); - const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun); + const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun, parsed.data.options); // If it's a dry run, show the unified diff if (parsed.data.dryRun) { From 7417d4dc075a702d7fc61eae55a540c8521dd171 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Wed, 4 Dec 2024 20:51:39 -0800 Subject: [PATCH 34/43] update read me for current impl --- src/filesystem/README.md | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/src/filesystem/README.md b/src/filesystem/README.md index 31995d8f..37bc290f 100644 --- a/src/filesystem/README.md +++ b/src/filesystem/README.md @@ -37,23 +37,28 @@ Node.js server implementing Model Context Protocol (MCP) for filesystem operatio - `content` (string): File content - **edit_file** - - Make selective edits using search and replace + - Make selective edits using advanced pattern matching and formatting - Features: - - Simple substring matching for finding text - - Git-style preview format for changes + - Line-based and multi-line content matching + - Whitespace normalization with indentation preservation + - Fuzzy matching with confidence scoring + - Multiple simultaneous edits with correct positioning + - Indentation style detection and preservation + - Git-style diff output with context - Preview changes with dry run mode - - Preserves consistent indentation patterns - - Limitations: - - 
Intended for content changes, not code formatting - - Mixed tabs/spaces can cause pattern matching issues - - Use code formatters (e.g., Prettier, ESLint) before content edits + - Failed match debugging with confidence scores - Inputs: - `path` (string): File to edit - `edits` (array): List of edit operations - `oldText` (string): Text to search for (can be substring) - `newText` (string): Text to replace with - - `dryRun` (boolean): Preview changes without applying (default: false) - - Returns preview information for dry runs, otherwise applies changes + - `dryRun` (boolean): Preview changes without applying (default: false) + - `options` (object): Optional formatting settings + - `preserveIndentation` (boolean): Keep existing indentation (default: true) + - `normalizeWhitespace` (boolean): Normalize spaces while preserving structure (default: true) + - `partialMatch` (boolean): Enable fuzzy matching (default: true) + - Returns detailed diff and match information for dry runs, otherwise applies changes + - Best Practice: Always use dryRun first to preview changes before applying them - **create_directory** - Create new directory or ensure it exists From b477af5c0404bf53d6643f73fda1e126dd27a935 Mon Sep 17 00:00:00 2001 From: Marc Goodner Date: Thu, 5 Dec 2024 00:33:49 -0800 Subject: [PATCH 35/43] seriously, like aider --- src/filesystem/index.ts | 304 ++++++++++------------------------------ 1 file changed, 74 insertions(+), 230 deletions(-) diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts index a3ce7f04..23d989d0 100644 --- a/src/filesystem/index.ts +++ b/src/filesystem/index.ts @@ -108,29 +108,16 @@ const WriteFileArgsSchema = z.object({ }); const EditOperation = z.object({ - // The text to search for - oldText: z.string().describe('Text to search for - can be a substring of the target'), - // The new text to replace with - newText: z.string().describe('Text to replace the found text with'), -}); - -const EditOptions = z.object({ - preserveIndentation: z.boolean().default(true).describe('Preserve existing indentation patterns in the file'), - normalizeWhitespace: z.boolean().default(true).describe('Normalize whitespace while preserving structure'), - partialMatch: z.boolean().default(true).describe('Enable fuzzy matching with confidence scoring') + oldText: z.string().describe('Text to search for - must match exactly'), + newText: z.string().describe('Text to replace with') }); const EditFileArgsSchema = z.object({ path: z.string(), edits: z.array(EditOperation), - // Optional: preview changes without applying them - dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format'), - // Optional: configure matching and formatting behavior - options: EditOptions.default({}) + dryRun: z.boolean().default(false).describe('Preview changes using git-style diff format') }); - - const CreateDirectoryArgsSchema = z.object({ path: z.string(), }); @@ -228,227 +215,101 @@ async function searchFiles( } // file editing and diffing utilities +function normalizeLineEndings(text: string): string { + return text.replace(/\r\n/g, '\n'); +} + function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string { + // Ensure consistent line endings for diff + const normalizedOriginal = normalizeLineEndings(originalContent); + const normalizedNew = normalizeLineEndings(newContent); + return createTwoFilesPatch( filepath, filepath, - originalContent, - newContent, + normalizedOriginal, + normalizedNew, 'original', 'modified' ); } 
-// Utility functions for text normalization and matching -function normalizeLineEndings(text: string): string { - return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n'); -} - -function normalizeWhitespace(text: string, preserveIndentation: boolean = true): string { - if (!preserveIndentation) { - // Collapse all whitespace to single spaces if not preserving indentation - return text.replace(/\s+/g, ' '); - } - - // Preserve line structure but normalize inline whitespace - return text.split('\n').map(line => { - // Preserve leading whitespace - const indent = line.match(/^[\s\t]*/)?.[0] || ''; - // Normalize rest of line - const content = line.slice(indent.length).trim().replace(/\s+/g, ' '); - return indent + content; - }).join('\n'); -} - -interface EditMatch { - start: number; - end: number; - confidence: number; -} - -function findBestMatch(content: string, searchText: string, options: z.infer): EditMatch | null { - const normalizedContent = normalizeLineEndings(content); - const normalizedSearch = normalizeLineEndings(searchText); - - // Try exact match first - const exactPos = normalizedContent.indexOf(normalizedSearch); - if (exactPos !== -1) { - return { - start: exactPos, - end: exactPos + searchText.length, - confidence: 1.0 - }; - } - - // If whitespace normalization is enabled, try that next - if (options.normalizeWhitespace) { - const normContent = normalizeWhitespace(normalizedContent, options.preserveIndentation); - const normSearch = normalizeWhitespace(normalizedSearch, options.preserveIndentation); - const normPos = normContent.indexOf(normSearch); - - if (normPos !== -1) { - // Find the corresponding position in original text - const beforeMatch = normContent.slice(0, normPos); - const originalPos = findOriginalPosition(content, beforeMatch); - return { - start: originalPos, - end: originalPos + searchText.length, - confidence: 0.9 - }; - } - } - - // If partial matching is enabled, try to find the best partial match - if (options.partialMatch) { - const lines = normalizedContent.split('\n'); - const searchLines = normalizedSearch.split('\n'); - - let bestMatch: EditMatch | null = null; - let bestScore = 0; - - // Sliding window search through the content - for (let i = 0; i < lines.length - searchLines.length + 1; i++) { - let matchScore = 0; - let matchLength = 0; - - for (let j = 0; j < searchLines.length; j++) { - const contentLine = options.normalizeWhitespace - ? normalizeWhitespace(lines[i + j], options.preserveIndentation) - : lines[i + j]; - const searchLine = options.normalizeWhitespace - ? 
normalizeWhitespace(searchLines[j], options.preserveIndentation)
-          : searchLines[j];
-
-        const similarity = calculateSimilarity(contentLine, searchLine);
-        matchScore += similarity;
-        matchLength += lines[i + j].length + 1; // +1 for newline
-      }
-
-      const averageScore = matchScore / searchLines.length;
-      if (averageScore > bestScore && averageScore > 0.7) { // Threshold for minimum match quality
-        bestScore = averageScore;
-        const start = lines.slice(0, i).reduce((acc, line) => acc + line.length + 1, 0);
-        bestMatch = {
-          start,
-          end: start + matchLength,
-          confidence: averageScore
-        };
-      }
-    }
-
-    return bestMatch;
-  }
-
-  return null;
-}
-
-function calculateSimilarity(str1: string, str2: string): number {
-  const len1 = str1.length;
-  const len2 = str2.length;
-  const matrix: number[][] = Array(len1 + 1).fill(null).map(() => Array(len2 + 1).fill(0));
-
-  for (let i = 0; i <= len1; i++) matrix[i][0] = i;
-  for (let j = 0; j <= len2; j++) matrix[0][j] = j;
-
-  for (let i = 1; i <= len1; i++) {
-    for (let j = 1; j <= len2; j++) {
-      const cost = str1[i - 1] === str2[j - 1] ? 0 : 1;
-      matrix[i][j] = Math.min(
-        matrix[i - 1][j] + 1,
-        matrix[i][j - 1] + 1,
-        matrix[i - 1][j - 1] + cost
-      );
-    }
-  }
-
-  const maxLength = Math.max(len1, len2);
-  return maxLength === 0 ? 1 : (maxLength - matrix[len1][len2]) / maxLength;
-}
-
-function findOriginalPosition(original: string, normalizedPrefix: string): number {
-  let origPos = 0;
-  let normPos = 0;
-
-  while (normPos < normalizedPrefix.length && origPos < original.length) {
-    if (normalizeWhitespace(original[origPos], true) === normalizedPrefix[normPos]) {
-      normPos++;
-    }
-    origPos++;
-  }
-
-  return origPos;
-}
-
 async function applyFileEdits(
   filePath: string,
   edits: Array<{oldText: string, newText: string}>,
-  dryRun = false,
-  options: z.infer<typeof EditOptions> = EditOptions.parse({})
+  dryRun = false
 ): Promise<string> {
-  const content = await fs.readFile(filePath, 'utf-8');
-  let modifiedContent = content;
-  const failedEdits: Array<{edit: typeof edits[0], error: string}> = [];
-  const successfulEdits: Array<{edit: typeof edits[0], match: EditMatch}> = [];
+  // Read file content and normalize line endings
+  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));
-  // Sort edits by position (if found) to apply them in order
+  // Apply edits sequentially
+  let modifiedContent = content;
   for (const edit of edits) {
-    const match = findBestMatch(modifiedContent, edit.oldText, options);
+    const normalizedOld = normalizeLineEndings(edit.oldText);
+    const normalizedNew = normalizeLineEndings(edit.newText);
-    if (!match) {
-      failedEdits.push({
-        edit,
-        error: 'No suitable match found'
-      });
+    // If exact match exists, use it
+    if (modifiedContent.includes(normalizedOld)) {
+      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
       continue;
     }
-    // For low confidence matches in non-dry-run mode, we might want to throw
-    if (!dryRun && match.confidence < 0.8) {
-      failedEdits.push({
-        edit,
-        error: `Match confidence too low: ${(match.confidence * 100).toFixed(1)}%`
+    // Otherwise, try line-by-line matching with flexibility for whitespace
+    const oldLines = normalizedOld.split('\n');
+    const contentLines = modifiedContent.split('\n');
+    let matchFound = false;
+
+    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
+      const potentialMatch = contentLines.slice(i, i + oldLines.length);
+
+      // Compare lines with normalized whitespace
+      const isMatch = oldLines.every((oldLine, j) => {
+        const contentLine = potentialMatch[j];
+        return oldLine.trim() === contentLine.trim();
       });
-      continue;
+
+      if (isMatch) {
+        // Preserve original indentation of first line
+        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
+        const newLines = normalizedNew.split('\n').map((line, j) => {
+          if (j === 0) return originalIndent + line.trimStart();
+          // For subsequent lines, try to preserve relative indentation
+          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
+          const newIndent = line.match(/^\s*/)?.[0] || '';
+          if (oldIndent && newIndent) {
+            const relativeIndent = newIndent.length - oldIndent.length;
+            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
+          }
+          return line;
+        });
+
+        contentLines.splice(i, oldLines.length, ...newLines);
+        modifiedContent = contentLines.join('\n');
+        matchFound = true;
+        break;
+      }
     }
-    successfulEdits.push({ edit, match });
+    if (!matchFound) {
+      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
+    }
   }
-  // Sort successful edits by position (reverse order to maintain positions)
-  successfulEdits.sort((a, b) => b.match.start - a.match.start);
-
-  // Apply successful edits
-  for (const { edit, match } of successfulEdits) {
-    modifiedContent =
-      modifiedContent.slice(0, match.start) +
-      edit.newText +
-      modifiedContent.slice(match.end);
-  }
+  // Create unified diff
+  const diff = createUnifiedDiff(content, modifiedContent, filePath);
-  if (dryRun) {
-    let report = createUnifiedDiff(content, modifiedContent, filePath);
-
-    if (failedEdits.length > 0) {
-      report += '\nFailed edits:\n' + failedEdits.map(({ edit, error }) =>
-        `- Error: ${error}\n  Old text: ${edit.oldText.split('\n')[0]}...\n`
-      ).join('\n');
-    }
-
-    if (successfulEdits.length > 0) {
-      report += '\nSuccessful edits:\n' + successfulEdits.map(({ edit, match }) =>
-        `- Match confidence: ${(match.confidence * 100).toFixed(1)}%\n  Position: ${match.start}-${match.end}\n`
-      ).join('\n');
-    }
-
-    return report;
+  // Format diff with appropriate number of backticks
+  let numBackticks = 3;
+  while (diff.includes('`'.repeat(numBackticks))) {
+    numBackticks++;
   }
+  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
-  if (failedEdits.length > 0) {
-    const errors = failedEdits.map(({ error }) => error).join('\n');
-    throw new Error(`Some edits failed:\n${errors}`);
+  if (!dryRun) {
+    await fs.writeFile(filePath, modifiedContent, 'utf-8');
   }
-  return modifiedContent;
+  return formattedDiff;
 }
 // Tool handlers
@@ -485,17 +346,9 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {
     {
       name: "edit_file",
       description:
-        "Make selective edits to a text file using advanced pattern matching and smart formatting preservation. Features include:\n" +
-        "- Line-based and multi-line content matching\n" +
-        "- Whitespace normalization with indentation preservation\n" +
-        "- Fuzzy matching with confidence scoring\n" +
-        "- Multiple simultaneous edits with correct positioning\n" +
-        "- Indentation style detection and preservation\n" +
-        "- Detailed diff output with context in git format\n" +
-        "- Dry run mode for previewing changes\n" +
-        "- Failed match debugging with match confidence scores\n\n" +
-        "Configure behavior with options.preserveIndentation, options.normalizeWhitespace, and options.partialMatch. " +
-        "See schema for detailed option descriptions. Only works within allowed directories.",
+        "Make line-based edits to a text file. Each edit replaces exact line sequences " +
+        "with new content. Returns a git-style diff showing the changes made. " +
+        "Only works within allowed directories.",
       inputSchema: zodToJsonSchema(EditFileArgsSchema) as ToolInput,
     },
     {
@@ -617,18 +470,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
         throw new Error(`Invalid arguments for edit_file: ${parsed.error}`);
       }
       const validPath = await validatePath(parsed.data.path);
-      const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun, parsed.data.options);
-
-      // If it's a dry run, show the unified diff
-      if (parsed.data.dryRun) {
-        return {
-          content: [{ type: "text", text: `Edit preview:\n${result}` }],
-        };
-      }
-
-      await fs.writeFile(validPath, result, "utf-8");
+      const result = await applyFileEdits(validPath, parsed.data.edits, parsed.data.dryRun);
       return {
-        content: [{ type: "text", text: `Successfully applied edits to ${parsed.data.path}` }],
+        content: [{ type: "text", text: result }],
       };
     }

From 3b71d48f65939d322f44edf7a98dd5ab153a7987 Mon Sep 17 00:00:00 2001
From: Raduan77
Date: Thu, 5 Dec 2024 11:37:51 +0100
Subject: [PATCH 36/43] address comment

---
 src/github/schemas.ts | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/src/github/schemas.ts b/src/github/schemas.ts
index f6b98727..b9c81a15 100644
--- a/src/github/schemas.ts
+++ b/src/github/schemas.ts
@@ -537,10 +537,6 @@ export const SearchCodeSchema = z.object({
     .describe(
       "Search query. See GitHub code search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-code"
     ),
-  sort: z
-    .enum(["", "indexed"])
-    .optional()
-    .describe("Sort field. Only 'indexed' is supported"),
   order: z
     .enum(["asc", "desc"])
     .optional()
     .describe(

From b0e416294d27c979ff83b68aee02f4aad628f00f Mon Sep 17 00:00:00 2001
From: wong2
Date: Thu, 5 Dec 2024 19:09:55 +0800
Subject: [PATCH 37/43] Add mcp-cli to Resources

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 1c735776..58a4a44b 100644
--- a/README.md
+++ b/README.md
@@ -64,6 +64,7 @@ Additional resources on MCP.
- **[Awesome MCP Servers by wong2](https://github.com/wong2/awesome-mcp-servers)** - A curated list of MCP servers by **[wong2](https://github.com/wong2)** - **[Awesome MCP Servers by appcypher](https://github.com/appcypher/awesome-mcp-servers)** - A curated list of MCP servers by **[Stephen Akinyemi](https://github.com/appcypher)** - **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)** +- **[mcp-cli](https://github.com/wong2/mcp-cli)** - A CLI inspector for the Model Context Protocol by **[wong2](https://github.com/wong2)** ## 🚀 Getting Started From 5020b4b384d0ee3d8e0ef674829f48229ae01305 Mon Sep 17 00:00:00 2001 From: Justin Spahr-Summers Date: Thu, 5 Dec 2024 12:05:27 +0000 Subject: [PATCH 38/43] Update package.json --- src/aws-kb-retrieval-server/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aws-kb-retrieval-server/package.json b/src/aws-kb-retrieval-server/package.json index d763e080..fdad1a69 100644 --- a/src/aws-kb-retrieval-server/package.json +++ b/src/aws-kb-retrieval-server/package.json @@ -1,6 +1,6 @@ { "name": "@modelcontextprotocol/server-aws-kb-retrieval", - "version": "0.2.0", + "version": "0.6.2", "description": "MCP server for AWS Knowledge Base retrieval using Bedrock Agent Runtime", "license": "MIT", "author": "Anthropic, PBC (https://anthropic.com)", From bd4a101ac1e40c3b6a66f9efa2e3d5b3303c8e9f Mon Sep 17 00:00:00 2001 From: Justin Spahr-Summers Date: Thu, 5 Dec 2024 12:10:12 +0000 Subject: [PATCH 39/43] `npm install` --- package-lock.json | 1286 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1286 insertions(+) diff --git a/package-lock.json b/package-lock.json index c73f44ee..b42b4228 100644 --- a/package-lock.json +++ b/package-lock.json @@ -24,6 +24,656 @@ "@modelcontextprotocol/server-slack": "*" } }, + "node_modules/@aws-crypto/crc32": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-5.2.0.tgz", + "integrity": "sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg==", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz", + "integrity": "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw==", + "dependencies": { + "@aws-crypto/sha256-js": "^5.2.0", + "@aws-crypto/supports-web-crypto": "^5.2.0", + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-locate-window": "^3.0.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": 
"sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-browser/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/sha256-js": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz", + "integrity": "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA==", + "dependencies": { + "@aws-crypto/util": "^5.2.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-crypto/supports-web-crypto": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz", + "integrity": "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg==", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-5.2.0.tgz", + "integrity": "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==", + "dependencies": { + "@aws-sdk/types": "^3.222.0", + "@smithy/util-utf8": "^2.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/is-array-buffer": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz", + "integrity": "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-buffer-from": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz", + "integrity": "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA==", + "dependencies": { + "@smithy/is-array-buffer": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-crypto/util/node_modules/@smithy/util-utf8": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-2.3.0.tgz", + "integrity": "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A==", + "dependencies": { + "@smithy/util-buffer-from": "^2.2.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@aws-sdk/client-bedrock-agent-runtime": { + "version": "3.706.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-bedrock-agent-runtime/-/client-bedrock-agent-runtime-3.706.0.tgz", + "integrity": "sha512-XX9Nm88Pz8GdHQJ29h6xQlH21qRnaovtF2BeLdKJRKcS/ViZjqfSFt3B5p6BXf+wKW9YFciGwjuo0OOrDx1Oyw==", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.699.0", + "@aws-sdk/client-sts": "3.699.0", + "@aws-sdk/core": 
"3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/eventstream-serde-browser": "^3.0.13", + "@smithy/eventstream-serde-config-resolver": "^3.0.10", + "@smithy/eventstream-serde-node": "^3.0.12", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.696.0.tgz", + "integrity": "sha512-q5TTkd08JS0DOkHfUL853tuArf7NrPeqoS5UOvqJho8ibV9Ak/a/HO4kNvy9Nj3cib/toHYHsQIEtecUPSUUrQ==", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": 
">=16.0.0" + } + }, + "node_modules/@aws-sdk/client-sso-oidc": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.699.0.tgz", + "integrity": "sha512-u8a1GorY5D1l+4FQAf4XBUC1T10/t7neuwT21r0ymrtMFSK2a9QqVHKMoLkvavAwyhJnARSBM9/UQC797PFOFw==", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/client-sts": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.699.0.tgz", + "integrity": "sha512-++lsn4x2YXsZPIzFVwv3fSUVM55ZT0WRFmPeNilYIhZClxHLmVAWKH4I55cY9ry60/aTKYjzOXkWwyBKGsGvQg==", + "dependencies": { + "@aws-crypto/sha256-browser": "5.2.0", + "@aws-crypto/sha256-js": "5.2.0", + "@aws-sdk/client-sso-oidc": "3.699.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-node": "3.699.0", + "@aws-sdk/middleware-host-header": "3.696.0", + "@aws-sdk/middleware-logger": "3.696.0", + "@aws-sdk/middleware-recursion-detection": "3.696.0", + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/region-config-resolver": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@aws-sdk/util-user-agent-browser": "3.696.0", + "@aws-sdk/util-user-agent-node": "3.696.0", + "@smithy/config-resolver": "^3.0.12", + "@smithy/core": "^2.5.3", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/hash-node": "^3.0.10", + "@smithy/invalid-dependency": "^3.0.10", + "@smithy/middleware-content-length": "^3.0.12", + "@smithy/middleware-endpoint": "^3.2.3", + "@smithy/middleware-retry": "^3.0.27", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + 
"@smithy/util-base64": "^3.0.0", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-body-length-node": "^3.0.0", + "@smithy/util-defaults-mode-browser": "^3.0.27", + "@smithy/util-defaults-mode-node": "^3.0.27", + "@smithy/util-endpoints": "^2.1.6", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/core": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.696.0.tgz", + "integrity": "sha512-3c9III1k03DgvRZWg8vhVmfIXPG6hAciN9MzQTzqGngzWAELZF/WONRTRQuDFixVtarQatmLHYVw/atGeA2Byw==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/signature-v4": "^4.2.2", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-middleware": "^3.0.10", + "fast-xml-parser": "4.4.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-env": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.696.0.tgz", + "integrity": "sha512-T9iMFnJL7YTlESLpVFT3fg1Lkb1lD+oiaIC8KMpepb01gDUBIpj9+Y+pA/cgRWW0yRxmkDXNazAE2qQTVFGJzA==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-http": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.696.0.tgz", + "integrity": "sha512-GV6EbvPi2eq1+WgY/o2RFA3P7HGmnkIzCNmhwtALFlqMroLYWKE7PSeHw66Uh1dFQeVESn0/+hiUNhu1mB0emA==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/property-provider": "^3.1.9", + "@smithy/protocol-http": "^4.1.7", + "@smithy/smithy-client": "^3.4.4", + "@smithy/types": "^3.7.1", + "@smithy/util-stream": "^3.3.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-ini": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.699.0.tgz", + "integrity": "sha512-dXmCqjJnKmG37Q+nLjPVu22mNkrGHY8hYoOt3Jo9R2zr5MYV7s/NHsCHr+7E+BZ+tfZYLRPeB1wkpTeHiEcdRw==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.699.0" + } + }, + "node_modules/@aws-sdk/credential-provider-node": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.699.0.tgz", + "integrity": 
"sha512-MmEmNDo1bBtTgRmdNfdQksXu4uXe66s0p1hi1YPrn1h59Q605eq/xiWbGL6/3KdkViH6eGUuABeV2ODld86ylg==", + "dependencies": { + "@aws-sdk/credential-provider-env": "3.696.0", + "@aws-sdk/credential-provider-http": "3.696.0", + "@aws-sdk/credential-provider-ini": "3.699.0", + "@aws-sdk/credential-provider-process": "3.696.0", + "@aws-sdk/credential-provider-sso": "3.699.0", + "@aws-sdk/credential-provider-web-identity": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/credential-provider-imds": "^3.2.6", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-process": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.696.0.tgz", + "integrity": "sha512-mL1RcFDe9sfmyU5K1nuFkO8UiJXXxLX4JO1gVaDIOvPqwStpUAwi3A1BoeZhWZZNQsiKI810RnYGo0E0WB/hUA==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-sso": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.699.0.tgz", + "integrity": "sha512-Ekp2cZG4pl9D8+uKWm4qO1xcm8/MeiI8f+dnlZm8aQzizeC+aXYy9GyoclSf6daK8KfRPiRfM7ZHBBL5dAfdMA==", + "dependencies": { + "@aws-sdk/client-sso": "3.696.0", + "@aws-sdk/core": "3.696.0", + "@aws-sdk/token-providers": "3.699.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/credential-provider-web-identity": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.696.0.tgz", + "integrity": "sha512-XJ/CVlWChM0VCoc259vWguFUjJDn/QwDqHwbx+K9cg3v6yrqXfK5ai+p/6lx0nQpnk4JzPVeYYxWRpaTsGC9rg==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sts": "^3.696.0" + } + }, + "node_modules/@aws-sdk/middleware-host-header": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.696.0.tgz", + "integrity": "sha512-zELJp9Ta2zkX7ELggMN9qMCgekqZhFC5V2rOr4hJDEb/Tte7gpfKSObAnw/3AYiVqt36sjHKfdkoTsuwGdEoDg==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/middleware-logger": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.696.0.tgz", + "integrity": "sha512-KhkHt+8AjCxcR/5Zp3++YPJPpFQzxpr+jmONiT/Jw2yqnSngZ0Yspm5wGoRx2hS1HJbyZNuaOWEGuJoxLeBKfA==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/middleware-recursion-detection": { + "version": 
"3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.696.0.tgz", + "integrity": "sha512-si/maV3Z0hH7qa99f9ru2xpS5HlfSVcasRlNUXKSDm611i7jFMWwGNLUOXFAOLhXotPX5G3Z6BLwL34oDeBMug==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/middleware-user-agent": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.696.0.tgz", + "integrity": "sha512-Lvyj8CTyxrHI6GHd2YVZKIRI5Fmnugt3cpJo0VrKKEgK5zMySwEZ1n4dqPK6czYRWKd5+WnYHYAuU+Wdk6Jsjw==", + "dependencies": { + "@aws-sdk/core": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@aws-sdk/util-endpoints": "3.696.0", + "@smithy/core": "^2.5.3", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/region-config-resolver": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.696.0.tgz", + "integrity": "sha512-7EuH142lBXjI8yH6dVS/CZeiK/WZsmb/8zP6bQbVYpMrppSTgB3MzZZdxVZGzL5r8zPQOU10wLC4kIMy0qdBVQ==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/token-providers": { + "version": "3.699.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.699.0.tgz", + "integrity": "sha512-kuiEW9DWs7fNos/SM+y58HCPhcIzm1nEZLhe2/7/6+TvAYLuEWURYsbK48gzsxXlaJ2k/jGY3nIsA7RptbMOwA==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/property-provider": "^3.1.9", + "@smithy/shared-ini-file-loader": "^3.1.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "@aws-sdk/client-sso-oidc": "^3.699.0" + } + }, + "node_modules/@aws-sdk/types": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.696.0.tgz", + "integrity": "sha512-9rTvUJIAj5d3//U5FDPWGJ1nFJLuWb30vugGOrWk7aNZ6y9tuA3PI7Cc9dP8WEXKVyK1vuuk8rSFP2iqXnlgrw==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-endpoints": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.696.0.tgz", + "integrity": "sha512-T5s0IlBVX+gkb9g/I6CLt4yAZVzMSiGnbUqWihWsHvQR1WOoIcndQy/Oz/IJXT9T2ipoy7a80gzV6a5mglrioA==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "@smithy/util-endpoints": "^2.1.6", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-locate-window": { + "version": "3.693.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.693.0.tgz", + "integrity": "sha512-ttrag6haJLWABhLqtg1Uf+4LgHWIMOVSYL+VYZmAp2v4PUGOwWmWQH0Zk8RM7YuQcLfH/EoR72/Yxz6A4FKcuw==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@aws-sdk/util-user-agent-browser": { + "version": "3.696.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.696.0.tgz", + "integrity": "sha512-Z5rVNDdmPOe6ELoM5AhF/ja5tSjbe6ctSctDPb0JdDf4dT0v2MfwhJKzXju2RzX8Es/77Glh7MlaXLE0kCB9+Q==", + "dependencies": { + "@aws-sdk/types": "3.696.0", + "@smithy/types": "^3.7.1", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@aws-sdk/util-user-agent-node": { + "version": "3.696.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.696.0.tgz", + "integrity": "sha512-KhKqcfyXIB0SCCt+qsu4eJjsfiOrNzK5dCV7RAW2YIpp+msxGUUX0NdRE9rkzjiv+3EMktgJm3eEIS+yxtlVdQ==", + "dependencies": { + "@aws-sdk/middleware-user-agent": "3.696.0", + "@aws-sdk/types": "3.696.0", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + }, + "peerDependencies": { + "aws-crt": ">=1.0.0" + }, + "peerDependenciesMeta": { + "aws-crt": { + "optional": true + } + } + }, "node_modules/@babel/code-frame": { "version": "7.26.2", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", @@ -165,6 +815,10 @@ "zod": "^3.23.8" } }, + "node_modules/@modelcontextprotocol/server-aws-kb-retrieval": { + "resolved": "src/aws-kb-retrieval-server", + "link": true + }, "node_modules/@modelcontextprotocol/server-brave-search": { "resolved": "src/brave-search", "link": true @@ -269,6 +923,582 @@ "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, + "node_modules/@smithy/abort-controller": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-3.1.8.tgz", + "integrity": "sha512-+3DOBcUn5/rVjlxGvUPKc416SExarAQ+Qe0bqk30YSUjbepwpS7QN0cyKUSifvLJhdMZ0WPzPP5ymut0oonrpQ==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/config-resolver": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-3.0.12.tgz", + "integrity": "sha512-YAJP9UJFZRZ8N+UruTeq78zkdjUHmzsY62J4qKWZ4SXB4QXJ/+680EfXXgkYA2xj77ooMqtUY9m406zGNqwivQ==", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/util-config-provider": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/core": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-2.5.4.tgz", + "integrity": "sha512-iFh2Ymn2sCziBRLPuOOxRPkuCx/2gBdXtBGuCUFLUe6bWYjKnhHyIPqGeNkLZ5Aco/5GjebRTBFiWID3sDbrKw==", + "dependencies": { + "@smithy/middleware-serde": "^3.0.10", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "@smithy/util-body-length-browser": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-stream": "^3.3.1", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/credential-provider-imds": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.7.tgz", + "integrity": "sha512-cEfbau+rrWF8ylkmmVAObOmjbTIzKyUC5TkBL58SbLywD0RCBC4JAUKbmtSm2w5KUJNRPGgpGFMvE2FKnuNlWQ==", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.10", 
+ "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/eventstream-codec": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-3.1.9.tgz", + "integrity": "sha512-F574nX0hhlNOjBnP+noLtsPFqXnWh2L0+nZKCwcu7P7J8k+k+rdIDs+RMnrMwrzhUE4mwMgyN0cYnEn0G8yrnQ==", + "dependencies": { + "@aws-crypto/crc32": "5.2.0", + "@smithy/types": "^3.7.1", + "@smithy/util-hex-encoding": "^3.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/eventstream-serde-browser": { + "version": "3.0.13", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-3.0.13.tgz", + "integrity": "sha512-Nee9m+97o9Qj6/XeLz2g2vANS2SZgAxV4rDBMKGHvFJHU/xz88x2RwCkwsvEwYjSX4BV1NG1JXmxEaDUzZTAtw==", + "dependencies": { + "@smithy/eventstream-serde-universal": "^3.0.12", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-config-resolver": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.0.10.tgz", + "integrity": "sha512-K1M0x7P7qbBUKB0UWIL5KOcyi6zqV5mPJoL0/o01HPJr0CSq3A9FYuJC6e11EX6hR8QTIR++DBiGrYveOu6trw==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-node": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-3.0.12.tgz", + "integrity": "sha512-kiZymxXvZ4tnuYsPSMUHe+MMfc4FTeFWJIc0Q5wygJoUQM4rVHNghvd48y7ppuulNMbuYt95ah71pYc2+o4JOA==", + "dependencies": { + "@smithy/eventstream-serde-universal": "^3.0.12", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/eventstream-serde-universal": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-3.0.12.tgz", + "integrity": "sha512-1i8ifhLJrOZ+pEifTlF0EfZzMLUGQggYQ6WmZ4d5g77zEKf7oZ0kvh1yKWHPjofvOwqrkwRDVuxuYC8wVd662A==", + "dependencies": { + "@smithy/eventstream-codec": "^3.1.9", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/fetch-http-handler": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-4.1.1.tgz", + "integrity": "sha512-bH7QW0+JdX0bPBadXt8GwMof/jz0H28I84hU1Uet9ISpzUqXqRQ3fEZJ+ANPOhzSEczYvANNl3uDQDYArSFDtA==", + "dependencies": { + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "@smithy/util-base64": "^3.0.0", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/hash-node": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-3.0.10.tgz", + "integrity": "sha512-3zWGWCHI+FlJ5WJwx73Mw2llYR8aflVyZN5JhoqLxbdPZi6UyKSdCeXAWJw9ja22m6S6Tzz1KZ+kAaSwvydi0g==", + "dependencies": { + "@smithy/types": "^3.7.1", + "@smithy/util-buffer-from": "^3.0.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/invalid-dependency": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-3.0.10.tgz", + 
"integrity": "sha512-Lp2L65vFi+cj0vFMu2obpPW69DU+6O5g3086lmI4XcnRCG8PxvpWC7XyaVwJCxsZFzueHjXnrOH/E0pl0zikfA==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/is-array-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz", + "integrity": "sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-content-length": { + "version": "3.0.12", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-3.0.12.tgz", + "integrity": "sha512-1mDEXqzM20yywaMDuf5o9ue8OkJ373lSPbaSjyEvkWdqELhFMyNNgKGWL/rCSf4KME8B+HlHKuR8u9kRj8HzEQ==", + "dependencies": { + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-endpoint": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-3.2.4.tgz", + "integrity": "sha512-TybiW2LA3kYVd3e+lWhINVu1o26KJbBwOpADnf0L4x/35vLVica77XVR5hvV9+kWeTGeSJ3IHTcYxbRxlbwhsg==", + "dependencies": { + "@smithy/core": "^2.5.4", + "@smithy/middleware-serde": "^3.0.10", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/shared-ini-file-loader": "^3.1.11", + "@smithy/types": "^3.7.1", + "@smithy/url-parser": "^3.0.10", + "@smithy/util-middleware": "^3.0.10", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-retry": { + "version": "3.0.28", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-3.0.28.tgz", + "integrity": "sha512-vK2eDfvIXG1U64FEUhYxoZ1JSj4XFbYWkK36iz02i3pFwWiDz1Q7jKhGTBCwx/7KqJNk4VS7d7cDLXFOvP7M+g==", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/protocol-http": "^4.1.7", + "@smithy/service-error-classification": "^3.0.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", + "@smithy/util-middleware": "^3.0.10", + "@smithy/util-retry": "^3.0.10", + "tslib": "^2.6.2", + "uuid": "^9.0.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-serde": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-3.0.10.tgz", + "integrity": "sha512-MnAuhh+dD14F428ubSJuRnmRsfOpxSzvRhaGVTvd/lrUDE3kxzCCmH8lnVTvoNQnV2BbJ4c15QwZ3UdQBtFNZA==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/middleware-stack": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-3.0.10.tgz", + "integrity": "sha512-grCHyoiARDBBGPyw2BeicpjgpsDFWZZxptbVKb3CRd/ZA15F/T6rZjCCuBUjJwdck1nwUuIxYtsS4H9DDpbP5w==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-config-provider": { + "version": "3.1.11", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-3.1.11.tgz", + "integrity": "sha512-URq3gT3RpDikh/8MBJUB+QGZzfS7Bm6TQTqoh4CqE8NBuyPkWa5eUXj0XFcFfeZVgg3WMh1u19iaXn8FvvXxZw==", + "dependencies": { + "@smithy/property-provider": "^3.1.10", + "@smithy/shared-ini-file-loader": "^3.1.11", + "@smithy/types": 
"^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/node-http-handler": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-3.3.1.tgz", + "integrity": "sha512-fr+UAOMGWh6bn4YSEezBCpJn9Ukp9oR4D32sCjCo7U81evE11YePOQ58ogzyfgmjIO79YeOdfXXqr0jyhPQeMg==", + "dependencies": { + "@smithy/abort-controller": "^3.1.8", + "@smithy/protocol-http": "^4.1.7", + "@smithy/querystring-builder": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/property-provider": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-3.1.10.tgz", + "integrity": "sha512-n1MJZGTorTH2DvyTVj+3wXnd4CzjJxyXeOgnTlgNVFxaaMeT4OteEp4QrzF8p9ee2yg42nvyVK6R/awLCakjeQ==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/protocol-http": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-4.1.7.tgz", + "integrity": "sha512-FP2LepWD0eJeOTm0SjssPcgqAlDFzOmRXqXmGhfIM52G7Lrox/pcpQf6RP4F21k0+O12zaqQt5fCDOeBtqY6Cg==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/querystring-builder": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-3.0.10.tgz", + "integrity": "sha512-nT9CQF3EIJtIUepXQuBFb8dxJi3WVZS3XfuDksxSCSn+/CzZowRLdhDn+2acbBv8R6eaJqPupoI/aRFIImNVPQ==", + "dependencies": { + "@smithy/types": "^3.7.1", + "@smithy/util-uri-escape": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/querystring-parser": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-3.0.10.tgz", + "integrity": "sha512-Oa0XDcpo9SmjhiDD9ua2UyM3uU01ZTuIrNdZvzwUTykW1PM8o2yJvMh1Do1rY5sUQg4NDV70dMi0JhDx4GyxuQ==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/service-error-classification": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-3.0.10.tgz", + "integrity": "sha512-zHe642KCqDxXLuhs6xmHVgRwy078RfqxP2wRDpIyiF8EmsWXptMwnMwbVa50lw+WOGNrYm9zbaEg0oDe3PTtvQ==", + "dependencies": { + "@smithy/types": "^3.7.1" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/shared-ini-file-loader": { + "version": "3.1.11", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.11.tgz", + "integrity": "sha512-AUdrIZHFtUgmfSN4Gq9nHu3IkHMa1YDcN+s061Nfm+6pQ0mJy85YQDB0tZBCmls0Vuj22pLwDPmL92+Hvfwwlg==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/signature-v4": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-4.2.3.tgz", + "integrity": "sha512-pPSQQ2v2vu9vc8iew7sszLd0O09I5TRc5zhY71KA+Ao0xYazIG+uLeHbTJfIWGO3BGVLiXjUr3EEeCcEQLjpWQ==", + "dependencies": { + "@smithy/is-array-buffer": "^3.0.0", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "@smithy/util-hex-encoding": "^3.0.0", + "@smithy/util-middleware": "^3.0.10", 
+ "@smithy/util-uri-escape": "^3.0.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/smithy-client": { + "version": "3.4.5", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-3.4.5.tgz", + "integrity": "sha512-k0sybYT9zlP79sIKd1XGm4TmK0AS1nA2bzDHXx7m0nGi3RQ8dxxQUs4CPkSmQTKAo+KF9aINU3KzpGIpV7UoMw==", + "dependencies": { + "@smithy/core": "^2.5.4", + "@smithy/middleware-endpoint": "^3.2.4", + "@smithy/middleware-stack": "^3.0.10", + "@smithy/protocol-http": "^4.1.7", + "@smithy/types": "^3.7.1", + "@smithy/util-stream": "^3.3.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/types": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-3.7.1.tgz", + "integrity": "sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/url-parser": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-3.0.10.tgz", + "integrity": "sha512-j90NUalTSBR2NaZTuruEgavSdh8MLirf58LoGSk4AtQfyIymogIhgnGUU2Mga2bkMkpSoC9gxb74xBXL5afKAQ==", + "dependencies": { + "@smithy/querystring-parser": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/util-base64": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-base64/-/util-base64-3.0.0.tgz", + "integrity": "sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ==", + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-body-length-browser": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-browser/-/util-body-length-browser-3.0.0.tgz", + "integrity": "sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ==", + "dependencies": { + "tslib": "^2.6.2" + } + }, + "node_modules/@smithy/util-body-length-node": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-body-length-node/-/util-body-length-node-3.0.0.tgz", + "integrity": "sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-buffer-from": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz", + "integrity": "sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA==", + "dependencies": { + "@smithy/is-array-buffer": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-config-provider": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-3.0.0.tgz", + "integrity": "sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-browser": { + "version": "3.0.28", + "resolved": 
"https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.28.tgz", + "integrity": "sha512-6bzwAbZpHRFVJsOztmov5PGDmJYsbNSoIEfHSJJyFLzfBGCCChiO3od9k7E/TLgrCsIifdAbB9nqbVbyE7wRUw==", + "dependencies": { + "@smithy/property-provider": "^3.1.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", + "bowser": "^2.11.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-defaults-mode-node": { + "version": "3.0.28", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.28.tgz", + "integrity": "sha512-78ENJDorV1CjOQselGmm3+z7Yqjj5HWCbjzh0Ixuq736dh1oEnD9sAttSBNSLlpZsX8VQnmERqA2fEFlmqWn8w==", + "dependencies": { + "@smithy/config-resolver": "^3.0.12", + "@smithy/credential-provider-imds": "^3.2.7", + "@smithy/node-config-provider": "^3.1.11", + "@smithy/property-provider": "^3.1.10", + "@smithy/smithy-client": "^3.4.5", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, + "node_modules/@smithy/util-endpoints": { + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-2.1.6.tgz", + "integrity": "sha512-mFV1t3ndBh0yZOJgWxO9J/4cHZVn5UG1D8DeCc6/echfNkeEJWu9LD7mgGH5fHrEdR7LDoWw7PQO6QiGpHXhgA==", + "dependencies": { + "@smithy/node-config-provider": "^3.1.11", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-hex-encoding": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz", + "integrity": "sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-middleware": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-3.0.10.tgz", + "integrity": "sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A==", + "dependencies": { + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-retry": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-3.0.10.tgz", + "integrity": "sha512-1l4qatFp4PiU6j7UsbasUHL2VU023NRB/gfaa1M0rDqVrRN4g3mCArLRyH3OuktApA4ye+yjWQHjdziunw2eWA==", + "dependencies": { + "@smithy/service-error-classification": "^3.0.10", + "@smithy/types": "^3.7.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-stream": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-3.3.1.tgz", + "integrity": "sha512-Ff68R5lJh2zj+AUTvbAU/4yx+6QPRzg7+pI7M1FbtQHcRIp7xvguxVsQBKyB3fwiOwhAKu0lnNyYBaQfSW6TNw==", + "dependencies": { + "@smithy/fetch-http-handler": "^4.1.1", + "@smithy/node-http-handler": "^3.3.1", + "@smithy/types": "^3.7.1", + "@smithy/util-base64": "^3.0.0", + "@smithy/util-buffer-from": "^3.0.0", + "@smithy/util-hex-encoding": "^3.0.0", + "@smithy/util-utf8": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-uri-escape": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz", + "integrity": 
"sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg==", + "dependencies": { + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@smithy/util-utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@smithy/util-utf8/-/util-utf8-3.0.0.tgz", + "integrity": "sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA==", + "dependencies": { + "@smithy/util-buffer-from": "^3.0.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/@tootallnate/quickjs-emscripten": { "version": "0.23.0", "resolved": "https://registry.npmjs.org/@tootallnate/quickjs-emscripten/-/quickjs-emscripten-0.23.0.tgz", @@ -657,6 +1887,11 @@ "node": ">= 0.8" } }, + "node_modules/bowser": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", + "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + }, "node_modules/bplist-parser": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", @@ -1330,6 +2565,27 @@ "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" }, + "node_modules/fast-xml-parser": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", + "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/NaturalIntelligence" + }, + { + "type": "paypal", + "url": "https://paypal.me/naturalintelligence" + } + ], + "dependencies": { + "strnum": "^1.0.5" + }, + "bin": { + "fxparser": "src/cli/cli.js" + } + }, "node_modules/fd-slicer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", @@ -3378,6 +4634,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strnum": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", + "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" + }, "node_modules/supports-preserve-symlinks-flag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", @@ -3721,6 +4982,31 @@ "zod": "^3.23.3" } }, + "src/aws-kb-retrieval-server": { + "version": "0.6.2", + "license": "MIT", + "dependencies": { + "@aws-sdk/client-bedrock-agent-runtime": "^3.0.0", + "@modelcontextprotocol/sdk": "0.5.0" + }, + "bin": { + "mcp-server-aws-kb-retrieval": "dist/index.js" + }, + "devDependencies": { + "@types/node": "^20.10.0", + "shx": "^0.3.4", + "typescript": "^5.6.2" + } + }, + "src/aws-kb-retrieval-server/node_modules/@types/node": { + "version": "20.17.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.9.tgz", + "integrity": "sha512-0JOXkRyLanfGPE2QRCwgxhzlBAvaRdCNMcvbd7jFfpmD4eEXll7LRwy5ymJmyeZqk7Nh7eD2LeUyQ68BbndmXw==", + "dev": true, + "dependencies": { + "undici-types": "~6.19.2" + } + }, "src/brave-search": { "name": "@modelcontextprotocol/server-brave-search", "version": "0.6.2", From e8f08ac4794f9f5946adf301b20ab86f4b8dc6e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Hor=C3=A1k?= <162463026+jhhxgn@users.noreply.github.com> Date: 
Thu, 5 Dec 2024 17:33:11 +0100
Subject: [PATCH 40/43] Fixed Readme link to sequential thinking server

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 78f569f1..f4706613 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ These servers aim to demonstrate MCP features and the Typescript and Python SDK.
 - **[PostgreSQL](src/postgres)** - Read-only database access with schema inspection
 - **[Puppeteer](src/puppeteer)** - Browser automation and web scraping
 - **[Sentry](src/sentry)** - Retrieving and analyzing issues from Sentry.io
-- **[Sequential Thinking](src/sequential-thinking)** - Dynamic and reflective problem-solving through thought sequences
+- **[Sequential Thinking](src/sequentialthinking)** - Dynamic and reflective problem-solving through thought sequences
 - **[Slack](src/slack)** - Channel management and messaging capabilities
 - **[Sqlite](src/sqlite)** - Database interaction and business intelligence capabilities

From 852aebf30bc98516768b7b417416d82c12b3d4b5 Mon Sep 17 00:00:00 2001
From: AP
Date: Thu, 5 Dec 2024 14:03:51 -0800
Subject: [PATCH 41/43] browserbase readme

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index 822ab053..e5f41461 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,7 @@ These servers aim to demonstrate MCP features and the Typescript and Python SDK.
 Official integrations are maintained by companies building production ready MCP servers for their platforms.
+- Browserbase Logo **[Browserbase](https://github.com/browserbase/mcp-server-browserbase)** - Automate browser interactions in the cloud (e.g. web navigation, data extraction, form filling, and more)
 - **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. 
Workers/KV/R2/D1) - **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real using monitoring data on your Raygun account - E2B Logo **[E2B](https://github.com/e2b-dev/mcp-server)** - Run code in secure sandboxes hosted by [E2B](https://e2b.dev) From 75e7fcea5824ab062077b158ebda353261b312ed Mon Sep 17 00:00:00 2001 From: Justin Spahr-Summers Date: Thu, 5 Dec 2024 22:55:41 +0000 Subject: [PATCH 42/43] `npm install` --- package-lock.json | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/package-lock.json b/package-lock.json index c73f44ee..e06112f2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4001,8 +4001,10 @@ "license": "MIT", "dependencies": { "@modelcontextprotocol/sdk": "1.0.1", + "@types/node": "^20.11.0", "@types/node-fetch": "^2.6.12", "node-fetch": "^3.3.2", + "zod": "^3.22.4", "zod-to-json-schema": "^3.23.5" }, "bin": { @@ -4023,6 +4025,14 @@ "zod": "^3.23.8" } }, + "src/github/node_modules/@types/node": { + "version": "20.17.9", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.9.tgz", + "integrity": "sha512-0JOXkRyLanfGPE2QRCwgxhzlBAvaRdCNMcvbd7jFfpmD4eEXll7LRwy5ymJmyeZqk7Nh7eD2LeUyQ68BbndmXw==", + "dependencies": { + "undici-types": "~6.19.2" + } + }, "src/github/node_modules/data-uri-to-buffer": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", From f7da6f41de04bf864de97470fb2422760a35c0d4 Mon Sep 17 00:00:00 2001 From: Justin Spahr-Summers Date: Thu, 5 Dec 2024 23:15:48 +0000 Subject: [PATCH 43/43] `npm install` --- package-lock.json | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/package-lock.json b/package-lock.json index b0b19b05..fa9ec691 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1523,6 +1523,12 @@ "@types/node": "*" } }, + "node_modules/@types/diff": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/diff/-/diff-5.2.3.tgz", + "integrity": "sha512-K0Oqlrq3kQMaO2RhfrNQX5trmt+XLyom88zS0u84nnIcLvFnRUMRRHmrGny5GSM+kNO9IZLARsdQHDzkhAgmrQ==", + "dev": true + }, "node_modules/@types/express": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz", @@ -2258,6 +2264,14 @@ "resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1367902.tgz", "integrity": "sha512-XxtPuC3PGakY6PD7dG66/o8KwJ/LkH2/EKe19Dcw58w53dv4/vSQEkn/SzuyhHE2q4zPgCkxQBxus3VV4ql+Pg==" }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "engines": { + "node": ">=0.3.1" + } + }, "node_modules/dotenv": { "version": "16.4.6", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.6.tgz", @@ -4983,6 +4997,7 @@ } }, "src/aws-kb-retrieval-server": { + "name": "@modelcontextprotocol/server-aws-kb-retrieval", "version": "0.6.2", "license": "MIT", "dependencies": { @@ -5167,7 +5182,8 @@ "version": "0.6.2", "license": "MIT", "dependencies": { - "@modelcontextprotocol/sdk": "1.0.1", + "@modelcontextprotocol/sdk": "0.5.0", + "diff": "^5.1.0", "glob": "^10.3.10", "zod-to-json-schema": "^3.23.5" }, @@ -5175,6 +5191,7 @@ "mcp-server-filesystem": "dist/index.js" }, "devDependencies": { + "@types/diff": "^5.0.9", "@types/node": "^20.11.0", "shx": "^0.3.4", "typescript": "^5.3.3"