fix: logging error #9

Merged 2 commits on Mar 28, 2025
12 changes: 4 additions & 8 deletions .github/workflows/publish.yml
@@ -3,7 +3,7 @@ name: Publish Package
on:
push:
tags:
- "*"
- '*'

jobs:
publish:
@@ -18,14 +18,10 @@ jobs:
node-version: '20.x'
registry-url: 'https://registry.npmjs.org'

- name: Install dependencies
run: npm ci

- name: Build
run: npm run build

- name: Publish to NPM
run: npm publish
run: |
npm install
npm run publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

4 changes: 3 additions & 1 deletion README.md
@@ -37,6 +37,7 @@ npx -y @vectorize-io/vectorize-mcp-server@latest
}
}
```

## Tools

### Retrieve documents
@@ -78,8 +79,9 @@ Generate a Private Deep Research from your pipeline (see official [API](https://
"query": "Generate a financial status report about the company",
"webSearch": true
}
}
}
```

## Development

```bash
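Note: the README diff above only shows the JSON arguments for the `deep-research` tool. As a rough illustration (not part of this PR), a client built on the MCP TypeScript SDK could invoke that tool roughly as sketched below; the client name/version is a placeholder, and the exact environment variable names (in particular `VECTORIZE_TOKEN`) are assumptions based on the `VECTORIZE_*` constants in `src/index.ts` — check the project README for the authoritative keys.

```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';

async function main() {
  // Spawn the published server over stdio, mirroring the README's npx snippet.
  const transport = new StdioClientTransport({
    command: 'npx',
    args: ['-y', '@vectorize-io/vectorize-mcp-server@latest'],
    env: {
      VECTORIZE_ORG_ID: process.env.VECTORIZE_ORG_ID ?? '',
      VECTORIZE_TOKEN: process.env.VECTORIZE_TOKEN ?? '', // variable name assumed
      VECTORIZE_PIPELINE_ID: process.env.VECTORIZE_PIPELINE_ID ?? '',
    },
  });

  const client = new Client(
    { name: 'example-client', version: '0.0.1' },
    { capabilities: {} }
  );
  await client.connect(transport);

  // Same arguments as the README's deep-research example.
  const result = await client.callTool({
    name: 'deep-research',
    arguments: {
      query: 'Generate a financial status report about the company',
      webSearch: true,
    },
  });
  console.log(JSON.stringify(result, null, 2));

  await client.close();
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```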
56 changes: 28 additions & 28 deletions src/index.ts
@@ -9,13 +9,18 @@ import {
} from '@modelcontextprotocol/sdk/types.js';

import dotenv from 'dotenv';
import { Configuration, ExtractionApi, FilesApi, PipelinesApi } from '@vectorize-io/vectorize-client';
import {
Configuration,
ExtractionApi,
FilesApi,
PipelinesApi,
} from '@vectorize-io/vectorize-client';

dotenv.config();

const RETRIEVAL_TOOL: Tool = {
name: 'retrieve',
description: 'Retrieve documents from a Vectorize pipeline.',
description: 'Retrieve documents from the configured pipeline.',
inputSchema: {
type: 'object',
properties: {
@@ -26,16 +31,16 @@ const RETRIEVAL_TOOL: Tool = {
k: {
type: 'number',
description: 'The number of documents to retrieve.',
default: 4,
},
},
required: ['question', 'k']
required: ['question'],
},
};


const DEEP_RESEARCH_TOOL: Tool = {
name: 'deep-research',
description: 'Generate a deep research on a Vectorize pipeline.',
description: 'Generate a deep research on the configured pipeline.',
inputSchema: {
type: 'object',
properties: {
@@ -48,7 +53,7 @@ const DEEP_RESEARCH_TOOL: Tool = {
description: 'Whether to perform a web search.',
},
},
required: ['query', 'webSearch']
required: ['query', 'webSearch'],
},
};

@@ -66,7 +71,6 @@ const EXTRACTION_TOOL: Tool = {
type: 'string',
description: 'Document content type.',
},

},
required: ['base64Document', 'contentType'],
},
@@ -125,7 +129,6 @@ async function performRetrieval(
};
}


async function performExtraction(
orgId: string,
base64Document: string,
@@ -135,17 +138,17 @@ async function performExtraction(
const startResponse = await filesApi.startFileUpload({
organization: orgId,
startFileUploadRequest: {
name: "My File",
contentType
}
name: 'My File',
contentType,
},
});

const fileBuffer = Buffer.from(base64Document, 'base64');
const fetchResponse = await fetch(startResponse.uploadUrl, {
method: 'PUT',
body: fileBuffer,
headers: {
'Content-Type': contentType
'Content-Type': contentType,
},
});
if (!fetchResponse.ok) {
@@ -158,21 +161,21 @@ async function performExtraction(
startExtractionRequest: {
fileId: startResponse.fileId,
chunkSize: 512,
}
})
},
});
const extractionId = response.extractionId;
// eslint-disable-next-line no-constant-condition
while (true) {
const result = await extractionApi.getExtractionResult({
organization: orgId,
extractionId: extractionId,
})
});
if (result.ready) {
if (result.data?.success) {
return {
content: [{ type: 'text', text: JSON.stringify(result.data) }],
isError: false,
}
};
} else {
throw new Error(`Extraction failed: ${result.data?.error}`);
}
@@ -182,8 +185,6 @@ async function performExtraction(
}
}



async function performDeepResearch(
orgId: string,
pipelineId: string,
Expand All @@ -196,34 +197,33 @@ async function performDeepResearch(
pipeline: pipelineId,
startDeepResearchRequest: {
query,
webSearch
}
webSearch,
},
});
const researchId = response.researchId;
// eslint-disable-next-line no-constant-condition
while (true) {
const result = await pipelinesApi.getDeepResearchResult({
organization: orgId,
pipeline: pipelineId,
researchId: researchId
})
researchId: researchId,
});
if (result.ready) {
if (result.data?.success) {
return {
content: [{ type: 'text', text: result.data.markdown }],
isError: false,
}
};
} else {
throw new Error(`Deep research failed: ${result.data?.error}`);
}
break
break;
} else {
await new Promise((resolve) => setTimeout(resolve, 1000));
}
}
}


server.setRequestHandler(CallToolRequestSchema, async (request) => {
try {
const { name, arguments: args } = request.params;
@@ -279,7 +279,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
},
});
return {
content: [{ type: 'text', text: JSON.stringify({ error: errorMessage }) }],
content: [
{ type: 'text', text: JSON.stringify({ error: errorMessage }) },
],
isError: true,
};
}
@@ -300,8 +302,6 @@ async function runServer() {
level: 'info',
data: `Configuration: Organization ID: ${VECTORIZE_ORG_ID} with Pipeline ID: ${VECTORIZE_PIPELINE_ID}`,
});

console.info('Vectorize MCP Server running');
}

runServer().catch((error) => {
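For context on the PR title: the `runServer` hunk above removes the `console.info('Vectorize MCP Server running')` call while keeping the existing `server.sendLoggingMessage` call, which is presumably the "logging error" being fixed. With a stdio transport, stdout carries the JSON-RPC stream, so stray `console.log`/`console.info` output can corrupt protocol messages; structured log messages should go through the protocol, and anything that must bypass it belongs on stderr. A minimal sketch of that pattern with the MCP TypeScript SDK (the server name, version, and capabilities below are illustrative placeholders, not taken from this repo):

```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';

// Placeholder server identity; the real values live in src/index.ts.
const server = new Server(
  { name: 'example-server', version: '0.0.1' },
  { capabilities: { tools: {}, logging: {} } }
);

async function runServer() {
  const transport = new StdioServerTransport();
  await server.connect(transport);

  // Structured logging travels over the protocol itself, so it never
  // interferes with the JSON-RPC messages flowing through stdout.
  await server.sendLoggingMessage({
    level: 'info',
    data: 'Server running',
  });
}

runServer().catch((error) => {
  // stderr is safe to write to directly; stdout is reserved for the transport.
  console.error('Fatal error running server:', error);
  process.exit(1);
});
```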