fix: chunk large note reads to prevent output-too-large errors (fixes #5)

Add offset and max_chars parameters to obsidian_read_note:
- max_chars (default 50000, max 500000): caps characters returned per call
- offset (default 0): start position for reading, enabling pagination

When content is truncated a trailer message is appended telling the
caller the total size and the exact offset to pass on the next call.

This prevents the 26MB+ responses that caused Claude to reject output
when reading large PDFs stored in an Obsidian vault.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
2026-04-17 17:36:33 -05:00
parent 82d2409fe3
commit a0801a82fd
2 changed files with 38 additions and 7 deletions

View File

@@ -130,7 +130,7 @@ export async function registerFileOperationTools(server: ObsidianMCPServer): Pro
// T031: Read note tool
server.registerTool(
'obsidian_read_note',
'Read the content of a note from the Obsidian vault. Specify either the note name (file) or full path (path).',
'Read the content of a note from the Obsidian vault. Specify either the note name (file) or full path (path). For large files (e.g. PDFs), use max_chars and offset to read in chunks and avoid exceeding context limits.',
{
type: 'object',
properties: {
@@ -142,6 +142,14 @@ export async function registerFileOperationTools(server: ObsidianMCPServer): Pro
type: 'string',
description: 'Exact file path (folder/note.md)',
},
max_chars: {
type: 'number',
description: 'Maximum characters to return (default: 50000, max: 500000). Use to avoid output-too-large errors on big files.',
},
offset: {
type: 'number',
description: 'Character offset to start reading from (default: 0). Use with max_chars to page through large files.',
},
},
},
createToolHandler(
@@ -157,6 +165,14 @@ export async function registerFileOperationTools(server: ObsidianMCPServer): Pro
type: 'string',
description: 'Exact file path (folder/note.md)',
},
max_chars: {
type: 'number',
description: 'Maximum characters to return (default: 50000, max: 500000)',
},
offset: {
type: 'number',
description: 'Character offset to start reading from (default: 0)',
},
},
},
async (args) => {
@@ -170,11 +186,24 @@ export async function registerFileOperationTools(server: ObsidianMCPServer): Pro
const result = await executeObsidianCommand('read', cmdArgs);
handleCLIResult(result, { operation: 'read_note', identifier: sanitized.file || sanitized.path });
// Paginate the raw note content so very large files (e.g. extracted PDFs)
// cannot exceed the caller's output-size limit. Defaults mirror the schema
// defaults (offset=0, max_chars=50000) in case validation left them unset.
const offset: number = validated.offset ?? 0;
const maxChars: number = validated.max_chars ?? 50000;
const fullContent = result.stdout;
const totalChars = fullContent.length;
// slice() clamps out-of-range bounds, so an offset past the end yields ''.
const chunk = fullContent.slice(offset, offset + maxChars);
const isTruncated = offset + maxChars < totalChars;
let text = chunk;
if (isTruncated) {
  const nextOffset = offset + maxChars;
  // Fix: the range previously rendered as "${offset}${offset + chunk.length}"
  // with no separator (e.g. "characters 050000"); add the '-' between bounds.
  text += `\n\n[Content truncated: showing characters ${offset}-${offset + chunk.length} of ${totalChars} total. To read the next chunk, call obsidian_read_note again with offset=${nextOffset}.]`;
}
return {
content: [
{
type: 'text',
text: formatForMCP(result.stdout, 'text'),
text,
},
],
};

View File

@@ -104,11 +104,13 @@ export const createNoteSchema = z.object({
});
// Read note parameters
export const readNoteSchema = z.union([
z.object({ file: noteNameSchema }),
z.object({ path: filePathSchema }),
]).refine(
(data) => ('file' in data && data.file) || ('path' in data && data.path),
// Parameters for obsidian_read_note: exactly one of `file`/`path` must be
// present to identify the note; `offset` and `max_chars` page through large
// files in bounded chunks.
export const readNoteSchema = z
  .object({
    file: noteNameSchema.optional(),
    path: filePathSchema.optional(),
    // Character position to start reading from (defaults to the beginning).
    offset: z.number().int().nonnegative().optional().default(0),
    // Per-call output cap, with a hard ceiling of 500000 characters.
    max_chars: z.number().int().positive().max(500000).optional().default(50000),
  })
  // Truthiness check on purpose: an empty-string `file` falls through to `path`.
  .refine((data) => Boolean(data.file || data.path), {
    message: 'Either file or path must be provided',
  });