removed .old files
This commit is contained in:
@@ -1,438 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Document Export Logic
|
||||
*
|
||||
* Tests document export functions in proxy.js
|
||||
* Tests T012, T013, T014, T040, T041
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: validateDocumentId() (T012)', () => {

  // Mock function to test (will be in proxy.js): a document ID is valid
  // when it is 8-128 characters drawn from [a-zA-Z0-9_-].
  const validateDocumentId = (id) => /^[a-zA-Z0-9_-]{8,128}$/.test(id);

  it('T012: should accept valid 8-character alphanumeric ID', () => {
    // Given a valid 8-character ID, validation succeeds.
    assert.equal(validateDocumentId('1BxAA789'), true, 'Should accept 8-character alphanumeric ID');
  });

  it('T012: should accept valid 128-character alphanumeric ID', () => {
    // Given an ID at the maximum length, validation succeeds.
    assert.equal(validateDocumentId('a'.repeat(128)), true, 'Should accept 128-character alphanumeric ID');
  });

  it('T012: should accept IDs with hyphens and underscores', () => {
    // Hyphen and underscore are both inside the allowed character class.
    assert.equal(validateDocumentId('1BxAA-test-123'), true, 'Should accept IDs with hyphens');
    assert.equal(validateDocumentId('1BxAA_test_123'), true, 'Should accept IDs with underscores');
    assert.equal(validateDocumentId('1BxAA-test_123'), true, 'Should accept IDs with both hyphens and underscores');
  });

  it('T012: should reject IDs shorter than 8 characters', () => {
    // 7 characters is one below the minimum length.
    assert.equal(validateDocumentId('1BxAA78'), false, 'Should reject IDs shorter than 8 characters');
  });

  it('T012: should reject IDs longer than 128 characters', () => {
    // 129 characters is one above the maximum length.
    assert.equal(validateDocumentId('a'.repeat(129)), false, 'Should reject IDs longer than 128 characters');
  });

  it('T012: should reject IDs with invalid characters', () => {
    // Each sample carries one character outside [a-zA-Z0-9_-].
    const invalidChars = [
      '1BxAA@test', // @ symbol
      '1BxAA test', // space
      '1BxAA!test', // exclamation
      '1BxAA#test', // hash
      '1BxAA.test', // period
    ];

    for (const id of invalidChars) {
      assert.equal(validateDocumentId(id), false, `Should reject ID with invalid character: ${id}`);
    }
  });

  it('T012: should reject empty string', () => {
    // The empty string is below the minimum length.
    assert.equal(validateDocumentId(''), false, 'Should reject empty string');
  });
});
|
||||
|
||||
describe('Unit: findExportLink() (T013, T041)', () => {

  // Mock function to test (will be in proxy.js): pick the best export URL
  // for a requested format, trying MIME types in preference order.
  function findExportLink(exportLinks, format = 'markdown') {
    if (!exportLinks) return null;

    const formatMap = {
      'markdown': ['text/x-markdown', 'text/markdown', 'text/html'],
      'html': ['text/html'],
      'pdf': ['application/pdf']
    };

    const candidates = formatMap[format.toLowerCase()] || [];
    const match = candidates.find((mimeType) => exportLinks[mimeType]);
    return match ? exportLinks[match] : null;
  }

  it('T013: should select text/x-markdown from exportLinks when available', () => {
    // Given exportLinks containing the preferred markdown MIME type.
    const exportLinks = {
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    const link = findExportLink(exportLinks, 'markdown');

    assert.equal(link, exportLinks['text/x-markdown'], 'Should select text/x-markdown');
  });

  it('T013: should fall back to text/html when text/x-markdown unavailable', () => {
    // Given exportLinks without either markdown MIME type.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    const link = findExportLink(exportLinks, 'markdown');

    assert.equal(link, exportLinks['text/html'], 'Should fall back to text/html');
  });

  it('T013: should prefer text/markdown over text/html when available', () => {
    // Given exportLinks with the secondary markdown MIME type.
    const exportLinks = {
      'text/markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html'
    };

    const link = findExportLink(exportLinks, 'markdown');

    assert.equal(link, exportLinks['text/markdown'], 'Should prefer text/markdown');
  });

  it('T041: should select text/html MIME type for html format', () => {
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    const link = findExportLink(exportLinks, 'html');

    assert.equal(link, exportLinks['text/html'], 'Should select text/html for html format');
  });

  it('T041: should select application/pdf MIME type for pdf format', () => {
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    const link = findExportLink(exportLinks, 'pdf');

    assert.equal(link, exportLinks['application/pdf'], 'Should select application/pdf for pdf format');
  });

  it('T041: should return null when requested format unavailable', () => {
    // Given exportLinks with no PDF entry.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html'
    };

    const link = findExportLink(exportLinks, 'pdf');

    assert.equal(link, null, 'Should return null when format unavailable');
  });

  it('should return null when exportLinks is null or undefined', () => {
    assert.equal(findExportLink(null, 'markdown'), null, 'Should return null for null exportLinks');
    assert.equal(findExportLink(undefined, 'markdown'), null, 'Should return null for undefined exportLinks');
  });
});
|
||||
|
||||
describe('Unit: validateDocumentSize() (T014)', () => {

  // Mock function to test (will be in proxy.js): enforce the 20MB export
  // limit; files with no size property (native Drive docs) always pass.
  function validateDocumentSize(metadata) {
    const MAX_BYTES = 20 * 1024 * 1024; // 20MB

    // Native Drive files (Docs, Sheets, Slides) don't have size property.
    if (!metadata.size) {
      return { valid: true };
    }

    // The Drive API reports size as a decimal string.
    const size = Number.parseInt(metadata.size, 10);

    return size > MAX_BYTES
      ? { valid: false, error: 'Document exceeds 20MB size limit', statusCode: 413 }
      : { valid: true, size };
  }

  it('T014: should accept documents under 20MB', () => {
    const metadata = {
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '10485760' // 10MB
    };

    const result = validateDocumentSize(metadata);

    assert.equal(result.valid, true, 'Should accept document < 20MB');
    assert.equal(result.size, 10485760, 'Should return parsed size');
  });

  it('T014: should accept documents exactly at 20MB', () => {
    const metadata = {
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '20971520' // Exactly 20MB
    };

    const result = validateDocumentSize(metadata);

    assert.equal(result.valid, true, 'Should accept document exactly at 20MB');
  });

  it('T014: should reject documents over 20MB', () => {
    const metadata = {
      id: '1BxAA_test',
      name: 'large.pdf',
      size: '20971521' // 20MB + 1 byte
    };

    const result = validateDocumentSize(metadata);

    assert.equal(result.valid, false, 'Should reject document > 20MB');
    assert.equal(result.statusCode, 413, 'Should return 413 status code');
    assert.ok(result.error, 'Should include error message');
  });

  it('T014: should accept native Google Drive documents without size', () => {
    // Native Drive files are exported on-the-fly and carry no size.
    const metadata = {
      id: '1BxAA_test',
      name: 'My Document',
      mimeType: 'application/vnd.google-apps.document'
    };

    const result = validateDocumentSize(metadata);

    assert.equal(result.valid, true, 'Should accept native Drive documents without size');
  });

  it('T014: should handle size as number string', () => {
    const metadata = {
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '5242880' // 5MB as string
    };

    const result = validateDocumentSize(metadata);

    assert.equal(result.valid, true, 'Should handle size as string');
    assert.equal(result.size, 5242880, 'Should parse size to number');
  });
});
|
||||
|
||||
describe('Unit: parseFormatParam() (T040)', () => {

  // Mock function to test (will be in proxy.js): read ?format= from the
  // request URL, defaulting to markdown and rejecting unknown values.
  function parseFormatParam(url) {
    const format = new URL(url, 'http://localhost').searchParams.get('format');

    if (!format) {
      return { valid: true, format: 'markdown' }; // Default
    }

    const normalized = format.toLowerCase();
    if (!['markdown', 'html', 'pdf'].includes(normalized)) {
      return { valid: false, error: 'Invalid format parameter', statusCode: 400 };
    }

    return { valid: true, format: normalized };
  }

  it('T040: should extract format parameter from query string', () => {
    const result = parseFormatParam('/1BxAA_test?format=html');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'html', 'Should extract html format');
  });

  it('T040: should validate against allowed values (markdown|html|pdf)', () => {
    const urls = [
      '/doc?format=markdown',
      '/doc?format=html',
      '/doc?format=pdf'
    ];

    for (const url of urls) {
      const result = parseFormatParam(url);
      assert.equal(result.valid, true, `Should accept format in ${url}`);
    }
  });

  it('T040: should return default markdown when format parameter missing', () => {
    const result = parseFormatParam('/1BxAA_test');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'markdown', 'Should default to markdown');
  });

  it('T040: should normalize format to lowercase', () => {
    assert.equal(parseFormatParam('/doc?format=HTML').format, 'html', 'Should normalize HTML to html');
    assert.equal(parseFormatParam('/doc?format=Markdown').format, 'markdown', 'Should normalize Markdown to markdown');
    assert.equal(parseFormatParam('/doc?format=PDF').format, 'pdf', 'Should normalize PDF to pdf');
  });

  it('T040: should return 400 status for invalid format values', () => {
    const result = parseFormatParam('/1BxAA_test?format=invalid');

    assert.equal(result.valid, false, 'Should be invalid');
    assert.equal(result.statusCode, 400, 'Should return 400 status');
    assert.ok(result.error, 'Should include error message');
  });

  it('T040: should handle multiple query parameters', () => {
    const result = parseFormatParam('/1BxAA_test?format=pdf&other=value&another=param');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'pdf', 'Should extract format from multi-param URL');
  });
});
|
||||
@@ -1,377 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Request Routing Logic
|
||||
*
|
||||
* Tests request routing and error mapping in proxy.js
|
||||
* Tests T015, T016, T050
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: handleRequest() Routing (T015)', () => {

  // Mock routing function (will be in proxy.js): GET-only dispatch over
  // /health, /sitemap.xml, and /:documentId.
  function parseRoute(method, url) {
    if (method !== 'GET') {
      return { route: null, error: 'Method not allowed', statusCode: 405 };
    }

    const path = new URL(url, 'http://localhost').pathname;

    if (path === '/health') return { route: 'health' };
    if (path === '/sitemap.xml') return { route: 'sitemap' };

    // Document route: /:documentId
    const docMatch = path.match(/^\/([a-zA-Z0-9_-]+)$/);
    if (docMatch) {
      return { route: 'document', documentId: docMatch[1] };
    }

    return { route: null, error: 'Not found', statusCode: 404 };
  }

  it('T015: should route /health to health check handler', () => {
    const result = parseRoute('GET', '/health');

    assert.equal(result.route, 'health', 'Should route to health handler');
  });

  it('T015: should route /:documentId to document export handler', () => {
    const result = parseRoute('GET', '/1BxAA_testDocument123');

    assert.equal(result.route, 'document', 'Should route to document handler');
    assert.equal(result.documentId, '1BxAA_testDocument123', 'Should extract document ID');
  });

  it('T015: should route /sitemap.xml to sitemap handler', () => {
    const result = parseRoute('GET', '/sitemap.xml');

    assert.equal(result.route, 'sitemap', 'Should route to sitemap handler');
  });

  it('T015: should return 404 for unknown routes', () => {
    const result = parseRoute('GET', '/unknown/path');

    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 404, 'Should return 404 status');
  });

  it('T015: should return 405 for non-GET methods', () => {
    const result = parseRoute('POST', '/1BxAA_test');

    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 405, 'Should return 405 status');
  });

  it('T015: should extract documentId with hyphens and underscores', () => {
    const urls = [
      '/1BxAA-test-123',
      '/1BxAA_test_123',
      '/1BxAA-test_123'
    ];

    for (const url of urls) {
      const result = parseRoute('GET', url);
      assert.equal(result.route, 'document', `Should route ${url} to document handler`);
      assert.ok(result.documentId, `Should extract document ID from ${url}`);
    }
  });
});
|
||||
|
||||
describe('Unit: mapDriveError() (T016)', () => {

  // Mock error mapping function (will be in proxy.js): translate a
  // googleapis (Gaxios) error into the HTTP response to send upstream.
  function mapDriveError(error) {
    // GaxiosError carries the status either on .code or on .response.status.
    const statusCode = error.code || error.response?.status || 500;

    const mapping = {
      404: { status: 404, message: 'Not Found' },
      403: { status: 403, message: 'Forbidden' },
      401: { status: 401, message: 'Unauthorized' },
      429: { status: 429, message: 'Too Many Requests', retryAfter: 60 },
      500: { status: 500, message: 'Internal Server Error' },
      503: { status: 503, message: 'Service Unavailable' }
    };

    return mapping[statusCode] ?? { status: 500, message: 'Internal Server Error' };
  }

  it('T016: should convert Drive API 404 to HTTP 404', () => {
    const result = mapDriveError({ code: 404, message: 'File not found' });

    assert.equal(result.status, 404, 'Should map to 404 status');
  });

  it('T016: should convert Drive API 403 to HTTP 403', () => {
    const result = mapDriveError({ code: 403, message: 'Permission denied' });

    assert.equal(result.status, 403, 'Should map to 403 status');
  });

  it('T016: should convert Drive API 401 to HTTP 401', () => {
    const result = mapDriveError({ code: 401, message: 'Invalid credentials' });

    assert.equal(result.status, 401, 'Should map to 401 status');
  });

  it('T016: should convert Drive API 429 to HTTP 429 with Retry-After', () => {
    const result = mapDriveError({ code: 429, message: 'Rate limit exceeded' });

    assert.equal(result.status, 429, 'Should map to 429 status');
    assert.equal(result.retryAfter, 60, 'Should include Retry-After of 60 seconds');
  });

  it('T016: should convert Drive API 500 to HTTP 500', () => {
    const result = mapDriveError({ code: 500, message: 'Internal error' });

    assert.equal(result.status, 500, 'Should map to 500 status');
  });

  it('T016: should convert Drive API 503 to HTTP 503', () => {
    const result = mapDriveError({ code: 503, message: 'Service unavailable' });

    assert.equal(result.status, 503, 'Should map to 503 status');
  });

  it('should handle errors without code by checking response.status', () => {
    const driveError = {
      response: { status: 404, statusText: 'Not Found' },
      message: 'Request failed'
    };

    const result = mapDriveError(driveError);

    assert.equal(result.status, 404, 'Should map using response.status');
  });

  it('should default to 500 for unknown error codes', () => {
    const result = mapDriveError({ code: 999, message: 'Unknown error' });

    assert.equal(result.status, 500, 'Should default to 500 for unknown codes');
  });
});
|
||||
|
||||
describe('Unit: Rate Limiting (T050)', () => {

  // Mock rate limiter (will be in proxy.js): sliding-window counter of
  // request timestamps per IP.
  class RateLimiter {
    constructor(maxRequests = 100, windowMs = 60000) {
      this.maxRequests = maxRequests;
      this.windowMs = windowMs;
      this.requests = new Map(); // ip -> [timestamps]
    }

    // Record one request for `ip`; deny with 429 + retryAfter (seconds)
    // once maxRequests have been seen inside the current window.
    checkLimit(ip) {
      const now = Date.now();
      const cutoff = now - this.windowMs;

      // Keep only the timestamps still inside the sliding window.
      const recent = (this.requests.get(ip) ?? []).filter((ts) => ts > cutoff);

      if (recent.length >= this.maxRequests) {
        // Client may retry once the oldest in-window request ages out.
        const retryAfter = Math.ceil((recent[0] + this.windowMs - now) / 1000);
        return { allowed: false, statusCode: 429, retryAfter };
      }

      recent.push(now);
      this.requests.set(ip, recent);
      return { allowed: true };
    }

    // Drop expired timestamps; remove IPs with none remaining.
    cleanup() {
      const cutoff = Date.now() - this.windowMs;

      for (const [ip, timestamps] of this.requests.entries()) {
        const live = timestamps.filter((ts) => ts > cutoff);
        if (live.length === 0) {
          this.requests.delete(ip);
        } else {
          this.requests.set(ip, live);
        }
      }
    }
  }

  it('T050: should allow 100 requests from same IP within window', () => {
    const limiter = new RateLimiter(100, 60000);
    const testIp = '192.168.1.1';

    let allowedCount = 0;
    for (let i = 0; i < 100; i++) {
      if (limiter.checkLimit(testIp).allowed) allowedCount++;
    }

    assert.equal(allowedCount, 100, 'Should allow 100 requests');
  });

  it('T050: should return 429 with Retry-After header on 101st request', () => {
    const limiter = new RateLimiter(100, 60000);
    const testIp = '192.168.1.1';

    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(testIp);
    }

    const result = limiter.checkLimit(testIp);

    assert.equal(result.allowed, false, 'Should not allow 101st request');
    assert.equal(result.statusCode, 429, 'Should return 429 status');
    assert.ok(result.retryAfter > 0, 'Should include Retry-After in seconds');
    assert.ok(result.retryAfter <= 60, 'Retry-After should be <= 60 seconds');
  });

  it('T050: should track requests per IP independently', () => {
    const limiter = new RateLimiter(100, 60000);
    const ip1 = '192.168.1.1';
    const ip2 = '192.168.1.2';

    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(ip1);
      limiter.checkLimit(ip2);
    }

    // Each IP has exhausted its own independent quota.
    const result1 = limiter.checkLimit(ip1);
    const result2 = limiter.checkLimit(ip2);

    assert.equal(result1.allowed, false, 'IP1 should be rate limited');
    assert.equal(result2.allowed, false, 'IP2 should be rate limited');
  });

  it('T050: should cleanup old entries outside time window', () => {
    const limiter = new RateLimiter(10, 1000); // 10 req/sec for testing
    const testIp = '192.168.1.1';

    for (let i = 0; i < 10; i++) {
      limiter.checkLimit(testIp);
    }

    // Wait for window to pass (simulate with manual cleanup)
    limiter.cleanup();

    // Timestamps are still fresh, so the IP remains tracked.
    assert.ok(limiter.requests.has(testIp), 'Should have IP in requests map');
  });

  it('T050: should reset limit after time window expires', () => {
    const limiter = new RateLimiter(5, 100); // 5 req / 100ms
    const testIp = '192.168.1.1';

    for (let i = 0; i < 5; i++) {
      limiter.checkLimit(testIp);
    }

    // Simulate time passing by back-dating the stored timestamps.
    const oldTimestamps = limiter.requests.get(testIp);
    limiter.requests.set(testIp, oldTimestamps.map((ts) => ts - 200)); // Make them 200ms old

    const result = limiter.checkLimit(testIp);
    assert.equal(result.allowed, true, 'Should allow request after window expires');
  });
});
|
||||
@@ -1,386 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Sitemap Generation Logic
|
||||
*
|
||||
* Tests sitemap XML generation functions
|
||||
* Tests T028, T029, T030
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: escapeXml() (T028)', () => {
|
||||
|
||||
// Mock XML escape function (will be in proxy.js)
|
||||
function escapeXml(str) {
|
||||
if (typeof str !== 'string') return '';
|
||||
|
||||
return str
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, ''');
|
||||
}
|
||||
|
||||
it('T028: should escape < character to <', () => {
|
||||
// Given: String with < character
|
||||
const input = 'test < value';
|
||||
|
||||
// When: Escaping for XML
|
||||
const output = escapeXml(input);
|
||||
|
||||
// Then: Should escape <
|
||||
assert.equal(output, 'test < value', 'Should escape <');
|
||||
});
|
||||
|
||||
it('T028: should escape > character to >', () => {
|
||||
// Given: String with > character
|
||||
const input = 'test > value';
|
||||
|
||||
// When: Escaping for XML
|
||||
const output = escapeXml(input);
|
||||
|
||||
// Then: Should escape >
|
||||
assert.equal(output, 'test > value', 'Should escape >');
|
||||
});
|
||||
|
||||
it('T028: should escape & character to &', () => {
|
||||
// Given: String with & character
|
||||
const input = 'test & value';
|
||||
|
||||
// When: Escaping for XML
|
||||
const output = escapeXml(input);
|
||||
|
||||
// Then: Should escape &
|
||||
assert.equal(output, 'test & value', 'Should escape &');
|
||||
});
|
||||
|
||||
it('T028: should escape " character to "', () => {
|
||||
// Given: String with " character
|
||||
const input = 'test "value"';
|
||||
|
||||
// When: Escaping for XML
|
||||
const output = escapeXml(input);
|
||||
|
||||
// Then: Should escape "
|
||||
assert.equal(output, 'test "value"', 'Should escape "');
|
||||
});
|
||||
|
||||
it('T028: should escape \' character to '', () => {
|
||||
// Given: String with ' character
|
||||
const input = "test 'value'";
|
||||
|
||||
// When: Escaping for XML
|
||||
const output = escapeXml(input);
|
||||
|
||||
// Then: Should escape '
|
||||
assert.equal(output, 'test 'value'', 'Should escape \'');
|
||||
});
|
||||
|
||||
it('T028: should escape multiple special characters in correct order', () => {
  // Given: String with multiple special characters
  const input = '<tag attr="value" other=\'test\'>content & more</tag>';

  // When: Escaping for XML
  const output = escapeXml(input);

  // Then: every special character is replaced by its entity; & is escaped
  // first by escapeXml, so the entities themselves are not double-escaped.
  // (restored: the diff rendering had decoded all entities in this literal)
  assert.equal(
    output,
    '&lt;tag attr=&quot;value&quot; other=&apos;test&apos;&gt;content &amp; more&lt;/tag&gt;',
    'Should escape all XML special characters'
  );
});
|
||||
|
||||
it('T028: should handle strings without special characters', () => {
  // Given: a string containing none of the five XML-special characters
  const plain = 'normal text 123';

  // When: Escaping for XML
  const escaped = escapeXml(plain);

  // Then: the escaper is the identity function on such input
  assert.equal(escaped, plain, 'Should not modify strings without special chars');
});
|
||||
|
||||
it('T028: should handle empty string', () => {
  // Given / When: an empty string run through the escaper
  const result = escapeXml('');

  // Then: empty in, empty out — no throw, no padding
  assert.equal(result, '', 'Should handle empty string');
});
|
||||
|
||||
it('T028: should handle non-string input gracefully', () => {
  // Given: values of every non-string flavor the escaper may receive
  const nonStrings = [null, undefined, 123, { foo: 'bar' }];

  // When / Then: each one maps to the empty string instead of throwing
  for (const value of nonStrings) {
    const result = escapeXml(value);
    assert.equal(result, '', `Should return empty string for ${typeof value}`);
  }
});
|
||||
});
|
||||
|
||||
describe('Unit: formatSitemapEntry() (T029)', () => {

  // Mock sitemap entry formatter (will be in proxy.js).
  // Renders a single document as one sitemap <url> element.
  function formatSitemapEntry(document, baseUrl) {
    // Escape the characters that are unsafe inside XML text content.
    // & is replaced first so the generated entities are not double-escaped.
    // (restored: the diff rendering had decoded the entities, turning each
    // replace into an identity mapping)
    function escapeXml(str) {
      return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    }

    const loc = `${baseUrl}/${document.id}`;
    const lastmod = document.modifiedTime;

    return `  <url>
    <loc>${escapeXml(loc)}</loc>
    <lastmod>${lastmod}</lastmod>
  </url>`;
  }

  it('T029: should convert DriveDocument to XML url element', () => {
    // Given: DriveDocument metadata
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should generate valid XML
    assert.ok(xml.includes('<url>'), 'Should contain opening url tag');
    assert.ok(xml.includes('</url>'), 'Should contain closing url tag');
    assert.ok(xml.includes('<loc>'), 'Should contain loc element');
    assert.ok(xml.includes('</loc>'), 'Should contain closing loc tag');
    assert.ok(xml.includes('<lastmod>'), 'Should contain lastmod element');
    assert.ok(xml.includes('</lastmod>'), 'Should contain closing lastmod tag');
  });

  it('T029: should include correct location URL with documentId', () => {
    // Given: DriveDocument metadata
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Location should point to adapter endpoint
    assert.ok(
      xml.includes(`<loc>http://localhost:3000/${document.id}</loc>`),
      'Should include correct location URL'
    );
  });

  it('T029: should include ISO 8601 lastmod timestamp', () => {
    // Given: DriveDocument with modified time
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should include lastmod with ISO 8601 timestamp (passed through verbatim)
    assert.ok(
      xml.includes('<lastmod>2026-03-06T10:30:00Z</lastmod>'),
      'Should include ISO 8601 lastmod timestamp'
    );
  });

  it('T029: should escape special XML characters in URL', () => {
    // Given: DriveDocument with special characters in ID (edge case)
    const document = {
      id: '1BxAA-test&123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: the & in the URL must appear as the &amp; entity
    // (restored: the decoded assertion checked for a bare '&', which a
    // non-escaping implementation would also satisfy)
    assert.ok(
      xml.includes('&amp;'),
      'Should escape special XML characters in URL'
    );
  });

  it('T029: should handle different baseUrl formats', () => {
    // Given: Different baseUrl formats
    const document = {
      id: '1BxAA_test',
      name: 'Test',
      modifiedTime: '2026-03-06T10:30:00Z'
    };

    const baseUrls = [
      'http://localhost:3000',
      'https://example.com',
      'https://api.example.com/v1'
    ];

    // When: Formatting with each baseUrl
    // Then: Should generate correct loc for each
    baseUrls.forEach(baseUrl => {
      const xml = formatSitemapEntry(document, baseUrl);
      assert.ok(
        xml.includes(`<loc>${baseUrl}/${document.id}</loc>`),
        `Should work with baseUrl: ${baseUrl}`
      );
    });
  });
});
|
||||
|
||||
describe('Unit: generateSitemap() Structure (T030)', () => {

  // Mock sitemap generator structure (will be in proxy.js).
  // Assembles the complete sitemap document: XML declaration, urlset
  // wrapper with the sitemap.org namespace, and one <url> per document.
  function buildSitemapXml(documents, baseUrl) {
    // Escape XML-unsafe characters in text content; & is replaced first so
    // the generated entities are not double-escaped.
    // (restored: the diff rendering had decoded the entities, turning each
    // replace into an identity mapping)
    function escapeXml(str) {
      return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    }

    let xml = '<?xml version="1.0" encoding="UTF-8"?>\n';
    xml += '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n';

    documents.forEach(doc => {
      const loc = `${baseUrl}/${doc.id}`;
      xml += `  <url>\n`;
      xml += `    <loc>${escapeXml(loc)}</loc>\n`;
      xml += `    <lastmod>${doc.modifiedTime}</lastmod>\n`;
      xml += `  </url>\n`;
    });

    xml += '</urlset>';

    return xml;
  }

  it('T030: should build complete XML with declaration', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should start with XML declaration
    assert.ok(
      xml.startsWith('<?xml version="1.0"'),
      'Should start with XML declaration'
    );
  });

  it('T030: should include correct sitemap namespace', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include sitemap protocol namespace
    assert.ok(
      xml.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'),
      'Should include correct sitemap namespace'
    );
  });

  it('T030: should include closing urlset tag', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should end with closing urlset tag
    assert.ok(xml.endsWith('</urlset>'), 'Should end with closing urlset tag');
  });

  it('T030: should include multiple url entries for multiple documents', () => {
    // Given: Multiple documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' },
      { id: '2CyBB_doc2', name: 'Doc 2', modifiedTime: '2026-03-06T11:00:00Z' },
      { id: '3DzCC_doc3', name: 'Doc 3', modifiedTime: '2026-03-06T12:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include all documents
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 3, 'Should include 3 url entries');

    // Then: Each document should have its loc
    documents.forEach(doc => {
      assert.ok(
        xml.includes(`<loc>http://localhost:3000/${doc.id}</loc>`),
        `Should include url entry for ${doc.id}`
      );
    });
  });

  it('T030: should handle empty document list', () => {
    // Given: Empty documents array
    const documents = [];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should still have valid XML structure
    assert.ok(xml.includes('<?xml version'), 'Should have XML declaration');
    assert.ok(xml.includes('<urlset'), 'Should have urlset opening');
    assert.ok(xml.includes('</urlset>'), 'Should have urlset closing');

    // Then: Should have no url entries
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 0, 'Should have no url entries');
  });

  it('T030: should generate valid XML that browsers can parse', () => {
    // Given: Sample documents
    const documents = [
      { id: '1BxAA_test', name: 'Test', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: XML should be well-formed (basic checks)
    // Count opening and closing tags
    const openingUrlset = (xml.match(/<urlset/g) || []).length;
    const closingUrlset = (xml.match(/<\/urlset>/g) || []).length;
    assert.equal(openingUrlset, closingUrlset, 'urlset tags should be balanced');

    const openingUrl = (xml.match(/<url>/g) || []).length;
    const closingUrl = (xml.match(/<\/url>/g) || []).length;
    assert.equal(openingUrl, closingUrl, 'url tags should be balanced');
  });
});
|
||||
Reference in New Issue
Block a user