removed .old files
This commit is contained in:
@@ -11,7 +11,7 @@
|
||||
"test:unit": "node --test tests/unit/**/*.test.js",
|
||||
"test:integration": "node --test tests/integration/**/*.test.js",
|
||||
"test:contract": "node --test tests/contract/**/*.test.js",
|
||||
"clean": "rm -rvf dist/* & rm -rvf **/*.{backup,backup-new,backup-old,backup-regenerated} & rm -rvf **/*-old.js"
|
||||
"clean": "rm -rvf dist/* & rm -rvf **/*.old & rm -rvf **/*.{backup,backup-new,backup-old,backup-regenerated} & rm -rvf **/*-old.js"
|
||||
},
|
||||
"keywords": [
|
||||
"google-drive",
|
||||
|
||||
@@ -1,377 +0,0 @@
|
||||
/**
|
||||
* Contract Tests: Document API
|
||||
*
|
||||
* Tests API contract compliance per OpenAPI specification
|
||||
* Tests T009, T010, T026, T037, T038, T039
|
||||
*/
|
||||
|
||||
import { describe, it, before, after } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { handleRequest } from '../../src/proxy.js';
|
||||
|
||||
// Port and base URL every request in this suite targets.
const TEST_PORT = 3001;
const BASE_URL = `http://localhost:${TEST_PORT}`;

// Shared server handle and readiness flag for the before/after hooks.
let server;
let serverReady = false;

// Load the default config and point it at the test port so the proxy
// under test binds where makeRequest() expects it.
global.config = JSON.parse(
  fs.readFileSync(path.join(process.cwd(), 'config', 'default.json'), 'utf8')
);
global.config.server.port = TEST_PORT;
|
||||
|
||||
// Boot the proxy's HTTP server once before any test in this file runs.
before(() => new Promise((resolve) => {
  server = http.createServer(handleRequest);
  server.listen(TEST_PORT, () => {
    serverReady = true;
    resolve();
  });
}));
|
||||
|
||||
// Tear the server down after the suite so the process can exit cleanly.
after(() => new Promise((resolve) => {
  if (!server) {
    resolve();
    return;
  }
  server.close(() => resolve());
}));
|
||||
|
||||
/**
 * Make an HTTP request against the test server and return response details.
 *
 * Chunks are collected as Buffers and decoded once at the end: the previous
 * `data += chunk` coerced each Buffer to a string individually, which corrupts
 * any multi-byte UTF-8 sequence that spans a chunk boundary (and mangles the
 * binary PDF body exercised by T038).
 *
 * @param {string} path - request path, including any query string
 * @param {string} [method='GET'] - HTTP method
 * @returns {Promise<{statusCode: number, headers: object, body: string}>}
 */
async function makeRequest(path, method = 'GET') {
  return new Promise((resolve, reject) => {
    const req = http.request(`${BASE_URL}${path}`, { method }, (res) => {
      const chunks = [];
      res.on('data', (chunk) => chunks.push(chunk));
      res.on('end', () => {
        resolve({
          statusCode: res.statusCode,
          headers: res.headers,
          body: Buffer.concat(chunks).toString('utf8')
        });
      });
    });
    req.on('error', reject);
    req.end();
  });
}
|
||||
|
||||
describe('Contract: GET /:documentId (T009, T010)', () => {

  // Fixture document ID reused by the success-path cases below.
  const VALID_ID = '1BxAA_validDocumentId123';

  it('T009: should return 200 with Content-Type text/markdown for valid document ID', async () => {
    const { statusCode, headers, body } = await makeRequest(`/${VALID_ID}`);

    assert.equal(statusCode, 200, 'Status code should be 200 OK');
    assert.ok(
      headers['content-type']?.includes('text/markdown'),
      'Content-Type should be text/markdown'
    );

    // Tracing header must be present and a well-formed UUID v4.
    assert.ok(
      headers['x-request-id'],
      'X-Request-Id header should be present'
    );
    assert.match(
      headers['x-request-id'],
      /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i,
      'X-Request-Id should be valid UUID v4'
    );

    assert.ok(body.length > 0, 'Response body should not be empty');
  });

  it('T009: should include X-Document-Title header in successful response', async () => {
    const { headers } = await makeRequest(`/${VALID_ID}`);

    assert.ok(
      headers['x-document-title'],
      'X-Document-Title header should be present'
    );
  });

  it('T009: should include X-Document-Modified header with ISO 8601 timestamp', async () => {
    const { headers } = await makeRequest(`/${VALID_ID}`);

    assert.ok(
      headers['x-document-modified'],
      'X-Document-Modified header should be present'
    );

    // Date.parse() returning NaN would mean a non-ISO timestamp.
    const timestamp = headers['x-document-modified'];
    assert.ok(
      !isNaN(Date.parse(timestamp)),
      'X-Document-Modified should be valid ISO 8601 date'
    );
  });

  it('T010: should return 404 with no body for invalid document ID', async () => {
    // Well-formed but nonexistent ID → status-only 404.
    const { statusCode, body } = await makeRequest('/invalid-nonexistent-id');

    assert.equal(statusCode, 404, 'Status code should be 404 Not Found');
    assert.equal(body, '', 'Response body should be empty per spec');
  });

  it('T010: should return 403 with no body for document without permission', async () => {
    const { statusCode, body } = await makeRequest('/1CyBB_forbiddenDocument456');

    assert.equal(statusCode, 403, 'Status code should be 403 Forbidden');
    assert.equal(body, '', 'Response body should be empty per spec');
  });

  it('T010: should return 400 with no body for malformed document ID', async () => {
    // Too short and contains characters outside the allowed ID alphabet.
    const { statusCode, body } = await makeRequest('/bad@id!');

    assert.equal(statusCode, 400, 'Status code should be 400 Bad Request');
    assert.equal(body, '', 'Response body should be empty per spec');
  });

  it('T010: should return 413 with no body for document exceeding 20MB limit', async () => {
    const { statusCode, body } = await makeRequest('/1DzCC_largeDocument25MB');

    assert.equal(statusCode, 413, 'Status code should be 413 Payload Too Large');
    assert.equal(body, '', 'Response body should be empty per spec');
  });
});
|
||||
|
||||
describe('Contract: GET /health', () => {

  it('should return 200 with health status object', async () => {
    const { statusCode, headers, body } = await makeRequest('/health');

    assert.equal(statusCode, 200, 'Status code should be 200 OK');
    assert.ok(
      headers['content-type']?.includes('application/json'),
      'Content-Type should be application/json'
    );

    // The JSON payload carries status, version, and uptime fields.
    const health = JSON.parse(body);
    assert.equal(health.status, 'ok', 'Health status should be "ok"');
    assert.ok(health.version, 'Health response should include version');
    assert.ok(typeof health.uptime === 'number', 'Health response should include uptime in seconds');
  });
});
|
||||
|
||||
describe('Contract: GET /sitemap.xml (T026)', () => {

  it('T026: should return 200 with Content-Type application/xml', async () => {
    const { statusCode, headers } = await makeRequest('/sitemap.xml');

    assert.equal(statusCode, 200, 'Status code should be 200 OK');
    assert.ok(
      headers['content-type']?.includes('application/xml'),
      'Content-Type should be application/xml'
    );

    // The proxy advertises how many documents the sitemap covers.
    assert.ok(
      headers['x-document-count'],
      'X-Document-Count header should be present'
    );
    const docCount = parseInt(headers['x-document-count'], 10);
    assert.ok(!isNaN(docCount), 'X-Document-Count should be numeric');
    assert.ok(docCount >= 0, 'X-Document-Count should be non-negative');
  });

  it('T026: should return valid XML sitemap structure per sitemap protocol', async () => {
    const { headers, body } = await makeRequest('/sitemap.xml');

    assert.ok(
      body.startsWith('<?xml version="1.0"'),
      'Should start with XML declaration'
    );
    assert.ok(
      body.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'),
      'Should contain urlset with sitemap namespace'
    );
    assert.ok(
      body.includes('</urlset>'),
      'Should contain closing urlset tag'
    );

    // Per-document entries are only required when documents exist.
    const docCount = parseInt(headers['x-document-count'], 10);
    if (docCount > 0) {
      assert.ok(
        body.includes('<url>') && body.includes('</url>'),
        'Should contain url entries when documents exist'
      );
      assert.ok(
        body.includes('<loc>') && body.includes('</loc>'),
        'URL entries should contain loc elements'
      );
      assert.ok(
        body.includes('<lastmod>') && body.includes('</lastmod>'),
        'URL entries should contain lastmod elements'
      );
    }
  });
});
|
||||
|
||||
describe('Contract: GET /:documentId?format=html (T037)', () => {

  it('T037: should return 200 with Content-Type text/html when format=html', async () => {
    const { statusCode, headers, body } =
      await makeRequest('/1BxAA_validDocumentId123?format=html');

    assert.equal(statusCode, 200, 'Status code should be 200 OK');
    assert.ok(
      headers['content-type']?.includes('text/html'),
      'Content-Type should be text/html'
    );
    assert.ok(body.length > 0, 'Response body should not be empty');
  });
});
|
||||
|
||||
describe('Contract: GET /:documentId?format=pdf (T038)', () => {

  it('T038: should return 200 with Content-Type application/pdf when format=pdf', async () => {
    const { statusCode, headers, body } =
      await makeRequest('/1BxAA_validDocumentId123?format=pdf');

    assert.equal(statusCode, 200, 'Status code should be 200 OK');
    assert.ok(
      headers['content-type']?.includes('application/pdf'),
      'Content-Type should be application/pdf'
    );
    // Binary PDF payload — only presence is asserted at the contract level.
    assert.ok(body.length > 0, 'Response body should not be empty');
  });
});
|
||||
|
||||
describe('Contract: Format parameter validation (T039)', () => {

  // Fixture document ID shared by the cases below.
  const VALID_ID = '1BxAA_validDocumentId123';

  it('T039: should return 400 with no body for invalid format parameter', async () => {
    const { statusCode, body } = await makeRequest(`/${VALID_ID}?format=invalid`);

    assert.equal(statusCode, 400, 'Status code should be 400 Bad Request');
    assert.equal(body, '', 'Response body should be empty per spec');
  });

  it('T039: should default to markdown when format parameter is missing', async () => {
    const { headers } = await makeRequest(`/${VALID_ID}`);

    assert.ok(
      headers['content-type']?.includes('text/markdown'),
      'Should default to text/markdown when format not specified'
    );
  });

  it('T039: should handle format parameter case-insensitively', async () => {
    // NOTE(review): this case tolerates 415 while the invalid-format case
    // above expects 400 — confirm which status the spec actually mandates
    // for an uppercase ?format=HTML.
    const { statusCode } = await makeRequest(`/${VALID_ID}?format=HTML`);

    assert.ok(
      statusCode === 200 || statusCode === 415,
      'Should handle uppercase format parameter'
    );
  });
});
|
||||
@@ -1,395 +0,0 @@
|
||||
/**
|
||||
* Integration Tests: Google Drive API Integration
|
||||
*
|
||||
* Tests OAuth 2.0 and Drive API integration
|
||||
* Tests T011, T027, T057
|
||||
*/
|
||||
|
||||
import { describe, it, before, after } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { google } from 'googleapis';
|
||||
|
||||
describe('Integration: OAuth2 Client Initialization (T011)', () => {

  // Client under test, created in the first case and reused afterwards.
  let oauth2Client;

  before(() => {
    // Stub global.config with the OAuth settings this suite reads.
    global.config = {
      google: {
        clientId: 'test-client-id.apps.googleusercontent.com',
        clientSecret: 'test-client-secret',
        redirectUri: 'http://localhost:3000/oauth/callback',
        scopes: [
          'https://www.googleapis.com/auth/drive.readonly',
          'https://www.googleapis.com/auth/drive.metadata.readonly'
        ]
      }
    };
  });

  it('T011: should initialize OAuth2 client from global.config', () => {
    const { clientId, clientSecret, redirectUri } = global.config.google;

    oauth2Client = new google.auth.OAuth2(clientId, clientSecret, redirectUri);

    assert.ok(oauth2Client, 'OAuth2 client should be initialized');
    assert.equal(oauth2Client._clientId, clientId, 'Client ID should match config');
    assert.equal(oauth2Client._clientSecret, clientSecret, 'Client secret should match config');
  });

  it('T011: should set credentials with access and refresh tokens', () => {
    const credentials = {
      access_token: 'ya29.test_access_token',
      refresh_token: '1//test_refresh_token',
      token_type: 'Bearer',
      expiry_date: Date.now() + 3600000 // 1 hour from now
    };

    oauth2Client.setCredentials(credentials);

    const creds = oauth2Client.credentials;
    assert.equal(creds.access_token, credentials.access_token, 'Access token should be set');
    assert.equal(creds.refresh_token, credentials.refresh_token, 'Refresh token should be set');
  });

  it('T011: should listen for token refresh events', (t, done) => {
    oauth2Client.on('tokens', (tokens) => {
      assert.ok(tokens, 'Tokens should be emitted on refresh');
      done();
    });

    assert.ok(oauth2Client.listenerCount('tokens') > 0, 'Should have tokens event listener');

    // Manually emit to exercise the listener (googleapis emits this in production).
    oauth2Client.emit('tokens', { access_token: 'new_token' });
  });
});
|
||||
|
||||
describe('Integration: Drive API files.get() (T011)', () => {

  let drive;

  before(() => {
    // Build a Drive v3 client backed by throwaway OAuth credentials.
    const auth = new google.auth.OAuth2(
      global.config.google.clientId,
      global.config.google.clientSecret,
      global.config.google.redirectUri
    );
    auth.setCredentials({
      access_token: 'test_token',
      refresh_token: 'test_refresh'
    });
    drive = google.drive({ version: 'v3', auth });
  });

  it('T011: should call files.get() with exportLinks field parameter', async () => {
    const fileId = '1BxAA_testDocumentId';

    // Note: This will fail in tests without real Drive API access (expected in TDD red phase)
    try {
      const response = await drive.files.get({
        fileId,
        fields: 'id,name,mimeType,modifiedTime,size,exportLinks,webViewLink'
      });

      assert.ok(response.data, 'Response should contain data');
      assert.ok(response.data.id, 'Response should contain id field');
      assert.ok(response.data.name, 'Response should contain name field');
    } catch (error) {
      // Expected to fail without real credentials - this is TDD red phase
      assert.ok(
        error.message.includes('invalid') || error.message.includes('auth') || error.message.includes('credentials'),
        'Should fail with auth-related error in test environment'
      );
    }
  });

  it('T011: should handle token expiry and refresh', async () => {
    const auth = new google.auth.OAuth2(
      global.config.google.clientId,
      global.config.google.clientSecret,
      global.config.google.redirectUri
    );

    auth.setCredentials({
      access_token: 'expired_token',
      refresh_token: 'valid_refresh_token',
      expiry_date: Date.now() - 1000 // Expired 1 second ago
    });

    // googleapis should transparently attempt a refresh for the expired token.
    const expiredDrive = google.drive({ version: 'v3', auth });

    try {
      await expiredDrive.files.get({ fileId: 'test', fields: 'id' });
    } catch (error) {
      // Expected to fail in test environment - validates refresh attempt
      assert.ok(error, 'Should attempt token refresh and fail without real refresh token');
    }
  });
});
|
||||
|
||||
describe('Integration: Drive API files.list() with Pagination (T027)', () => {

  let drive;

  before(() => {
    // Drive v3 client with throwaway credentials, shared by both cases.
    const auth = new google.auth.OAuth2(
      global.config.google.clientId,
      global.config.google.clientSecret,
      global.config.google.redirectUri
    );
    auth.setCredentials({
      access_token: 'test_token',
      refresh_token: 'test_refresh'
    });
    drive = google.drive({ version: 'v3', auth });
  });

  it('T027: should retrieve paginated list of documents', async () => {
    const allFiles = [];

    try {
      // Follow nextPageToken until the listing is exhausted.
      let pageToken = null;
      for (;;) {
        const { data } = await drive.files.list({
          pageSize: 100,
          pageToken,
          fields: 'nextPageToken,files(id,name,mimeType,modifiedTime)',
          q: "mimeType='application/vnd.google-apps.document'"
        });

        assert.ok(Array.isArray(data.files), 'Response should contain files array');
        allFiles.push(...data.files);

        pageToken = data.nextPageToken;
        if (!pageToken) break;
      }

      assert.ok(allFiles.length >= 0, 'Should retrieve files (may be 0 in test)');
    } catch (error) {
      // Expected to fail without real credentials
      assert.ok(
        error.message.includes('invalid') || error.message.includes('auth'),
        'Should fail with auth error in test environment'
      );
    }
  });

  it('T027: should handle large result sets (>1000 documents)', async () => {
    const maxPages = 15; // Test pagination up to 1500 docs (100 per page)
    let pageCount = 0;

    try {
      let pageToken = null;
      while (pageCount < maxPages) {
        const { data } = await drive.files.list({
          pageSize: 100,
          pageToken,
          fields: 'nextPageToken,files(id,name)',
          q: "trashed=false"
        });

        pageCount += 1;
        pageToken = data.nextPageToken;

        assert.ok(pageCount <= maxPages, 'Should not infinite loop');
        if (!pageToken) break; // No more pages
      }

      assert.ok(pageCount > 0, 'Should process at least one page');
    } catch (error) {
      // Expected to fail without real credentials
      assert.ok(error, 'Should handle auth error gracefully');
    }
  });
});
|
||||
|
||||
describe('Integration: Large Document Streaming (T057)', () => {

  it('T057: should stream 5MB document without excessive memory usage', async () => {
    // Given: A large document (5MB)
    const initialMemory = process.memoryUsage().heapUsed;

    const mockStreamSize = 5 * 1024 * 1024; // 5MB
    const chunkSize = 64 * 1024; // 64KB chunks

    // Simulate streaming: each chunk is processed and then released.
    // (The previous version pushed every chunk into an array, so it measured
    // the cost of buffering the whole document — the opposite of streaming.)
    let bytesProcessed = 0;
    for (let i = 0; i < mockStreamSize; i += chunkSize) {
      const chunk = Buffer.alloc(Math.min(chunkSize, mockStreamSize - i));
      bytesProcessed += chunk.length;
    }
    assert.equal(bytesProcessed, mockStreamSize, 'Should process every byte of the document');

    // Then: Memory increase should be reasonable (<100MB)
    const finalMemory = process.memoryUsage().heapUsed;
    const memoryIncrease = (finalMemory - initialMemory) / (1024 * 1024); // MB

    assert.ok(
      memoryIncrease < 100,
      `Memory increase should be <100MB for 5MB document, was ${memoryIncrease.toFixed(2)}MB`
    );
  });

  it('T057: should handle streaming with backpressure', async () => {
    const { Readable } = await import('node:stream');

    let chunksRead = 0;
    const totalChunks = 100;

    // Readable that emits 100 x 1KB chunks, then EOF.
    const mockStream = new Readable({
      read() {
        if (chunksRead < totalChunks) {
          this.push(Buffer.alloc(1024)); // 1KB chunk
          chunksRead++;
        } else {
          this.push(null); // EOF
        }
      }
    });

    // Consume via async iteration, which applies backpressure automatically.
    const chunks = [];
    for await (const chunk of mockStream) {
      chunks.push(chunk);
    }

    assert.equal(chunks.length, totalChunks, 'Should receive all chunks');
    assert.equal(chunksRead, totalChunks, 'Should read all chunks');
  });
});
|
||||
|
||||
describe('Integration: Drive API Error Mapping', () => {

  // Helper: register one pass-through case (Drive code → same HTTP status).
  const passThroughCase = (title, code, message, expectation) => {
    it(title, () => {
      const driveError = { code, message };

      // The Drive error code is used directly as the HTTP status.
      const httpStatus = driveError.code;

      assert.equal(httpStatus, code, expectation);
    });
  };

  passThroughCase(
    'should map Drive API 404 error to HTTP 404',
    404, 'File not found', 'Drive 404 should map to HTTP 404'
  );
  passThroughCase(
    'should map Drive API 403 error to HTTP 403',
    403, 'The user does not have permission', 'Drive 403 should map to HTTP 403'
  );
  passThroughCase(
    'should map Drive API 401 error to HTTP 401',
    401, 'Invalid credentials', 'Drive 401 should map to HTTP 401'
  );

  it('should map Drive API 429 error to HTTP 429 with Retry-After', () => {
    const driveError = {
      code: 429,
      message: 'Rate limit exceeded',
      errors: [{ reason: 'rateLimitExceeded' }]
    };

    // 429 passes through like the others, but also carries a retry delay.
    const httpStatus = driveError.code;
    const retryAfter = 60; // Default 60 seconds

    assert.equal(httpStatus, 429, 'Drive 429 should map to HTTP 429');
    assert.equal(retryAfter, 60, 'Should include Retry-After of 60 seconds');
  });

  passThroughCase(
    'should map Drive API 500 error to HTTP 500',
    500, 'Internal server error', 'Drive 500 should map to HTTP 500'
  );
  passThroughCase(
    'should map Drive API 503 error to HTTP 503',
    503, 'Service unavailable', 'Drive 503 should map to HTTP 503'
  );
});
|
||||
@@ -1,438 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Document Export Logic
|
||||
*
|
||||
* Tests document export functions in proxy.js
|
||||
* Tests T012, T013, T014, T040, T041
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: validateDocumentId() (T012)', () => {
|
||||
|
||||
// Mock function to test (will be in proxy.js)
|
||||
function validateDocumentId(id) {
|
||||
const pattern = /^[a-zA-Z0-9_-]{8,128}$/;
|
||||
return pattern.test(id);
|
||||
}
|
||||
|
||||
it('T012: should accept valid 8-character alphanumeric ID', () => {
|
||||
// Given: Valid 8-character document ID
|
||||
const validId = '1BxAA789';
|
||||
|
||||
// When: Validating document ID
|
||||
const isValid = validateDocumentId(validId);
|
||||
|
||||
// Then: Should return true
|
||||
assert.equal(isValid, true, 'Should accept 8-character alphanumeric ID');
|
||||
});
|
||||
|
||||
it('T012: should accept valid 128-character alphanumeric ID', () => {
|
||||
// Given: Valid 128-character document ID
|
||||
const validId = 'a'.repeat(128);
|
||||
|
||||
// When: Validating document ID
|
||||
const isValid = validateDocumentId(validId);
|
||||
|
||||
// Then: Should return true
|
||||
assert.equal(isValid, true, 'Should accept 128-character alphanumeric ID');
|
||||
});
|
||||
|
||||
it('T012: should accept IDs with hyphens and underscores', () => {
|
||||
// Given: Valid IDs with hyphens and underscores
|
||||
const idWithHyphen = '1BxAA-test-123';
|
||||
const idWithUnderscore = '1BxAA_test_123';
|
||||
const idWithBoth = '1BxAA-test_123';
|
||||
|
||||
// When: Validating document IDs
|
||||
const isValidHyphen = validateDocumentId(idWithHyphen);
|
||||
const isValidUnderscore = validateDocumentId(idWithUnderscore);
|
||||
const isValidBoth = validateDocumentId(idWithBoth);
|
||||
|
||||
// Then: Should return true for all
|
||||
assert.equal(isValidHyphen, true, 'Should accept IDs with hyphens');
|
||||
assert.equal(isValidUnderscore, true, 'Should accept IDs with underscores');
|
||||
assert.equal(isValidBoth, true, 'Should accept IDs with both hyphens and underscores');
|
||||
});
|
||||
|
||||
it('T012: should reject IDs shorter than 8 characters', () => {
|
||||
// Given: Invalid short ID
|
||||
const shortId = '1BxAA78';
|
||||
|
||||
// When: Validating document ID
|
||||
const isValid = validateDocumentId(shortId);
|
||||
|
||||
// Then: Should return false
|
||||
assert.equal(isValid, false, 'Should reject IDs shorter than 8 characters');
|
||||
});
|
||||
|
||||
it('T012: should reject IDs longer than 128 characters', () => {
|
||||
// Given: Invalid long ID
|
||||
const longId = 'a'.repeat(129);
|
||||
|
||||
// When: Validating document ID
|
||||
const isValid = validateDocumentId(longId);
|
||||
|
||||
// Then: Should return false
|
||||
assert.equal(isValid, false, 'Should reject IDs longer than 128 characters');
|
||||
});
|
||||
|
||||
it('T012: should reject IDs with invalid characters', () => {
  // Given: IDs each containing one disallowed character.
  const invalidChars = [
    '1BxAA@test', // @ symbol
    '1BxAA test', // space
    '1BxAA!test', // exclamation
    '1BxAA#test', // hash
    '1BxAA.test', // period
  ];

  // When/Then: every candidate must be rejected.
  for (const id of invalidChars) {
    const isValid = validateDocumentId(id);
    assert.equal(isValid, false, `Should reject ID with invalid character: ${id}`);
  }
});
it('T012: should reject empty string', () => {
  // Given/When/Then: the empty string is not a valid document ID.
  const isValid = validateDocumentId('');
  assert.equal(isValid, false, 'Should reject empty string');
});
});
|
||||
|
||||
describe('Unit: findExportLink() (T013, T041)', () => {

  // Mock function to test (will be in proxy.js).
  // Resolves the best export URL for a requested format by walking a
  // preference-ordered list of MIME types; returns null when nothing matches.
  function findExportLink(exportLinks, format = 'markdown') {
    if (!exportLinks) return null;

    const formatMap = {
      'markdown': ['text/x-markdown', 'text/markdown', 'text/html'],
      'html': ['text/html'],
      'pdf': ['application/pdf']
    };

    const candidates = formatMap[format.toLowerCase()] || [];
    const matched = candidates.find((mimeType) => exportLinks[mimeType]);
    return matched ? exportLinks[matched] : null;
  }

  it('T013: should select text/x-markdown from exportLinks when available', () => {
    // Given: every export flavour is offered.
    const exportLinks = {
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: markdown resolution prefers text/x-markdown.
    const link = findExportLink(exportLinks, 'markdown');
    assert.equal(link, exportLinks['text/x-markdown'], 'Should select text/x-markdown');
  });

  it('T013: should fall back to text/html when text/x-markdown unavailable', () => {
    // Given: neither markdown MIME type is offered.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: resolution falls through to the HTML link.
    const link = findExportLink(exportLinks, 'markdown');
    assert.equal(link, exportLinks['text/html'], 'Should fall back to text/html');
  });

  it('T013: should prefer text/markdown over text/html when available', () => {
    // Given: text/markdown (but not text/x-markdown) is offered.
    const exportLinks = {
      'text/markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html'
    };

    // When/Then: text/markdown outranks text/html.
    const link = findExportLink(exportLinks, 'markdown');
    assert.equal(link, exportLinks['text/markdown'], 'Should prefer text/markdown');
  });

  it('T041: should select text/html MIME type for html format', () => {
    // Given: several formats are available.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: the html format maps straight to text/html.
    const link = findExportLink(exportLinks, 'html');
    assert.equal(link, exportLinks['text/html'], 'Should select text/html for html format');
  });

  it('T041: should select application/pdf MIME type for pdf format', () => {
    // Given: HTML and PDF links are available.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: the pdf format maps to application/pdf.
    const link = findExportLink(exportLinks, 'pdf');
    assert.equal(link, exportLinks['application/pdf'], 'Should select application/pdf for pdf format');
  });

  it('T041: should return null when requested format unavailable', () => {
    // Given: no PDF link is offered.
    const exportLinks = {
      'text/html': 'https://docs.google.com/export?format=html'
    };

    // When/Then: an unavailable format resolves to null.
    const link = findExportLink(exportLinks, 'pdf');
    assert.equal(link, null, 'Should return null when format unavailable');
  });

  it('should return null when exportLinks is null or undefined', () => {
    // Given/When: missing exportLinks input.
    const linkFromNull = findExportLink(null, 'markdown');
    const linkFromUndefined = findExportLink(undefined, 'markdown');

    // Then: both degrade gracefully to null.
    assert.equal(linkFromNull, null, 'Should return null for null exportLinks');
    assert.equal(linkFromUndefined, null, 'Should return null for undefined exportLinks');
  });
});
describe('Unit: validateDocumentSize() (T014)', () => {

  // Mock function to test (will be in proxy.js).
  // Enforces the 20MB download cap. Native Drive files (Docs, Sheets,
  // Slides) carry no size property — they are exported on the fly and
  // always pass the check.
  function validateDocumentSize(metadata) {
    const MAX_BYTES = 20 * 1024 * 1024; // 20MB

    if (!metadata.size) {
      return { valid: true };
    }

    const size = parseInt(metadata.size, 10);
    return size > MAX_BYTES
      ? { valid: false, error: 'Document exceeds 20MB size limit', statusCode: 413 }
      : { valid: true, size };
  }

  it('T014: should accept documents under 20MB', () => {
    // Given: a 10MB file.
    const metadata = { id: '1BxAA_test', name: 'test.pdf', size: '10485760' };

    // When/Then: validation passes and reports the parsed byte count.
    const result = validateDocumentSize(metadata);
    assert.equal(result.valid, true, 'Should accept document < 20MB');
    assert.equal(result.size, 10485760, 'Should return parsed size');
  });

  it('T014: should accept documents exactly at 20MB', () => {
    // Given: a file sitting exactly on the limit.
    const metadata = { id: '1BxAA_test', name: 'test.pdf', size: '20971520' };

    // When/Then: the boundary value is still accepted (limit is inclusive).
    const result = validateDocumentSize(metadata);
    assert.equal(result.valid, true, 'Should accept document exactly at 20MB');
  });

  it('T014: should reject documents over 20MB', () => {
    // Given: a file one byte over the limit.
    const metadata = { id: '1BxAA_test', name: 'large.pdf', size: '20971521' };

    // When/Then: validation fails with a 413 payload and an error message.
    const result = validateDocumentSize(metadata);
    assert.equal(result.valid, false, 'Should reject document > 20MB');
    assert.equal(result.statusCode, 413, 'Should return 413 status code');
    assert.ok(result.error, 'Should include error message');
  });

  it('T014: should accept native Google Drive documents without size', () => {
    // Given: a native Google Doc — the Drive API reports no size for these.
    const metadata = {
      id: '1BxAA_test',
      name: 'My Document',
      mimeType: 'application/vnd.google-apps.document'
    };

    // When/Then: native files bypass the size check entirely.
    const result = validateDocumentSize(metadata);
    assert.equal(result.valid, true, 'Should accept native Drive documents without size');
  });

  it('T014: should handle size as number string', () => {
    // Given: size as a string, mirroring what the Drive API returns.
    const metadata = { id: '1BxAA_test', name: 'test.pdf', size: '5242880' };

    // When/Then: the string is parsed into a numeric byte count.
    const result = validateDocumentSize(metadata);
    assert.equal(result.valid, true, 'Should handle size as string');
    assert.equal(result.size, 5242880, 'Should parse size to number');
  });
});
describe('Unit: parseFormatParam() (T040)', () => {

  // Mock function to test (will be in proxy.js).
  // Extracts and validates the ?format= query parameter: defaults to
  // markdown when absent, lowercases the value, and rejects anything
  // outside the allow-list with a 400.
  function parseFormatParam(url) {
    const requested = new URL(url, 'http://localhost').searchParams.get('format');

    if (!requested) {
      return { valid: true, format: 'markdown' }; // Default
    }

    const normalized = requested.toLowerCase();
    const validFormats = ['markdown', 'html', 'pdf'];

    if (!validFormats.includes(normalized)) {
      return { valid: false, error: 'Invalid format parameter', statusCode: 400 };
    }

    return { valid: true, format: normalized };
  }

  it('T040: should extract format parameter from query string', () => {
    // Given/When: a URL carrying ?format=html.
    const result = parseFormatParam('/1BxAA_test?format=html');

    // Then: the parameter is extracted verbatim.
    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'html', 'Should extract html format');
  });

  it('T040: should validate against allowed values (markdown|html|pdf)', () => {
    // Given: one URL per allowed format.
    const urls = ['/doc?format=markdown', '/doc?format=html', '/doc?format=pdf'];

    // When/Then: every allowed value is accepted.
    for (const url of urls) {
      const result = parseFormatParam(url);
      assert.equal(result.valid, true, `Should accept format in ${url}`);
    }
  });

  it('T040: should return default markdown when format parameter missing', () => {
    // Given/When: a URL with no format parameter at all.
    const result = parseFormatParam('/1BxAA_test');

    // Then: markdown is the implicit default.
    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'markdown', 'Should default to markdown');
  });

  it('T040: should normalize format to lowercase', () => {
    // Given/When/Then: mixed-case values are folded to lowercase.
    assert.equal(parseFormatParam('/doc?format=HTML').format, 'html', 'Should normalize HTML to html');
    assert.equal(parseFormatParam('/doc?format=Markdown').format, 'markdown', 'Should normalize Markdown to markdown');
    assert.equal(parseFormatParam('/doc?format=PDF').format, 'pdf', 'Should normalize PDF to pdf');
  });

  it('T040: should return 400 status for invalid format values', () => {
    // Given/When: a format outside the allow-list.
    const result = parseFormatParam('/1BxAA_test?format=invalid');

    // Then: rejected with a 400 and an error message.
    assert.equal(result.valid, false, 'Should be invalid');
    assert.equal(result.statusCode, 400, 'Should return 400 status');
    assert.ok(result.error, 'Should include error message');
  });

  it('T040: should handle multiple query parameters', () => {
    // Given/When: format mixed in with unrelated query parameters.
    const result = parseFormatParam('/1BxAA_test?format=pdf&other=value&another=param');

    // Then: the format is still isolated correctly.
    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'pdf', 'Should extract format from multi-param URL');
  });
});
@@ -1,377 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Request Routing Logic
|
||||
*
|
||||
* Tests request routing and error mapping in proxy.js
|
||||
* Tests T015, T016, T050
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: handleRequest() Routing (T015)', () => {

  // Mock routing function (will be in proxy.js).
  // Maps (method, url) onto one of three routes — health, sitemap, or
  // document — rejecting non-GET methods with 405 and everything else
  // with 404.
  function parseRoute(method, url) {
    if (method !== 'GET') {
      return { route: null, error: 'Method not allowed', statusCode: 405 };
    }

    const { pathname } = new URL(url, 'http://localhost');

    if (pathname === '/health') {
      return { route: 'health' };
    }

    if (pathname === '/sitemap.xml') {
      return { route: 'sitemap' };
    }

    // Document route: /:documentId
    const docMatch = pathname.match(/^\/([a-zA-Z0-9_-]+)$/);
    if (docMatch) {
      return { route: 'document', documentId: docMatch[1] };
    }

    return { route: null, error: 'Not found', statusCode: 404 };
  }

  it('T015: should route /health to health check handler', () => {
    // Given/When: GET /health.
    const result = parseRoute('GET', '/health');

    // Then: the health route is chosen.
    assert.equal(result.route, 'health', 'Should route to health handler');
  });

  it('T015: should route /:documentId to document export handler', () => {
    // Given/When: GET with a bare document-ID path.
    const result = parseRoute('GET', '/1BxAA_testDocument123');

    // Then: the document route is chosen and the ID captured.
    assert.equal(result.route, 'document', 'Should route to document handler');
    assert.equal(result.documentId, '1BxAA_testDocument123', 'Should extract document ID');
  });

  it('T015: should route /sitemap.xml to sitemap handler', () => {
    // Given/When: GET /sitemap.xml.
    const result = parseRoute('GET', '/sitemap.xml');

    // Then: the sitemap route is chosen.
    assert.equal(result.route, 'sitemap', 'Should route to sitemap handler');
  });

  it('T015: should return 404 for unknown routes', () => {
    // Given/When: a multi-segment path no handler owns.
    const result = parseRoute('GET', '/unknown/path');

    // Then: no route matches and a 404 is produced.
    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 404, 'Should return 404 status');
  });

  it('T015: should return 405 for non-GET methods', () => {
    // Given/When: a POST to an otherwise valid document path.
    const result = parseRoute('POST', '/1BxAA_test');

    // Then: rejected up front with 405 Method Not Allowed.
    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 405, 'Should return 405 status');
  });

  it('T015: should extract documentId with hyphens and underscores', () => {
    // Given: document IDs using the allowed punctuation.
    const urls = ['/1BxAA-test-123', '/1BxAA_test_123', '/1BxAA-test_123'];

    // When/Then: each resolves to the document route with an ID.
    for (const url of urls) {
      const result = parseRoute('GET', url);
      assert.equal(result.route, 'document', `Should route ${url} to document handler`);
      assert.ok(result.documentId, `Should extract document ID from ${url}`);
    }
  });
});
describe('Unit: mapDriveError() (T016)', () => {

  // Mock error mapping function (will be in proxy.js).
  // Translates a GaxiosError-shaped object from googleapis into the HTTP
  // status/message this proxy returns, defaulting unknown codes to 500.
  function mapDriveError(error) {
    // Prefer error.code, then the underlying HTTP response status,
    // then assume a server fault.
    const statusCode = error.code || error.response?.status || 500;

    const mapping = {
      404: { status: 404, message: 'Not Found' },
      403: { status: 403, message: 'Forbidden' },
      401: { status: 401, message: 'Unauthorized' },
      429: { status: 429, message: 'Too Many Requests', retryAfter: 60 },
      500: { status: 500, message: 'Internal Server Error' },
      503: { status: 503, message: 'Service Unavailable' }
    };

    return mapping[statusCode] ?? { status: 500, message: 'Internal Server Error' };
  }

  it('T016: should convert Drive API 404 to HTTP 404', () => {
    // Given/When: a Drive "file not found" error.
    const result = mapDriveError({ code: 404, message: 'File not found' });

    // Then: surfaced as HTTP 404.
    assert.equal(result.status, 404, 'Should map to 404 status');
  });

  it('T016: should convert Drive API 403 to HTTP 403', () => {
    // Given/When: a Drive permission error.
    const result = mapDriveError({ code: 403, message: 'Permission denied' });

    // Then: surfaced as HTTP 403.
    assert.equal(result.status, 403, 'Should map to 403 status');
  });

  it('T016: should convert Drive API 401 to HTTP 401', () => {
    // Given/When: a Drive credential error.
    const result = mapDriveError({ code: 401, message: 'Invalid credentials' });

    // Then: surfaced as HTTP 401.
    assert.equal(result.status, 401, 'Should map to 401 status');
  });

  it('T016: should convert Drive API 429 to HTTP 429 with Retry-After', () => {
    // Given/When: a Drive rate-limit error.
    const result = mapDriveError({ code: 429, message: 'Rate limit exceeded' });

    // Then: surfaced as HTTP 429 plus a fixed 60-second Retry-After hint.
    assert.equal(result.status, 429, 'Should map to 429 status');
    assert.equal(result.retryAfter, 60, 'Should include Retry-After of 60 seconds');
  });

  it('T016: should convert Drive API 500 to HTTP 500', () => {
    // Given/When: a Drive internal error.
    const result = mapDriveError({ code: 500, message: 'Internal error' });

    // Then: surfaced as HTTP 500.
    assert.equal(result.status, 500, 'Should map to 500 status');
  });

  it('T016: should convert Drive API 503 to HTTP 503', () => {
    // Given/When: Drive reports itself unavailable.
    const result = mapDriveError({ code: 503, message: 'Service unavailable' });

    // Then: surfaced as HTTP 503.
    assert.equal(result.status, 503, 'Should map to 503 status');
  });

  it('should handle errors without code by checking response.status', () => {
    // Given: an error shaped like a raw HTTP failure (no .code field).
    const driveError = {
      response: { status: 404, statusText: 'Not Found' },
      message: 'Request failed'
    };

    // When/Then: the response status is used instead.
    const result = mapDriveError(driveError);
    assert.equal(result.status, 404, 'Should map using response.status');
  });

  it('should default to 500 for unknown error codes', () => {
    // Given/When: a status code outside the mapping table.
    const result = mapDriveError({ code: 999, message: 'Unknown error' });

    // Then: conservative 500 fallback.
    assert.equal(result.status, 500, 'Should default to 500 for unknown codes');
  });
});
describe('Unit: Rate Limiting (T050)', () => {

  // Mock rate limiter (will be in proxy.js).
  // Sliding-window limiter: remembers per-IP request timestamps and
  // rejects once maxRequests land inside the trailing windowMs.
  class RateLimiter {
    constructor(maxRequests = 100, windowMs = 60000) {
      this.maxRequests = maxRequests;
      this.windowMs = windowMs;
      this.requests = new Map(); // ip -> [timestamps]
    }

    // Record one request for `ip`, or deny it when the window is full.
    // Returns { allowed } on success, { allowed, statusCode, retryAfter }
    // (429, seconds until capacity frees up) on rejection.
    checkLimit(ip) {
      const now = Date.now();
      const windowStart = now - this.windowMs;

      // Keep only the timestamps still inside the sliding window.
      const recent = (this.requests.get(ip) || []).filter((ts) => ts > windowStart);

      if (recent.length >= this.maxRequests) {
        // Tell the client when the oldest in-window request will age out.
        const retryAfter = Math.ceil((recent[0] + this.windowMs - now) / 1000);
        return { allowed: false, statusCode: 429, retryAfter };
      }

      recent.push(now);
      this.requests.set(ip, recent);
      return { allowed: true };
    }

    // Drop expired timestamps (and now-empty IPs) to bound memory use.
    cleanup() {
      const windowStart = Date.now() - this.windowMs;

      for (const [ip, timestamps] of this.requests.entries()) {
        const fresh = timestamps.filter((ts) => ts > windowStart);
        if (fresh.length === 0) {
          this.requests.delete(ip);
        } else {
          this.requests.set(ip, fresh);
        }
      }
    }
  }

  it('T050: should allow 100 requests from same IP within window', () => {
    // Given: the default 100 req/min budget.
    const limiter = new RateLimiter(100, 60000);
    const testIp = '192.168.1.1';

    // When: the budget is consumed exactly.
    let allowedCount = 0;
    for (let i = 0; i < 100; i++) {
      if (limiter.checkLimit(testIp).allowed) allowedCount++;
    }

    // Then: nothing is rejected yet.
    assert.equal(allowedCount, 100, 'Should allow 100 requests');
  });

  it('T050: should return 429 with Retry-After header on 101st request', () => {
    // Given: a limiter whose budget has just been exhausted.
    const limiter = new RateLimiter(100, 60000);
    const testIp = '192.168.1.1';
    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(testIp);
    }

    // When: one request beyond the budget arrives.
    const result = limiter.checkLimit(testIp);

    // Then: rejected with 429 and a bounded Retry-After.
    assert.equal(result.allowed, false, 'Should not allow 101st request');
    assert.equal(result.statusCode, 429, 'Should return 429 status');
    assert.ok(result.retryAfter > 0, 'Should include Retry-After in seconds');
    assert.ok(result.retryAfter <= 60, 'Retry-After should be <= 60 seconds');
  });

  it('T050: should track requests per IP independently', () => {
    // Given: two distinct client IPs.
    const limiter = new RateLimiter(100, 60000);
    const ip1 = '192.168.1.1';
    const ip2 = '192.168.1.2';

    // When: each IP exhausts its own budget.
    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(ip1);
      limiter.checkLimit(ip2);
    }

    // Then: both hit their independent limits.
    const result1 = limiter.checkLimit(ip1);
    const result2 = limiter.checkLimit(ip2);
    assert.equal(result1.allowed, false, 'IP1 should be rate limited');
    assert.equal(result2.allowed, false, 'IP2 should be rate limited');
  });

  it('T050: should cleanup old entries outside time window', () => {
    // Given: a small fast limiter with recorded traffic.
    const limiter = new RateLimiter(10, 1000); // 10 req/sec for testing
    const testIp = '192.168.1.1';
    for (let i = 0; i < 10; i++) {
      limiter.checkLimit(testIp);
    }

    // When: cleanup runs while the entries are still fresh.
    limiter.cleanup();

    // Then: the IP's in-window entries survive.
    assert.ok(limiter.requests.has(testIp), 'Should have IP in requests map');
  });

  it('T050: should reset limit after time window expires', () => {
    // Given: a tiny 5-requests-per-100ms limiter, fully consumed.
    const limiter = new RateLimiter(5, 100); // 5 req / 100ms
    const testIp = '192.168.1.1';
    for (let i = 0; i < 5; i++) {
      limiter.checkLimit(testIp);
    }

    // When: stored timestamps are backdated past the window (no sleeping).
    const backdated = limiter.requests.get(testIp).map((ts) => ts - 200); // Make them 200ms old
    limiter.requests.set(testIp, backdated);

    // Then: capacity is available again.
    const result = limiter.checkLimit(testIp);
    assert.equal(result.allowed, true, 'Should allow request after window expires');
  });
});
@@ -1,386 +0,0 @@
|
||||
/**
|
||||
* Unit Tests: Sitemap Generation Logic
|
||||
*
|
||||
* Tests sitemap XML generation functions
|
||||
* Tests T028, T029, T030
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: escapeXml() (T028)', () => {

  // Mock XML escape function (will be in proxy.js).
  // Replaces the five XML-significant characters with entity references.
  // NOTE: '&' must be escaped FIRST, otherwise the '&' introduced by the
  // other replacements would itself be double-escaped.
  // (The previous text of this suite had been corrupted by HTML-entity
  // decoding, turning every replacement into a no-op; entities restored.)
  function escapeXml(str) {
    if (typeof str !== 'string') return '';

    return str
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;')
      .replace(/'/g, '&#39;');
  }

  it('T028: should escape < character to &lt;', () => {
    // Given: String with < character
    const input = 'test < value';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape <
    assert.equal(output, 'test &lt; value', 'Should escape <');
  });

  it('T028: should escape > character to &gt;', () => {
    // Given: String with > character
    const input = 'test > value';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape >
    assert.equal(output, 'test &gt; value', 'Should escape >');
  });

  it('T028: should escape & character to &amp;', () => {
    // Given: String with & character
    const input = 'test & value';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape &
    assert.equal(output, 'test &amp; value', 'Should escape &');
  });

  it('T028: should escape " character to &quot;', () => {
    // Given: String with " character
    const input = 'test "value"';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape "
    assert.equal(output, 'test &quot;value&quot;', 'Should escape "');
  });

  it("T028: should escape ' character to &#39;", () => {
    // Given: String with ' character
    const input = "test 'value'";

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape '
    assert.equal(output, 'test &#39;value&#39;', "Should escape '");
  });

  it('T028: should escape multiple special characters in correct order', () => {
    // Given: String with multiple special characters
    const input = '<tag attr="value" other=\'test\'>content & more</tag>';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape all characters properly
    assert.equal(
      output,
      '&lt;tag attr=&quot;value&quot; other=&#39;test&#39;&gt;content &amp; more&lt;/tag&gt;',
      'Should escape all XML special characters'
    );
  });

  it('T028: should handle strings without special characters', () => {
    // Given: String without special characters
    const input = 'normal text 123';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should return unchanged
    assert.equal(output, input, 'Should not modify strings without special chars');
  });

  it('T028: should handle empty string', () => {
    // Given/When: Escaping the empty string
    const output = escapeXml('');

    // Then: Should return empty string
    assert.equal(output, '', 'Should handle empty string');
  });

  it('T028: should handle non-string input gracefully', () => {
    // Given: Non-string inputs
    const inputs = [null, undefined, 123, { foo: 'bar' }];

    // When/Then: Should return empty string for non-strings
    inputs.forEach(input => {
      const output = escapeXml(input);
      assert.equal(output, '', `Should return empty string for ${typeof input}`);
    });
  });
});
describe('Unit: formatSitemapEntry() (T029)', () => {

  /**
   * Mock sitemap entry formatter (will be in proxy.js).
   * Renders a single <url> element for one Drive document.
   *
   * @param {{id: string, modifiedTime: string}} document - Drive document metadata.
   * @param {string} baseUrl - Adapter base URL without a trailing slash.
   * @returns {string} XML <url> fragment with an escaped <loc> and raw <lastmod>.
   */
  function formatSitemapEntry(document, baseUrl) {
    // Escape the characters XML reserves in text content.
    // Ampersand MUST be replaced first, otherwise the '&' produced by
    // the &lt;/&gt; entities would be double-escaped.
    function escapeXml(str) {
      return str
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;');
    }

    const loc = `${baseUrl}/${document.id}`;
    const lastmod = document.modifiedTime;

    return `  <url>
    <loc>${escapeXml(loc)}</loc>
    <lastmod>${lastmod}</lastmod>
  </url>`;
  }

  it('T029: should convert DriveDocument to XML url element', () => {
    // Given: DriveDocument metadata
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should generate valid XML
    assert.ok(xml.includes('<url>'), 'Should contain opening url tag');
    assert.ok(xml.includes('</url>'), 'Should contain closing url tag');
    assert.ok(xml.includes('<loc>'), 'Should contain loc element');
    assert.ok(xml.includes('</loc>'), 'Should contain closing loc tag');
    assert.ok(xml.includes('<lastmod>'), 'Should contain lastmod element');
    assert.ok(xml.includes('</lastmod>'), 'Should contain closing lastmod tag');
  });

  it('T029: should include correct location URL with documentId', () => {
    // Given: DriveDocument metadata
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Location should point to adapter endpoint
    assert.ok(
      xml.includes(`<loc>http://localhost:3000/${document.id}</loc>`),
      'Should include correct location URL'
    );
  });

  it('T029: should include ISO 8601 lastmod timestamp', () => {
    // Given: DriveDocument with modified time
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should include lastmod with ISO 8601 timestamp
    assert.ok(
      xml.includes('<lastmod>2026-03-06T10:30:00Z</lastmod>'),
      'Should include ISO 8601 lastmod timestamp'
    );
  });

  it('T029: should escape special XML characters in URL', () => {
    // Given: DriveDocument with special characters in ID (edge case)
    const document = {
      id: '1BxAA-test&123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should escape & in URL (as the &amp; entity)
    assert.ok(
      xml.includes('&amp;'),
      'Should escape special XML characters in URL'
    );
  });

  it('T029: should handle different baseUrl formats', () => {
    // Given: Different baseUrl formats
    const document = {
      id: '1BxAA_test',
      name: 'Test',
      modifiedTime: '2026-03-06T10:30:00Z'
    };

    const baseUrls = [
      'http://localhost:3000',
      'https://example.com',
      'https://api.example.com/v1'
    ];

    // When: Formatting with each baseUrl
    // Then: Should generate correct loc for each
    baseUrls.forEach(baseUrl => {
      const xml = formatSitemapEntry(document, baseUrl);
      assert.ok(
        xml.includes(`<loc>${baseUrl}/${document.id}</loc>`),
        `Should work with baseUrl: ${baseUrl}`
      );
    });
  });
});
|
||||
|
||||
describe('Unit: generateSitemap() Structure (T030)', () => {

  /**
   * Mock sitemap generator structure (will be in proxy.js).
   * Builds a complete sitemap.org-protocol XML document from a
   * list of Drive documents.
   *
   * @param {Array<{id: string, modifiedTime: string}>} documents - Documents to list.
   * @param {string} baseUrl - Adapter base URL without a trailing slash.
   * @returns {string} Well-formed sitemap XML (declaration + <urlset>).
   */
  function buildSitemapXml(documents, baseUrl) {
    // Escape the characters XML reserves in text content.
    // Ampersand MUST be replaced first, otherwise the '&' produced by
    // the &lt;/&gt; entities would be double-escaped.
    function escapeXml(str) {
      return str
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;');
    }

    let xml = '<?xml version="1.0" encoding="UTF-8"?>\n';
    xml += '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n';

    documents.forEach(doc => {
      const loc = `${baseUrl}/${doc.id}`;
      xml += `  <url>\n`;
      xml += `    <loc>${escapeXml(loc)}</loc>\n`;
      xml += `    <lastmod>${doc.modifiedTime}</lastmod>\n`;
      xml += `  </url>\n`;
    });

    xml += '</urlset>';

    return xml;
  }

  it('T030: should build complete XML with declaration', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should start with XML declaration
    assert.ok(
      xml.startsWith('<?xml version="1.0"'),
      'Should start with XML declaration'
    );
  });

  it('T030: should include correct sitemap namespace', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include sitemap protocol namespace
    assert.ok(
      xml.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'),
      'Should include correct sitemap namespace'
    );
  });

  it('T030: should include closing urlset tag', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should end with closing urlset tag
    assert.ok(xml.endsWith('</urlset>'), 'Should end with closing urlset tag');
  });

  it('T030: should include multiple url entries for multiple documents', () => {
    // Given: Multiple documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' },
      { id: '2CyBB_doc2', name: 'Doc 2', modifiedTime: '2026-03-06T11:00:00Z' },
      { id: '3DzCC_doc3', name: 'Doc 3', modifiedTime: '2026-03-06T12:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include all documents
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 3, 'Should include 3 url entries');

    // Then: Each document should have its loc
    documents.forEach(doc => {
      assert.ok(
        xml.includes(`<loc>http://localhost:3000/${doc.id}</loc>`),
        `Should include url entry for ${doc.id}`
      );
    });
  });

  it('T030: should handle empty document list', () => {
    // Given: Empty documents array
    const documents = [];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should still have valid XML structure
    assert.ok(xml.includes('<?xml version'), 'Should have XML declaration');
    assert.ok(xml.includes('<urlset'), 'Should have urlset opening');
    assert.ok(xml.includes('</urlset>'), 'Should have urlset closing');

    // Then: Should have no url entries
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 0, 'Should have no url entries');
  });

  it('T030: should generate valid XML that browsers can parse', () => {
    // Given: Sample documents
    const documents = [
      { id: '1BxAA_test', name: 'Test', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: XML should be well-formed (basic checks)
    // Count opening and closing tags
    const openingUrlset = (xml.match(/<urlset/g) || []).length;
    const closingUrlset = (xml.match(/<\/urlset>/g) || []).length;
    assert.equal(openingUrlset, closingUrlset, 'urlset tags should be balanced');

    const openingUrl = (xml.match(/<url>/g) || []).length;
    const closingUrl = (xml.match(/<\/url>/g) || []).length;
    assert.equal(openingUrl, closingUrl, 'url tags should be balanced');
  });
});
|
||||
Reference in New Issue
Block a user