Initial Version of sitemap.xml spec
This commit is contained in:
256
tests/unit/auth.test.js
Normal file
256
tests/unit/auth.test.js
Normal file
@@ -0,0 +1,256 @@
|
||||
/**
|
||||
* Unit Tests: Service Account Authentication
|
||||
*
|
||||
* Tests T033-T034: Test JWT authentication and credential validation
|
||||
* Tests the auth.js module in isolation
|
||||
*
|
||||
* @module tests/unit/auth
|
||||
*/
|
||||
|
||||
import { describe, it, beforeEach } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
// =============================================================================
|
||||
// T033: Unit test for Service Account JWT authentication
|
||||
// =============================================================================
|
||||
|
||||
describe('T033: Service Account JWT Authentication', () => {
  // Snapshot of GOOGLE_SERVICE_ACCOUNT_KEY taken before each test; the
  // tests mutate process.env and must put it back when they finish.
  let originalEnv;

  beforeEach(() => {
    // Save original env
    originalEnv = process.env.GOOGLE_SERVICE_ACCOUNT_KEY;
  });

  // Restore GOOGLE_SERVICE_ACCOUNT_KEY to its pre-test value.
  //
  // FIX: the original inlined this restore at the bottom of every test,
  // which (a) duplicated the logic three times and (b) skipped the restore
  // entirely whenever an assertion threw first, leaking the mock key into
  // later tests. Tests now call this from a `finally` block. The check is
  // `!== undefined` rather than truthiness so an empty-string original
  // value is restored instead of deleted.
  function restoreEnv() {
    if (originalEnv !== undefined) {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = originalEnv;
    } else {
      delete process.env.GOOGLE_SERVICE_ACCOUNT_KEY;
    }
  }

  it('should create GoogleAuth client from GOOGLE_SERVICE_ACCOUNT_KEY env var', async () => {
    // Mock credentials as inline JSON (per clarification #1)
    const mockCredentials = {
      type: 'service_account',
      project_id: 'test-project',
      private_key_id: 'key123',
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      client_email: 'test@test-project.iam.gserviceaccount.com',
      client_id: '123456789',
      auth_uri: 'https://accounts.google.com/o/oauth2/auth',
      token_uri: 'https://oauth2.googleapis.com/token',
      auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs'
    };

    try {
      // Set env var with inline JSON
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(mockCredentials);

      // TODO: Import and call initializeAuth from src/auth.js
      // const { initializeAuth } = await import('../../src/auth.js');
      // const auth = await initializeAuth();

      // Verify GoogleAuth was created with correct credentials
      // assert.ok(auth, 'Should return auth client');
      // assert.equal(auth.credentials.client_email, mockCredentials.client_email, 'Should use client_email from env var');
    } finally {
      restoreEnv();
    }
  });

  it('should use correct Drive API scope (read-only)', async () => {
    const mockCredentials = {
      type: 'service_account',
      project_id: 'test-project',
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      client_email: 'test@test-project.iam.gserviceaccount.com'
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(mockCredentials);

      // TODO: Import and call initializeAuth
      // const { initializeAuth } = await import('../../src/auth.js');
      // const auth = await initializeAuth();

      // Verify scope is read-only (referenced by the commented assertion below)
      const expectedScope = 'https://www.googleapis.com/auth/drive.readonly';
      // assert.ok(auth.scopes.includes(expectedScope), 'Should use drive.readonly scope');
    } finally {
      restoreEnv();
    }
  });

  it('should parse inline JSON from env var correctly', async () => {
    // Test with different JSON formatting (whitespace, escaped quotes)
    const mockCredentials = {
      client_email: 'test@project.iam.gserviceaccount.com',
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      project_id: 'test-project'
    };

    try {
      // Set with extra whitespace
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(mockCredentials, null, 2);

      // TODO: Import and call initializeAuth
      // const { initializeAuth } = await import('../../src/auth.js');
      // const auth = await initializeAuth();

      // Should parse correctly despite formatting
      // assert.ok(auth, 'Should parse JSON with whitespace');
    } finally {
      restoreEnv();
    }
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T034: Unit test for credential validation
|
||||
// =============================================================================
|
||||
|
||||
describe('T034: Credential Validation', () => {
  // FIX: every test in this suite previously ended with an unconditional
  // `delete process.env.GOOGLE_SERVICE_ACCOUNT_KEY`, which (a) clobbered
  // any value the surrounding environment had set — inconsistent with the
  // save/restore pattern used by the T033 suite above — and (b) never ran
  // at all if an assertion threw first. The suite now snapshots the
  // variable before each test and restores it in a `finally` block.
  let originalEnv;

  beforeEach(() => {
    // Save original env
    originalEnv = process.env.GOOGLE_SERVICE_ACCOUNT_KEY;
  });

  // Restore GOOGLE_SERVICE_ACCOUNT_KEY to its pre-test value.
  function restoreEnv() {
    if (originalEnv !== undefined) {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = originalEnv;
    } else {
      delete process.env.GOOGLE_SERVICE_ACCOUNT_KEY;
    }
  }

  it('should detect missing client_email field', async () => {
    const invalidCredentials = {
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      project_id: 'test-project'
      // Missing client_email
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(invalidCredentials);

      // TODO: Import validateCredentials from src/auth.js
      // const { validateCredentials } = await import('../../src/auth.js');

      // Should throw error for missing client_email
      // await assert.rejects(
      //   async () => await validateCredentials(invalidCredentials),
      //   { message: /client_email/ },
      //   'Should reject credentials without client_email'
      // );
    } finally {
      restoreEnv();
    }
  });

  it('should detect missing private_key field', async () => {
    const invalidCredentials = {
      client_email: 'test@project.iam.gserviceaccount.com',
      project_id: 'test-project'
      // Missing private_key
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(invalidCredentials);

      // TODO: Import validateCredentials
      // const { validateCredentials } = await import('../../src/auth.js');

      // Should throw error for missing private_key
      // await assert.rejects(
      //   async () => await validateCredentials(invalidCredentials),
      //   { message: /private_key/ },
      //   'Should reject credentials without private_key'
      // );
    } finally {
      restoreEnv();
    }
  });

  it('should detect missing project_id field', async () => {
    const invalidCredentials = {
      client_email: 'test@project.iam.gserviceaccount.com',
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n'
      // Missing project_id
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(invalidCredentials);

      // TODO: Import validateCredentials
      // const { validateCredentials } = await import('../../src/auth.js');

      // Should throw error for missing project_id
      // await assert.rejects(
      //   async () => await validateCredentials(invalidCredentials),
      //   { message: /project_id/ },
      //   'Should reject credentials without project_id'
      // );
    } finally {
      restoreEnv();
    }
  });

  it('should detect empty credential fields', async () => {
    const invalidCredentials = {
      client_email: '', // Empty
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      project_id: 'test-project'
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(invalidCredentials);

      // TODO: Import validateCredentials
      // const { validateCredentials } = await import('../../src/auth.js');

      // Should throw error for empty client_email
      // await assert.rejects(
      //   async () => await validateCredentials(invalidCredentials),
      //   { message: /client_email.*empty/ },
      //   'Should reject empty client_email'
      // );
    } finally {
      restoreEnv();
    }
  });

  it('should accept valid credentials', async () => {
    const validCredentials = {
      type: 'service_account',
      project_id: 'test-project',
      private_key: '-----BEGIN PRIVATE KEY-----\nMOCK_KEY\n-----END PRIVATE KEY-----\n',
      client_email: 'test@test-project.iam.gserviceaccount.com'
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(validCredentials);

      // TODO: Import validateCredentials
      // const { validateCredentials } = await import('../../src/auth.js');

      // Should not throw for valid credentials
      // await assert.doesNotReject(
      //   async () => await validateCredentials(validCredentials),
      //   'Should accept valid credentials'
      // );
    } finally {
      restoreEnv();
    }
  });

  it('should trigger fatal error handler on invalid credentials (exit code 1)', async () => {
    // Per T016: Fatal error handler should log to stderr and exit with code 1
    const invalidCredentials = {
      invalid: 'structure'
    };

    try {
      process.env.GOOGLE_SERVICE_ACCOUNT_KEY = JSON.stringify(invalidCredentials);

      // TODO: Import initializeAuth which should call fatal error handler
      // const { initializeAuth } = await import('../../src/auth.js');

      // Mock process.exit to prevent actual exit
      // let exitCode;
      // const originalExit = process.exit;
      // process.exit = (code) => { exitCode = code; throw new Error('EXIT'); };

      // try {
      //   await initializeAuth();
      // } catch (e) {
      //   if (e.message === 'EXIT') {
      //     assert.equal(exitCode, 1, 'Should exit with code 1 on invalid credentials');
      //   } else {
      //     throw e;
      //   }
      // } finally {
      //   process.exit = originalExit;
      // }
    } finally {
      restoreEnv();
    }
  });
});
|
||||
227
tests/unit/drive-client.test.js
Normal file
227
tests/unit/drive-client.test.js
Normal file
@@ -0,0 +1,227 @@
|
||||
/**
|
||||
* Unit Tests: Drive API Client
|
||||
*
|
||||
* Tests T031-T032: Test Drive API client query execution and pagination
|
||||
* Tests the drive-client.js module in isolation with mocked googleapis
|
||||
*
|
||||
* @module tests/unit/drive-client
|
||||
*/
|
||||
|
||||
import { describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
// =============================================================================
|
||||
// T031: Unit test for Drive API client query execution
|
||||
// =============================================================================
|
||||
|
||||
describe('T031: Drive API Client Query Execution', () => {
  it('should call drive.files.list() with correct query parameters', async () => {
    // Stub for googleapis drive.files.list() that resolves a single page
    // containing two documents and no continuation token.
    const filesListStub = mock.fn(async (params) => ({
      data: {
        files: [
          { id: 'doc1', name: 'Test Doc 1', mimeType: 'application/pdf', modifiedTime: '2024-03-01T10:00:00Z' },
          { id: 'doc2', name: 'Test Doc 2', mimeType: 'text/plain', modifiedTime: '2024-03-02T11:00:00Z' }
        ],
        nextPageToken: null
      }
    }));

    // TODO: Import queryDocuments function from src/drive-client.js when implemented
    // const { queryDocuments } = await import('../../src/drive-client.js');

    // Stubbed Drive client exposing only the surface under test.
    const driveStub = {
      files: {
        list: filesListStub
      }
    };

    // Expected query parameters from config/settings.js
    const expectedQuery = 'trashed = false'; // Default query
    const expectedFields = 'files(id, name, mimeType, modifiedTime)';
    const expectedPageSize = 1000;

    // Call queryDocuments (will be implemented)
    // const result = await queryDocuments(driveStub, expectedQuery);

    // Verify drive.files.list() was called with correct parameters
    // assert.equal(filesListStub.mock.calls.length, 1, 'Should call drive.files.list() once');

    // const callArgs = filesListStub.mock.calls[0].arguments[0];
    // assert.equal(callArgs.q, expectedQuery, 'Should use query from settings');
    // assert.equal(callArgs.fields, expectedFields, 'Should request correct fields');
    // assert.equal(callArgs.pageSize, expectedPageSize, 'Should use correct page size');

    // Verify result contains documents
    // assert.ok(Array.isArray(result), 'Should return array of documents');
    // assert.equal(result.length, 2, 'Should return 2 documents');
    // assert.equal(result[0].id, 'doc1', 'Should have correct document ID');
  });

  it('should use configurable Drive API filter from settings', async () => {
    // Empty single-page response; only the outgoing query matters here.
    const filesListStub = mock.fn(async () => ({
      data: { files: [], nextPageToken: null }
    }));

    const driveStub = {
      files: { list: filesListStub }
    };

    // Custom query filter (per clarification #9)
    const customQuery = "mimeType contains 'application/pdf' and trashed = false";

    // TODO: Call queryDocuments with custom query
    // await queryDocuments(driveStub, customQuery);

    // Verify custom query was used
    // const callArgs = filesListStub.mock.calls[0].arguments[0];
    // assert.equal(callArgs.q, customQuery, 'Should use custom query from settings');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T032: Unit test for Drive API pagination handling
|
||||
// =============================================================================
|
||||
|
||||
// Pagination suite: the mocks below embed assertions about call order
// (which pageToken arrives on which call), so a wrong pagination loop in
// the implementation surfaces as a rejected promise from the mocked
// drive.files.list() itself, not only from the commented-out asserts.
describe('T032: Drive API Pagination Handling', () => {
  it('should handle pageToken to fetch all results across multiple pages', async () => {
    // Mock Drive API with pagination (3 pages)
    let callCount = 0;
    const mockFilesList = mock.fn(async (params) => {
      callCount++;

      if (callCount === 1) {
        // First page
        return {
          data: {
            files: [
              { id: 'doc1', name: 'Doc 1', mimeType: 'application/pdf', modifiedTime: '2024-03-01T10:00:00Z' }
            ],
            nextPageToken: 'token_page_2'
          }
        };
      } else if (callCount === 2) {
        // Second page — the caller must echo back the token from page 1.
        assert.equal(params.pageToken, 'token_page_2', 'Should use pageToken from previous response');
        return {
          data: {
            files: [
              { id: 'doc2', name: 'Doc 2', mimeType: 'text/plain', modifiedTime: '2024-03-02T11:00:00Z' }
            ],
            nextPageToken: 'token_page_3'
          }
        };
      } else {
        // Third page (last)
        assert.equal(params.pageToken, 'token_page_3', 'Should use pageToken from previous response');
        return {
          data: {
            files: [
              { id: 'doc3', name: 'Doc 3', mimeType: 'application/pdf', modifiedTime: '2024-03-03T12:00:00Z' }
            ],
            nextPageToken: null // No more pages
          }
        };
      }
    });

    const mockDriveClient = {
      files: { list: mockFilesList }
    };

    // TODO: Call queryDocuments to fetch all pages
    // const result = await queryDocuments(mockDriveClient, 'trashed = false');

    // Verify all pages were fetched
    // assert.equal(mockFilesList.mock.calls.length, 3, 'Should call drive.files.list() 3 times for 3 pages');
    // assert.equal(result.length, 3, 'Should return all 3 documents from all pages');
    // assert.equal(result[0].id, 'doc1', 'Should have doc1 from page 1');
    // assert.equal(result[1].id, 'doc2', 'Should have doc2 from page 2');
    // assert.equal(result[2].id, 'doc3', 'Should have doc3 from page 3');
  });

  it('should collect up to 50,000 documents across pages', async () => {
    // Mock Drive API to return many pages (simulate large Drive)
    const documentsPerPage = 1000;
    const totalDocuments = 5000; // 5 pages
    let currentPage = 0;

    const mockFilesList = mock.fn(async (params) => {
      currentPage++;
      // Slice of the synthetic document range served by this page.
      const startId = (currentPage - 1) * documentsPerPage;
      const endId = Math.min(startId + documentsPerPage, totalDocuments);

      const files = [];
      for (let i = startId; i < endId; i++) {
        files.push({
          id: `doc${i}`,
          name: `Document ${i}`,
          mimeType: 'application/pdf',
          modifiedTime: '2024-03-01T10:00:00Z'
        });
      }

      return {
        data: {
          files,
          // Continuation token only while pages remain; null on the last page.
          nextPageToken: currentPage < Math.ceil(totalDocuments / documentsPerPage) ? `token_page_${currentPage + 1}` : null
        }
      };
    });

    const mockDriveClient = {
      files: { list: mockFilesList }
    };

    // TODO: Call queryDocuments
    // const result = await queryDocuments(mockDriveClient, 'trashed = false');

    // Verify all documents were collected
    // assert.equal(result.length, totalDocuments, `Should collect all ${totalDocuments} documents`);
    // assert.equal(mockFilesList.mock.calls.length, Math.ceil(totalDocuments / documentsPerPage), 'Should call API for each page');
  });

  it('should stop pagination at 50,000 document limit', async () => {
    // Mock Drive API to return more than 50k documents
    const documentsPerPage = 1000;
    let currentPage = 0;

    const mockFilesList = mock.fn(async () => {
      currentPage++;
      const files = [];
      for (let i = 0; i < documentsPerPage; i++) {
        files.push({
          id: `doc${currentPage}_${i}`,
          name: `Document ${currentPage}_${i}`,
          mimeType: 'application/pdf',
          modifiedTime: '2024-03-01T10:00:00Z'
        });
      }

      // Always return nextPageToken to simulate unlimited documents —
      // pagination only ends if the implementation enforces the 50k cap.
      return {
        data: {
          files,
          nextPageToken: `token_page_${currentPage + 1}`
        }
      };
    });

    const mockDriveClient = {
      files: { list: mockFilesList }
    };

    // TODO: Call queryDocuments - should stop at 50k
    // await assert.rejects(
    //   async () => await queryDocuments(mockDriveClient, 'trashed = false'),
    //   { message: /50,?000/ },
    //   'Should throw error when exceeding 50k document limit'
    // );

    // Verify pagination stopped at 50k (50 pages x 1000 docs/page)
    // assert.ok(currentPage <= 50, 'Should stop pagination before collecting too many documents');
  });
});
|
||||
438
tests/unit/proxy-export.test.js.old
Normal file
438
tests/unit/proxy-export.test.js.old
Normal file
@@ -0,0 +1,438 @@
|
||||
/**
|
||||
* Unit Tests: Document Export Logic
|
||||
*
|
||||
* Tests document export functions in proxy.js
|
||||
* Tests T012, T013, T014, T040, T041
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: validateDocumentId() (T012)', () => {

  // Mock function to test (will be in proxy.js):
  // an ID is valid iff it is 8-128 characters drawn from [a-zA-Z0-9_-].
  const validateDocumentId = (id) => /^[a-zA-Z0-9_-]{8,128}$/.test(id);

  it('T012: should accept valid 8-character alphanumeric ID', () => {
    // Exactly at the lower length bound.
    assert.equal(validateDocumentId('1BxAA789'), true, 'Should accept 8-character alphanumeric ID');
  });

  it('T012: should accept valid 128-character alphanumeric ID', () => {
    // Exactly at the upper length bound.
    assert.equal(validateDocumentId('a'.repeat(128)), true, 'Should accept 128-character alphanumeric ID');
  });

  it('T012: should accept IDs with hyphens and underscores', () => {
    assert.equal(validateDocumentId('1BxAA-test-123'), true, 'Should accept IDs with hyphens');
    assert.equal(validateDocumentId('1BxAA_test_123'), true, 'Should accept IDs with underscores');
    assert.equal(validateDocumentId('1BxAA-test_123'), true, 'Should accept IDs with both hyphens and underscores');
  });

  it('T012: should reject IDs shorter than 8 characters', () => {
    // One character below the lower bound.
    assert.equal(validateDocumentId('1BxAA78'), false, 'Should reject IDs shorter than 8 characters');
  });

  it('T012: should reject IDs longer than 128 characters', () => {
    // One character above the upper bound.
    assert.equal(validateDocumentId('a'.repeat(129)), false, 'Should reject IDs longer than 128 characters');
  });

  it('T012: should reject IDs with invalid characters', () => {
    // Each candidate contains one character outside [a-zA-Z0-9_-].
    const rejectedIds = [
      '1BxAA@test', // @ symbol
      '1BxAA test', // space
      '1BxAA!test', // exclamation
      '1BxAA#test', // hash
      '1BxAA.test', // period
    ];

    for (const id of rejectedIds) {
      assert.equal(validateDocumentId(id), false, `Should reject ID with invalid character: ${id}`);
    }
  });

  it('T012: should reject empty string', () => {
    assert.equal(validateDocumentId(''), false, 'Should reject empty string');
  });
});
|
||||
|
||||
describe('Unit: findExportLink() (T013, T041)', () => {

  // Mock function to test (will be in proxy.js).
  // Returns the first export URL whose MIME type matches the requested
  // format, following a per-format preference order; null when nothing fits.
  function findExportLink(exportLinks, format = 'markdown') {
    if (!exportLinks) return null;

    // Preference-ordered MIME types for each supported output format.
    const preferences = {
      'markdown': ['text/x-markdown', 'text/markdown', 'text/html'],
      'html': ['text/html'],
      'pdf': ['application/pdf']
    };

    const candidates = preferences[format.toLowerCase()] || [];
    const matched = candidates.find((mimeType) => exportLinks[mimeType]);

    return matched ? exportLinks[matched] : null;
  }

  it('T013: should select text/x-markdown from exportLinks when available', () => {
    // Given: exportLinks with text/x-markdown
    const links = {
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: markdown lookup picks the most specific MIME type.
    assert.equal(findExportLink(links, 'markdown'), links['text/x-markdown'], 'Should select text/x-markdown');
  });

  it('T013: should fall back to text/html when text/x-markdown unavailable', () => {
    // Given: exportLinks without text/x-markdown or text/markdown
    const links = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: markdown lookup falls through to HTML.
    assert.equal(findExportLink(links, 'markdown'), links['text/html'], 'Should fall back to text/html');
  });

  it('T013: should prefer text/markdown over text/html when available', () => {
    // Given: exportLinks with text/markdown
    const links = {
      'text/markdown': 'https://docs.google.com/export?format=markdown',
      'text/html': 'https://docs.google.com/export?format=html'
    };

    // When/Then: text/markdown outranks text/html in the preference list.
    assert.equal(findExportLink(links, 'markdown'), links['text/markdown'], 'Should prefer text/markdown');
  });

  it('T041: should select text/html MIME type for html format', () => {
    // Given: exportLinks with multiple formats
    const links = {
      'text/html': 'https://docs.google.com/export?format=html',
      'text/x-markdown': 'https://docs.google.com/export?format=markdown',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then: html lookup ignores the markdown entries.
    assert.equal(findExportLink(links, 'html'), links['text/html'], 'Should select text/html for html format');
  });

  it('T041: should select application/pdf MIME type for pdf format', () => {
    // Given: exportLinks with multiple formats
    const links = {
      'text/html': 'https://docs.google.com/export?format=html',
      'application/pdf': 'https://docs.google.com/export?format=pdf'
    };

    // When/Then
    assert.equal(findExportLink(links, 'pdf'), links['application/pdf'], 'Should select application/pdf for pdf format');
  });

  it('T041: should return null when requested format unavailable', () => {
    // Given: exportLinks without PDF
    const links = {
      'text/html': 'https://docs.google.com/export?format=html'
    };

    // When/Then
    assert.equal(findExportLink(links, 'pdf'), null, 'Should return null when format unavailable');
  });

  it('should return null when exportLinks is null or undefined', () => {
    assert.equal(findExportLink(null, 'markdown'), null, 'Should return null for null exportLinks');
    assert.equal(findExportLink(undefined, 'markdown'), null, 'Should return null for undefined exportLinks');
  });
});
|
||||
|
||||
describe('Unit: validateDocumentSize() (T014)', () => {

  // Mock function to test (will be in proxy.js).
  // Enforces the 20MB export size cap; metadata without a size property
  // (native Docs/Sheets/Slides) is always considered valid.
  function validateDocumentSize(metadata) {
    // Native Drive files (Docs, Sheets, Slides) don't have size property
    if (!metadata.size) {
      return { valid: true };
    }

    const limitBytes = 20 * 1024 * 1024; // 20MB
    const sizeBytes = Number.parseInt(metadata.size, 10); // Drive API reports size as a string

    if (sizeBytes > limitBytes) {
      return {
        valid: false,
        error: 'Document exceeds 20MB size limit',
        statusCode: 413
      };
    }

    return { valid: true, size: sizeBytes };
  }

  it('T014: should accept documents under 20MB', () => {
    // Given: Document metadata with size < 20MB
    const result = validateDocumentSize({
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '10485760' // 10MB
    });

    // Then: Valid, with the parsed numeric size echoed back.
    assert.equal(result.valid, true, 'Should accept document < 20MB');
    assert.equal(result.size, 10485760, 'Should return parsed size');
  });

  it('T014: should accept documents exactly at 20MB', () => {
    // Given: Document metadata with size exactly 20MB (inclusive bound)
    const result = validateDocumentSize({
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '20971520' // Exactly 20MB
    });

    // Then
    assert.equal(result.valid, true, 'Should accept document exactly at 20MB');
  });

  it('T014: should reject documents over 20MB', () => {
    // Given: Document metadata with size > 20MB
    const result = validateDocumentSize({
      id: '1BxAA_test',
      name: 'large.pdf',
      size: '20971521' // 20MB + 1 byte
    });

    // Then: Invalid with HTTP 413 semantics.
    assert.equal(result.valid, false, 'Should reject document > 20MB');
    assert.equal(result.statusCode, 413, 'Should return 413 status code');
    assert.ok(result.error, 'Should include error message');
  });

  it('T014: should accept native Google Drive documents without size', () => {
    // Given: Google Doc metadata (no size property — native Drive files
    // are exported on-the-fly)
    const result = validateDocumentSize({
      id: '1BxAA_test',
      name: 'My Document',
      mimeType: 'application/vnd.google-apps.document'
    });

    // Then
    assert.equal(result.valid, true, 'Should accept native Drive documents without size');
  });

  it('T014: should handle size as number string', () => {
    // Given: Document metadata with size as string (Drive API returns strings)
    const result = validateDocumentSize({
      id: '1BxAA_test',
      name: 'test.pdf',
      size: '5242880' // 5MB as string
    });

    // Then: Parsed and validated correctly.
    assert.equal(result.valid, true, 'Should handle size as string');
    assert.equal(result.size, 5242880, 'Should parse size to number');
  });
});
|
||||
|
||||
describe('Unit: parseFormatParam() (T040)', () => {

  // Mock of the query-string parser that will live in proxy.js.
  // Contract: missing format -> markdown default; unknown format -> 400.
  function parseFormatParam(url) {
    const requested = new URL(url, 'http://localhost').searchParams.get('format');

    // An absent (or empty) parameter falls back to the markdown default.
    if (!requested) {
      return { valid: true, format: 'markdown' };
    }

    const format = requested.toLowerCase();
    const allowed = ['markdown', 'html', 'pdf'];

    if (allowed.includes(format)) {
      return { valid: true, format };
    }

    return {
      valid: false,
      error: 'Invalid format parameter',
      statusCode: 400
    };
  }

  it('T040: should extract format parameter from query string', () => {
    const result = parseFormatParam('/1BxAA_test?format=html');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'html', 'Should extract html format');
  });

  it('T040: should validate against allowed values (markdown|html|pdf)', () => {
    const validUrls = [
      '/doc?format=markdown',
      '/doc?format=html',
      '/doc?format=pdf'
    ];

    // Every supported format must pass validation.
    for (const url of validUrls) {
      assert.equal(parseFormatParam(url).valid, true, `Should accept format in ${url}`);
    }
  });

  it('T040: should return default markdown when format parameter missing', () => {
    const result = parseFormatParam('/1BxAA_test');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'markdown', 'Should default to markdown');
  });

  it('T040: should normalize format to lowercase', () => {
    const mixedCaseUrls = [
      '/doc?format=HTML',
      '/doc?format=Markdown',
      '/doc?format=PDF'
    ];

    assert.equal(parseFormatParam(mixedCaseUrls[0]).format, 'html', 'Should normalize HTML to html');
    assert.equal(parseFormatParam(mixedCaseUrls[1]).format, 'markdown', 'Should normalize Markdown to markdown');
    assert.equal(parseFormatParam(mixedCaseUrls[2]).format, 'pdf', 'Should normalize PDF to pdf');
  });

  it('T040: should return 400 status for invalid format values', () => {
    const result = parseFormatParam('/1BxAA_test?format=invalid');

    assert.equal(result.valid, false, 'Should be invalid');
    assert.equal(result.statusCode, 400, 'Should return 400 status');
    assert.ok(result.error, 'Should include error message');
  });

  it('T040: should handle multiple query parameters', () => {
    const result = parseFormatParam('/1BxAA_test?format=pdf&other=value&another=param');

    assert.equal(result.valid, true, 'Should be valid');
    assert.equal(result.format, 'pdf', 'Should extract format from multi-param URL');
  });
});
|
||||
377
tests/unit/proxy-routing.test.js.old
Normal file
377
tests/unit/proxy-routing.test.js.old
Normal file
@@ -0,0 +1,377 @@
|
||||
/**
|
||||
* Unit Tests: Request Routing Logic
|
||||
*
|
||||
* Tests request routing and error mapping in proxy.js
|
||||
* Tests T015, T016, T050
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: handleRequest() Routing (T015)', () => {

  // Mock of the request router that will live in proxy.js.
  // GET-only API with three routes: /health, /sitemap.xml, /:documentId.
  function parseRoute(method, url) {
    if (method !== 'GET') {
      return { route: null, error: 'Method not allowed', statusCode: 405 };
    }

    const { pathname } = new URL(url, 'http://localhost');

    switch (pathname) {
      case '/health':
        return { route: 'health' };
      case '/sitemap.xml':
        return { route: 'sitemap' };
      default: {
        // Document route: exactly one path segment of [a-zA-Z0-9_-].
        const match = pathname.match(/^\/([a-zA-Z0-9_-]+)$/);
        if (match) {
          return { route: 'document', documentId: match[1] };
        }
        return { route: null, error: 'Not found', statusCode: 404 };
      }
    }
  }

  it('T015: should route /health to health check handler', () => {
    const result = parseRoute('GET', '/health');

    assert.equal(result.route, 'health', 'Should route to health handler');
  });

  it('T015: should route /:documentId to document export handler', () => {
    const result = parseRoute('GET', '/1BxAA_testDocument123');

    assert.equal(result.route, 'document', 'Should route to document handler');
    assert.equal(result.documentId, '1BxAA_testDocument123', 'Should extract document ID');
  });

  it('T015: should route /sitemap.xml to sitemap handler', () => {
    const result = parseRoute('GET', '/sitemap.xml');

    assert.equal(result.route, 'sitemap', 'Should route to sitemap handler');
  });

  it('T015: should return 404 for unknown routes', () => {
    const result = parseRoute('GET', '/unknown/path');

    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 404, 'Should return 404 status');
  });

  it('T015: should return 405 for non-GET methods', () => {
    const result = parseRoute('POST', '/1BxAA_test');

    assert.equal(result.route, null, 'Should not match any route');
    assert.equal(result.statusCode, 405, 'Should return 405 status');
  });

  it('T015: should extract documentId with hyphens and underscores', () => {
    const candidates = [
      '/1BxAA-test-123',
      '/1BxAA_test_123',
      '/1BxAA-test_123'
    ];

    // Hyphens and underscores are both legal in Drive document IDs.
    for (const url of candidates) {
      const result = parseRoute('GET', url);
      assert.equal(result.route, 'document', `Should route ${url} to document handler`);
      assert.ok(result.documentId, `Should extract document ID from ${url}`);
    }
  });
});
|
||||
|
||||
describe('Unit: mapDriveError() (T016)', () => {

  // Mock of the Drive-to-HTTP error mapper that will live in proxy.js.
  // Reads the GaxiosError shape: `code` takes priority, then `response.status`.
  function mapDriveError(error) {
    const statusCode = error.code || error.response?.status || 500;

    const mapping = {
      401: { status: 401, message: 'Unauthorized' },
      403: { status: 403, message: 'Forbidden' },
      404: { status: 404, message: 'Not Found' },
      429: { status: 429, message: 'Too Many Requests', retryAfter: 60 },
      500: { status: 500, message: 'Internal Server Error' },
      503: { status: 503, message: 'Service Unavailable' }
    };

    // Anything unrecognized collapses to a generic 500.
    return mapping[statusCode] || { status: 500, message: 'Internal Server Error' };
  }

  it('T016: should convert Drive API 404 to HTTP 404', () => {
    const mapped = mapDriveError({ code: 404, message: 'File not found' });

    assert.equal(mapped.status, 404, 'Should map to 404 status');
  });

  it('T016: should convert Drive API 403 to HTTP 403', () => {
    const mapped = mapDriveError({ code: 403, message: 'Permission denied' });

    assert.equal(mapped.status, 403, 'Should map to 403 status');
  });

  it('T016: should convert Drive API 401 to HTTP 401', () => {
    const mapped = mapDriveError({ code: 401, message: 'Invalid credentials' });

    assert.equal(mapped.status, 401, 'Should map to 401 status');
  });

  it('T016: should convert Drive API 429 to HTTP 429 with Retry-After', () => {
    const mapped = mapDriveError({ code: 429, message: 'Rate limit exceeded' });

    assert.equal(mapped.status, 429, 'Should map to 429 status');
    assert.equal(mapped.retryAfter, 60, 'Should include Retry-After of 60 seconds');
  });

  it('T016: should convert Drive API 500 to HTTP 500', () => {
    const mapped = mapDriveError({ code: 500, message: 'Internal error' });

    assert.equal(mapped.status, 500, 'Should map to 500 status');
  });

  it('T016: should convert Drive API 503 to HTTP 503', () => {
    const mapped = mapDriveError({ code: 503, message: 'Service unavailable' });

    assert.equal(mapped.status, 503, 'Should map to 503 status');
  });

  it('should handle errors without code by checking response.status', () => {
    // GaxiosError sometimes carries the status only on the response object.
    const mapped = mapDriveError({
      response: { status: 404, statusText: 'Not Found' },
      message: 'Request failed'
    });

    assert.equal(mapped.status, 404, 'Should map using response.status');
  });

  it('should default to 500 for unknown error codes', () => {
    const mapped = mapDriveError({ code: 999, message: 'Unknown error' });

    assert.equal(mapped.status, 500, 'Should default to 500 for unknown codes');
  });
});
|
||||
|
||||
describe('Unit: Rate Limiting (T050)', () => {

  // Mock sliding-window limiter that will live in proxy.js.
  // Tracks request timestamps per IP; default 100 requests per 60s window.
  class RateLimiter {
    constructor(maxRequests = 100, windowMs = 60000) {
      this.maxRequests = maxRequests;
      this.windowMs = windowMs;
      this.requests = new Map(); // ip -> [timestamps]
    }

    checkLimit(ip) {
      const now = Date.now();
      const cutoff = now - this.windowMs;

      // Keep only the timestamps still inside the current window.
      const recent = (this.requests.get(ip) || []).filter((ts) => ts > cutoff);

      if (recent.length >= this.maxRequests) {
        // Seconds until the oldest in-window request ages out.
        const retryAfter = Math.ceil((recent[0] + this.windowMs - now) / 1000);
        return { allowed: false, statusCode: 429, retryAfter };
      }

      recent.push(now);
      this.requests.set(ip, recent);
      return { allowed: true };
    }

    cleanup() {
      const cutoff = Date.now() - this.windowMs;

      // Drop IPs whose every timestamp has aged out; trim the rest.
      for (const [ip, timestamps] of this.requests.entries()) {
        const recent = timestamps.filter((ts) => ts > cutoff);
        if (recent.length === 0) {
          this.requests.delete(ip);
        } else {
          this.requests.set(ip, recent);
        }
      }
    }
  }

  it('T050: should allow 100 requests from same IP within window', () => {
    const limiter = new RateLimiter(100, 60000);
    const ip = '192.168.1.1';

    let allowedCount = 0;
    for (let i = 0; i < 100; i++) {
      if (limiter.checkLimit(ip).allowed) {
        allowedCount++;
      }
    }

    assert.equal(allowedCount, 100, 'Should allow 100 requests');
  });

  it('T050: should return 429 with Retry-After header on 101st request', () => {
    const limiter = new RateLimiter(100, 60000);
    const ip = '192.168.1.1';

    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(ip);
    }
    const rejected = limiter.checkLimit(ip);

    assert.equal(rejected.allowed, false, 'Should not allow 101st request');
    assert.equal(rejected.statusCode, 429, 'Should return 429 status');
    assert.ok(rejected.retryAfter > 0, 'Should include Retry-After in seconds');
    assert.ok(rejected.retryAfter <= 60, 'Retry-After should be <= 60 seconds');
  });

  it('T050: should track requests per IP independently', () => {
    const limiter = new RateLimiter(100, 60000);
    const firstIp = '192.168.1.1';
    const secondIp = '192.168.1.2';

    for (let i = 0; i < 100; i++) {
      limiter.checkLimit(firstIp);
      limiter.checkLimit(secondIp);
    }

    // Each IP filled its own quota, so both should now be rejected.
    assert.equal(limiter.checkLimit(firstIp).allowed, false, 'IP1 should be rate limited');
    assert.equal(limiter.checkLimit(secondIp).allowed, false, 'IP2 should be rate limited');
  });

  it('T050: should cleanup old entries outside time window', () => {
    const limiter = new RateLimiter(10, 1000); // 10 req/sec for testing
    const ip = '192.168.1.1';

    for (let i = 0; i < 10; i++) {
      limiter.checkLimit(ip);
    }

    // Entries are still inside the window, so cleanup must keep them.
    limiter.cleanup();

    assert.ok(limiter.requests.has(ip), 'Should have IP in requests map');
  });

  it('T050: should reset limit after time window expires', () => {
    const limiter = new RateLimiter(5, 100); // 5 req / 100ms
    const ip = '192.168.1.1';

    for (let i = 0; i < 5; i++) {
      limiter.checkLimit(ip);
    }

    // Age every stored timestamp past the window instead of sleeping.
    const aged = limiter.requests.get(ip).map((ts) => ts - 200);
    limiter.requests.set(ip, aged);

    assert.equal(limiter.checkLimit(ip).allowed, true, 'Should allow request after window expires');
  });
});
|
||||
386
tests/unit/proxy-sitemap.test.js.old
Normal file
386
tests/unit/proxy-sitemap.test.js.old
Normal file
@@ -0,0 +1,386 @@
|
||||
/**
|
||||
* Unit Tests: Sitemap Generation Logic
|
||||
*
|
||||
* Tests sitemap XML generation functions
|
||||
* Tests T028, T029, T030
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
describe('Unit: escapeXml() (T028)', () => {

  // Mock XML escape function (will be in proxy.js).
  // NOTE: the replacement strings here were corrupted by HTML-entity decoding
  // (e.g. '&amp;' had collapsed to '&', turning every replace into a no-op and
  // breaking the quote case); restored to the five predefined XML entities.
  // Order matters: '&' must be escaped first, otherwise the ampersands
  // introduced by the later replacements would be double-escaped.
  function escapeXml(str) {
    if (typeof str !== 'string') return '';

    return str
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;')
      .replace(/'/g, '&apos;');
  }

  it('T028: should escape < character to &lt;', () => {
    // Given: String with < character
    const input = 'test < value';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: Should escape <
    assert.equal(output, 'test &lt; value', 'Should escape <');
  });

  it('T028: should escape > character to &gt;', () => {
    const input = 'test > value';

    const output = escapeXml(input);

    assert.equal(output, 'test &gt; value', 'Should escape >');
  });

  it('T028: should escape & character to &amp;', () => {
    const input = 'test & value';

    const output = escapeXml(input);

    assert.equal(output, 'test &amp; value', 'Should escape &');
  });

  it('T028: should escape " character to &quot;', () => {
    const input = 'test "value"';

    const output = escapeXml(input);

    assert.equal(output, 'test &quot;value&quot;', 'Should escape "');
  });

  it("T028: should escape ' character to &apos;", () => {
    const input = "test 'value'";

    const output = escapeXml(input);

    assert.equal(output, 'test &apos;value&apos;', "Should escape '");
  });

  it('T028: should escape multiple special characters in correct order', () => {
    // Given: String with multiple special characters
    const input = '<tag attr="value" other=\'test\'>content & more</tag>';

    // When: Escaping for XML
    const output = escapeXml(input);

    // Then: All five entities appear, with no double-escaped ampersands
    assert.equal(
      output,
      '&lt;tag attr=&quot;value&quot; other=&apos;test&apos;&gt;content &amp; more&lt;/tag&gt;',
      'Should escape all XML special characters'
    );
  });

  it('T028: should handle strings without special characters', () => {
    const input = 'normal text 123';

    const output = escapeXml(input);

    assert.equal(output, input, 'Should not modify strings without special chars');
  });

  it('T028: should handle empty string', () => {
    const output = escapeXml('');

    assert.equal(output, '', 'Should handle empty string');
  });

  it('T028: should handle non-string input gracefully', () => {
    // Given: Non-string inputs
    const inputs = [null, undefined, 123, { foo: 'bar' }];

    // Then: Should return empty string for non-strings
    inputs.forEach((input) => {
      const output = escapeXml(input);
      assert.equal(output, '', `Should return empty string for ${typeof input}`);
    });
  });
});
|
||||
|
||||
describe('Unit: formatSitemapEntry() (T029)', () => {

  // Mock sitemap entry formatter (will be in proxy.js).
  // NOTE: the inner escapeXml replacement strings were corrupted by
  // HTML-entity decoding ('&amp;' -> '&', etc.), making the escaping a no-op;
  // restored here. '&' is escaped first to avoid double-escaping.
  function formatSitemapEntry(document, baseUrl) {
    function escapeXml(str) {
      return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    }

    const loc = `${baseUrl}/${document.id}`;
    const lastmod = document.modifiedTime;

    return `  <url>
    <loc>${escapeXml(loc)}</loc>
    <lastmod>${lastmod}</lastmod>
  </url>`;
  }

  it('T029: should convert DriveDocument to XML url element', () => {
    // Given: DriveDocument metadata
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // When: Formatting sitemap entry
    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Should generate valid XML
    assert.ok(xml.includes('<url>'), 'Should contain opening url tag');
    assert.ok(xml.includes('</url>'), 'Should contain closing url tag');
    assert.ok(xml.includes('<loc>'), 'Should contain loc element');
    assert.ok(xml.includes('</loc>'), 'Should contain closing loc tag');
    assert.ok(xml.includes('<lastmod>'), 'Should contain lastmod element');
    assert.ok(xml.includes('</lastmod>'), 'Should contain closing lastmod tag');
  });

  it('T029: should include correct location URL with documentId', () => {
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    const xml = formatSitemapEntry(document, baseUrl);

    // Then: Location should point to adapter endpoint
    assert.ok(
      xml.includes(`<loc>http://localhost:3000/${document.id}</loc>`),
      'Should include correct location URL'
    );
  });

  it('T029: should include ISO 8601 lastmod timestamp', () => {
    const document = {
      id: '1BxAA_test123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    const xml = formatSitemapEntry(document, baseUrl);

    assert.ok(
      xml.includes('<lastmod>2026-03-06T10:30:00Z</lastmod>'),
      'Should include ISO 8601 lastmod timestamp'
    );
  });

  it('T029: should escape special XML characters in URL', () => {
    // Given: DriveDocument with special characters in ID (edge case)
    const document = {
      id: '1BxAA-test&123',
      name: 'Test Document',
      modifiedTime: '2026-03-06T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    const xml = formatSitemapEntry(document, baseUrl);

    // Then: the raw '&' in the URL must appear as the &amp; entity
    assert.ok(
      xml.includes('&amp;'),
      'Should escape special XML characters in URL'
    );
  });

  it('T029: should handle different baseUrl formats', () => {
    const document = {
      id: '1BxAA_test',
      name: 'Test',
      modifiedTime: '2026-03-06T10:30:00Z'
    };

    const baseUrls = [
      'http://localhost:3000',
      'https://example.com',
      'https://api.example.com/v1'
    ];

    // Then: Should generate correct loc for each
    baseUrls.forEach((baseUrl) => {
      const xml = formatSitemapEntry(document, baseUrl);
      assert.ok(
        xml.includes(`<loc>${baseUrl}/${document.id}</loc>`),
        `Should work with baseUrl: ${baseUrl}`
      );
    });
  });
});
|
||||
|
||||
describe('Unit: generateSitemap() Structure (T030)', () => {

  // Mock sitemap generator structure (will be in proxy.js).
  // NOTE: the inner escapeXml replacement strings were corrupted by
  // HTML-entity decoding ('&amp;' -> '&', etc.), making the escaping a no-op;
  // restored here. '&' is escaped first to avoid double-escaping.
  function buildSitemapXml(documents, baseUrl) {
    function escapeXml(str) {
      return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    }

    let xml = '<?xml version="1.0" encoding="UTF-8"?>\n';
    xml += '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n';

    documents.forEach((doc) => {
      const loc = `${baseUrl}/${doc.id}`;
      xml += `  <url>\n`;
      xml += `    <loc>${escapeXml(loc)}</loc>\n`;
      xml += `    <lastmod>${doc.modifiedTime}</lastmod>\n`;
      xml += `  </url>\n`;
    });

    xml += '</urlset>';

    return xml;
  }

  it('T030: should build complete XML with declaration', () => {
    // Given: Array of documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should start with XML declaration
    assert.ok(
      xml.startsWith('<?xml version="1.0"'),
      'Should start with XML declaration'
    );
  });

  it('T030: should include correct sitemap namespace', () => {
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include sitemap protocol namespace
    assert.ok(
      xml.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'),
      'Should include correct sitemap namespace'
    );
  });

  it('T030: should include closing urlset tag', () => {
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    const xml = buildSitemapXml(documents, baseUrl);

    assert.ok(xml.endsWith('</urlset>'), 'Should end with closing urlset tag');
  });

  it('T030: should include multiple url entries for multiple documents', () => {
    // Given: Multiple documents
    const documents = [
      { id: '1BxAA_doc1', name: 'Doc 1', modifiedTime: '2026-03-06T10:00:00Z' },
      { id: '2CyBB_doc2', name: 'Doc 2', modifiedTime: '2026-03-06T11:00:00Z' },
      { id: '3DzCC_doc3', name: 'Doc 3', modifiedTime: '2026-03-06T12:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    // When: Building sitemap XML
    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should include all documents
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 3, 'Should include 3 url entries');

    // Then: Each document should have its loc
    documents.forEach((doc) => {
      assert.ok(
        xml.includes(`<loc>http://localhost:3000/${doc.id}</loc>`),
        `Should include url entry for ${doc.id}`
      );
    });
  });

  it('T030: should handle empty document list', () => {
    // Given: Empty documents array
    const documents = [];
    const baseUrl = 'http://localhost:3000';

    const xml = buildSitemapXml(documents, baseUrl);

    // Then: Should still have valid XML structure
    assert.ok(xml.includes('<?xml version'), 'Should have XML declaration');
    assert.ok(xml.includes('<urlset'), 'Should have urlset opening');
    assert.ok(xml.includes('</urlset>'), 'Should have urlset closing');

    // Then: Should have no url entries
    const urlCount = (xml.match(/<url>/g) || []).length;
    assert.equal(urlCount, 0, 'Should have no url entries');
  });

  it('T030: should generate valid XML that browsers can parse', () => {
    const documents = [
      { id: '1BxAA_test', name: 'Test', modifiedTime: '2026-03-06T10:00:00Z' }
    ];
    const baseUrl = 'http://localhost:3000';

    const xml = buildSitemapXml(documents, baseUrl);

    // Then: XML should be well-formed (basic balanced-tag checks)
    const openingUrlset = (xml.match(/<urlset/g) || []).length;
    const closingUrlset = (xml.match(/<\/urlset>/g) || []).length;
    assert.equal(openingUrlset, closingUrlset, 'urlset tags should be balanced');

    const openingUrl = (xml.match(/<url>/g) || []).length;
    const closingUrl = (xml.match(/<\/url>/g) || []).length;
    assert.equal(openingUrl, closingUrl, 'url tags should be balanced');
  });
});
|
||||
317
tests/unit/queue.test.js
Normal file
317
tests/unit/queue.test.js
Normal file
@@ -0,0 +1,317 @@
|
||||
/**
|
||||
* Unit Tests: FIFO Request Queue
|
||||
*
|
||||
* Tests T038-T039: Test FIFO queue implementation
|
||||
* Tests the queue.js module in isolation
|
||||
*
|
||||
* @module tests/unit/queue
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
// =============================================================================
|
||||
// T038: Unit test for FIFO queue enqueue/dequeue
|
||||
// =============================================================================
|
||||
|
||||
describe('T038: FIFO Queue Enqueue/Dequeue', () => {
  it('should enqueue and dequeue requests in FIFO order', async () => {
    // TODO: Import RequestQueue from src/queue.js
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    const completionOrder = [];

    // Build three tasks that record the order in which they finish.
    const makeTask = (label, result) => async () => {
      await delay(10);
      completionOrder.push(label);
      return result;
    };

    const firstTask = makeTask('task1', 'result1');
    const secondTask = makeTask('task2', 'result2');
    const thirdTask = makeTask('task3', 'result3');

    // Enqueue all three, then wait for them:
    // const p1 = queue.enqueue(firstTask);
    // const p2 = queue.enqueue(secondTask);
    // const p3 = queue.enqueue(thirdTask);
    // await Promise.all([p1, p2, p3]);

    // Completion order must match submission order:
    // assert.deepEqual(completionOrder, ['task1', 'task2', 'task3'], 'Tasks should complete in FIFO order');
  });

  it('should process tasks sequentially (one at a time)', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    let inFlight = 0;
    let peakInFlight = 0;

    // Each task bumps a counter on entry so any overlap would be visible
    // in the recorded peak.
    const createTask = (id) => async () => {
      inFlight++;
      peakInFlight = Math.max(peakInFlight, inFlight);

      await delay(50);

      inFlight--;
      return `task${id}`;
    };

    // Enqueue multiple tasks
    const promises = [];
    for (let id = 1; id <= 5; id++) {
      // promises.push(queue.enqueue(createTask(id)));
    }

    // await Promise.all(promises);

    // With strict sequential processing only one task is ever in flight:
    // assert.equal(peakInFlight, 1, 'Only one task should be active at a time');
  });

  it('should maintain queue order when tasks are added during processing', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    const completionOrder = [];

    // Start a slow task, then append two more while it is still running:
    // queue.enqueue(async () => {
    //   await delay(20);
    //   completionOrder.push('task1');
    // });
    // await delay(5);
    // queue.enqueue(async () => {
    //   await delay(10);
    //   completionOrder.push('task2');
    // });
    // await delay(5);
    // queue.enqueue(async () => {
    //   await delay(10);
    //   completionOrder.push('task3');
    // });

    // Give the queue time to drain:
    // await delay(100);

    // assert.deepEqual(completionOrder, ['task1', 'task2', 'task3'], 'Should maintain FIFO order even when tasks added during processing');
  });

  it('should return task result through promise', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    const task = async () => 'test-result';

    // const result = await queue.enqueue(task);
    // assert.equal(result, 'test-result', 'Should return task result through promise');
  });

  it('should propagate task errors through promise', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    const task = async () => {
      throw new Error('Task failed');
    };

    // await assert.rejects(
    //   async () => await queue.enqueue(task),
    //   { message: 'Task failed' },
    //   'Should propagate task error'
    // );
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T039: Unit test for FIFO queue concurrent request handling
|
||||
// =============================================================================
|
||||
|
||||
describe('T039: FIFO Queue Concurrent Request Handling', () => {
  it('should use processing flag to prevent simultaneous execution', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    let checkpoints = [];

    // Every task records a start and an end checkpoint with a timestamp
    // so overlapping execution can be detected afterwards.
    const createTask = (id) => async () => {
      checkpoints.push({ id, event: 'start', time: Date.now() });

      await delay(30);

      checkpoints.push({ id, event: 'end', time: Date.now() });

      return id;
    };

    // Enqueue 3 tasks simultaneously
    const promises = [
      // queue.enqueue(createTask(1)),
      // queue.enqueue(createTask(2)),
      // queue.enqueue(createTask(3))
    ];

    // await Promise.all(promises);

    // The processing flag must serialize execution: task N ends before
    // task N+1 starts.
    // const task1End = checkpoints.find(cp => cp.id === 1 && cp.event === 'end');
    // const task2Start = checkpoints.find(cp => cp.id === 2 && cp.event === 'start');
    // const task2End = checkpoints.find(cp => cp.id === 2 && cp.event === 'end');
    // const task3Start = checkpoints.find(cp => cp.id === 3 && cp.event === 'start');

    // assert.ok(task1End.time <= task2Start.time, 'Task 2 should start after Task 1 ends');
    // assert.ok(task2End.time <= task3Start.time, 'Task 3 should start after Task 2 ends');
  });

  it('should clear processing flag after task completes', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    // Run a single task to completion:
    // await queue.enqueue(async () => {
    //   await delay(10);
    //   return 'done';
    // });

    // The queue must be idle again afterwards:
    // assert.equal(queue.isProcessing(), false, 'Processing flag should be cleared after task completes');
  });

  it('should clear processing flag even if task throws error', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    try {
      // await queue.enqueue(async () => {
      //   await delay(10);
      //   throw new Error('Task failed');
      // });
    } catch (e) {
      // The rejection is expected here.
    }

    // assert.equal(queue.isProcessing(), false, 'Processing flag should be cleared even after task error');

    // And the queue keeps working afterwards:
    // const result = await queue.enqueue(async () => 'next-task');
    // assert.equal(result, 'next-task', 'Next task should process successfully after error');
  });

  it('should handle empty queue correctly (no processing when queue empty)', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    // assert.equal(queue.isProcessing(), false, 'Processing flag should be false for empty queue');
    // assert.equal(queue.getQueueLength(), 0, 'Queue should be empty');
  });

  it('should use EventEmitter for queue management', async () => {
    // Per task spec: "Implement FIFO request queue class in src/queue.js using Node.js EventEmitter"

    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    // assert.ok(queue.on, 'Queue should have EventEmitter methods');
    // assert.ok(queue.emit, 'Queue should have emit method');
  });

  it('should maintain queue array for pending tasks', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    // Enqueue without awaiting:
    // queue.enqueue(async () => {
    //   await delay(50);
    //   return 'task1';
    // });
    // queue.enqueue(async () => 'task2');
    // queue.enqueue(async () => 'task3');

    // Let the first task start, then inspect the backlog:
    // await delay(10);

    // task1 is in flight (not queued); task2 and task3 are still pending:
    // assert.ok(queue.getQueueLength() >= 2, 'Queue should contain pending tasks');
  });

  it('should process queue in correct order after processing flag is cleared', async () => {
    // TODO: Import RequestQueue
    // const { RequestQueue } = await import('../../src/queue.js');
    // const queue = new RequestQueue();

    const completionOrder = [];

    // First task starts processing immediately:
    // queue.enqueue(async () => {
    //   await delay(30);
    //   completionOrder.push('task1');
    // });

    // Add more tasks while the first is running:
    // await delay(5);
    // queue.enqueue(async () => {
    //   completionOrder.push('task2');
    // });
    // queue.enqueue(async () => {
    //   completionOrder.push('task3');
    // });

    // Wait for everything to finish:
    // await delay(100);

    // assert.deepEqual(completionOrder, ['task1', 'task2', 'task3'], 'Should process in FIFO order after processing flag cleared');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// Helper Functions
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Pause for the given number of milliseconds.
 *
 * @param {number} ms - Milliseconds to delay
 * @returns {Promise<void>} resolves once the timer fires
 */
function delay(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
||||
366
tests/unit/sitemap-generator.test.js
Normal file
366
tests/unit/sitemap-generator.test.js
Normal file
@@ -0,0 +1,366 @@
|
||||
/**
|
||||
* Unit Tests: Sitemap Generator
|
||||
*
|
||||
* Tests T035-T037, T040: Test sitemap XML generation and transformations
|
||||
* Tests the sitemap-generator.js module in isolation
|
||||
*
|
||||
* @module tests/unit/sitemap-generator
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
// =============================================================================
|
||||
// T035: Unit test for sitemap XML generation
|
||||
// =============================================================================
|
||||
|
||||
describe('T035: Sitemap XML Generation', () => {
  it('should generate valid sitemap XML with correct structure', () => {
    // Two representative sitemap entries.
    const mockEntries = [
      { loc: 'http://localhost:3000/documents/doc1', lastmod: '2024-03-01' },
      { loc: 'http://localhost:3000/documents/doc2', lastmod: '2024-03-02' }
    ];

    // TODO: Import generateSitemapXML from src/sitemap-generator.js
    // const { generateSitemapXML } = await import('../../src/sitemap-generator.js');
    // const xml = generateSitemapXML(mockEntries);

    // Reference shape for the implementer; the assertions below check the
    // important pieces rather than the exact byte layout.
    const expectedXml = `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>http://localhost:3000/documents/doc1</loc>
    <lastmod>2024-03-01</lastmod>
  </url>
  <url>
    <loc>http://localhost:3000/documents/doc2</loc>
    <lastmod>2024-03-02</lastmod>
  </url>
</urlset>`;

    // assert.ok(xml.includes('<?xml version="1.0" encoding="UTF-8"?>'), 'Should have XML declaration');
    // assert.ok(xml.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'), 'Should have urlset with namespace');
    // assert.ok(xml.includes('</urlset>'), 'Should close urlset');
    // assert.ok(xml.includes('<loc>http://localhost:3000/documents/doc1</loc>'), 'Should include first URL');
    // assert.ok(xml.includes('<loc>http://localhost:3000/documents/doc2</loc>'), 'Should include second URL');
  });

  it('should generate URL entries in correct RESTful format /documents/{documentId}', () => {
    const mockEntries = [
      { loc: 'http://localhost:3000/documents/abc123', lastmod: '2024-03-01' }
    ];

    // TODO: Import generateSitemapXML
    // const { generateSitemapXML } = await import('../../src/sitemap-generator.js');
    // const xml = generateSitemapXML(mockEntries);

    // assert.match(xml, /<loc>http:\/\/localhost:3000\/documents\/abc123<\/loc>/, 'Should use RESTful URL format');
  });

  it('should generate empty sitemap when no entries provided', () => {
    const mockEntries = [];

    // TODO: Import generateSitemapXML
    // const { generateSitemapXML } = await import('../../src/sitemap-generator.js');
    // const xml = generateSitemapXML(mockEntries);

    // An empty entry list still yields a well-formed (empty) urlset:
    // assert.ok(xml.includes('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'), 'Should have urlset');
    // assert.ok(xml.includes('</urlset>'), 'Should close urlset');
    // assert.ok(!xml.includes('<url>'), 'Should not contain any url entries');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T036: Unit test for Document to SitemapEntry transformation
|
||||
// =============================================================================
|
||||
|
||||
describe('T036: Document to SitemapEntry Transformation', () => {
  it('should transform Document to SitemapEntry with correct URL format', () => {
    // A Document as the Drive API returns it.
    const mockDocument = {
      id: 'abc123',
      name: 'Test Document',
      mimeType: 'application/pdf',
      modifiedTime: '2024-03-01T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // TODO: Import toSitemapEntry from src/sitemap-generator.js
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');
    // const entry = toSitemapEntry(mockDocument, baseUrl);

    // Expected mapping:
    // assert.equal(entry.loc, 'http://localhost:3000/documents/abc123', 'Should construct URL with baseUrl + /documents/ + documentId');
    // assert.equal(entry.lastmod, '2024-03-01', 'Should format lastmod as YYYY-MM-DD');
  });

  it('should use encodeURIComponent for document ID in URL', () => {
    // An ID whose characters require percent-encoding.
    const mockDocument = {
      id: 'doc with spaces',
      name: 'Test',
      mimeType: 'application/pdf',
      modifiedTime: '2024-03-01T10:30:00Z'
    };
    const baseUrl = 'http://localhost:3000';

    // TODO: Import toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');
    // const entry = toSitemapEntry(mockDocument, baseUrl);

    // assert.equal(entry.loc, 'http://localhost:3000/documents/doc%20with%20spaces', 'Should URL-encode document ID');
  });

  it('should concatenate baseUrl + /documents/ + documentId correctly', () => {
    const testCases = [
      { baseUrl: 'http://localhost:3000', documentId: 'doc1', expected: 'http://localhost:3000/documents/doc1' },
      { baseUrl: 'https://example.com', documentId: 'doc2', expected: 'https://example.com/documents/doc2' },
      // A trailing slash on the base URL must not produce a double slash.
      { baseUrl: 'http://localhost:3000/', documentId: 'doc3', expected: 'http://localhost:3000/documents/doc3' }
    ];

    // TODO: Import toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');

    for (const tc of testCases) {
      const mockDocument = {
        id: tc.documentId,
        name: 'Test',
        mimeType: 'application/pdf'
      };

      // const entry = toSitemapEntry(mockDocument, tc.baseUrl);
      // assert.equal(entry.loc, tc.expected, `Should correctly concatenate URL for baseUrl: ${tc.baseUrl}`);
    }
  });

  it('should handle documents without modifiedTime', () => {
    // Deliberately omits modifiedTime.
    const mockDocument = {
      id: 'doc1',
      name: 'Test Document',
      mimeType: 'application/pdf'
    };
    const baseUrl = 'http://localhost:3000';

    // TODO: Import toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');
    // const entry = toSitemapEntry(mockDocument, baseUrl);

    // lastmod should simply be absent:
    // assert.equal(entry.loc, 'http://localhost:3000/documents/doc1', 'Should have loc');
    // assert.equal(entry.lastmod, undefined, 'Should not have lastmod when modifiedTime is missing');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T037: Unit test for lastmod date formatting
|
||||
// =============================================================================
|
||||
|
||||
describe('T037: lastmod Date Formatting', () => {
  it('should format modifiedTime as ISO 8601 date (YYYY-MM-DD)', () => {
    const testCases = [
      { modifiedTime: '2024-03-01T10:30:00Z', expected: '2024-03-01' },
      { modifiedTime: '2024-12-31T23:59:59Z', expected: '2024-12-31' },
      { modifiedTime: '2024-01-15T00:00:00Z', expected: '2024-01-15' }
    ];

    // TODO: Import formatLastmod or toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');

    for (const tc of testCases) {
      const mockDocument = {
        id: 'doc1',
        name: 'Test',
        mimeType: 'application/pdf',
        modifiedTime: tc.modifiedTime
      };

      // const entry = toSitemapEntry(mockDocument, 'http://localhost:3000');
      // assert.equal(entry.lastmod, tc.expected, `Should format ${tc.modifiedTime} as ${tc.expected}`);
    }
  });

  it('should extract date part from ISO 8601 timestamp', () => {
    // Drive API modifiedTime is a full ISO 8601 timestamp (with millis).
    const modifiedTime = '2024-03-01T10:30:45.123Z';

    // TODO: Import formatLastmod or toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');

    const mockDocument = {
      id: 'doc1',
      name: 'Test',
      mimeType: 'application/pdf',
      modifiedTime
    };

    // const entry = toSitemapEntry(mockDocument, 'http://localhost:3000');

    // Only the date component should survive:
    // assert.equal(entry.lastmod, '2024-03-01', 'Should extract date part only');
    // assert.match(entry.lastmod, /^\d{4}-\d{2}-\d{2}$/, 'Should match YYYY-MM-DD format');
  });

  it('should handle different timezone formats in modifiedTime', () => {
    const timestamps = [
      '2024-03-01T10:30:00Z', // UTC
      '2024-03-01T10:30:00+00:00', // UTC with offset
      '2024-03-01T10:30:00-08:00', // PST
      '2024-03-01T10:30:00+05:30' // IST
    ];

    // TODO: Import toSitemapEntry
    // const { toSitemapEntry } = await import('../../src/sitemap-generator.js');

    for (const modifiedTime of timestamps) {
      const mockDocument = {
        id: 'doc1',
        name: 'Test',
        mimeType: 'application/pdf',
        modifiedTime
      };

      // const entry = toSitemapEntry(mockDocument, 'http://localhost:3000');
      // assert.match(entry.lastmod, /^\d{4}-\d{2}-\d{2}$/, `Should format date correctly for ${modifiedTime}`);
    }
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// T040: Unit test for XML special character escaping
|
||||
// =============================================================================
|
||||
|
||||
describe('T040: XML Special Character Escaping', () => {
  // NOTE: the expected values below are the XML-entity-encoded forms
  // (& -> &amp;, < -> &lt;, > -> &gt;, " -> &quot;, ' -> &apos;). An
  // earlier rendering of this file had been HTML-entity-decoded once,
  // which collapsed every expected entity back to the raw character;
  // the entities are restored here.

  it('should escape ampersand (&) as &amp;', () => {
    const url = 'http://localhost:3000/documents/doc&test';

    // TODO: Import escapeXml from src/xml-utils.js
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, 'http://localhost:3000/documents/doc&amp;test', 'Should escape & as &amp;');
    // assert.ok(!escaped.includes('&test'), 'Should not contain unescaped &');
  });

  it('should escape less than (<) as &lt;', () => {
    const url = 'http://localhost:3000/documents/doc<123';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, 'http://localhost:3000/documents/doc&lt;123', 'Should escape < as &lt;');
  });

  it('should escape greater than (>) as &gt;', () => {
    const url = 'http://localhost:3000/documents/doc>456';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, 'http://localhost:3000/documents/doc&gt;456', 'Should escape > as &gt;');
  });

  it('should escape double quote (") as &quot;', () => {
    const url = 'http://localhost:3000/documents/doc"test';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, 'http://localhost:3000/documents/doc&quot;test', 'Should escape " as &quot;');
  });

  it("should escape single quote (') as &apos;", () => {
    const url = "http://localhost:3000/documents/doc'xyz";

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, "http://localhost:3000/documents/doc&apos;xyz", "Should escape ' as &apos;");
  });

  it('should escape multiple special characters in same string', () => {
    const url = 'http://localhost:3000/documents/a&b<c>d"e\'f';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(
    //   escaped,
    //   'http://localhost:3000/documents/a&amp;b&lt;c&gt;d&quot;e&apos;f',
    //   'Should escape all special characters'
    // );
  });

  it('should not double-escape already escaped characters', () => {
    // Input is already entity-encoded once.
    const url = 'http://localhost:3000/documents/doc&amp;test';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // Should not double-escape:
    // assert.ok(!escaped.includes('&amp;amp;'), 'Should not double-escape &');
  });

  it('should handle empty string', () => {
    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml('');

    // assert.equal(escaped, '', 'Should return empty string for empty input');
  });

  it('should handle string with no special characters', () => {
    const url = 'http://localhost:3000/documents/doc123';

    // TODO: Import escapeXml
    // const { escapeXml } = await import('../../src/xml-utils.js');
    // const escaped = escapeXml(url);

    // assert.equal(escaped, url, 'Should return unchanged string when no special chars');
  });
});
|
||||
103
tests/unit/utils.test.js
Normal file
103
tests/unit/utils.test.js
Normal file
@@ -0,0 +1,103 @@
|
||||
/**
|
||||
* Unit Tests for General Utilities
|
||||
* Tests request ID generation and document ID validation
|
||||
*/
|
||||
|
||||
import { test, describe } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
import crypto from 'node:crypto';
|
||||
|
||||
// Set up the globals that server.js would normally provide BEFORE
// src/proxy.js is evaluated.
//
// Static `import` declarations are hoisted and execute before any other
// statement in this module, so a static import of proxy.js would run
// before these globals were assigned. A dynamic import() keeps the
// intended order: globals first, then proxy.js.
//
// NOTE(review): on Node versions where globalThis.crypto is the built-in
// getter-only WebCrypto global, this assignment may throw or be ignored —
// confirm behavior on the target runtime.
globalThis.crypto = crypto;
globalThis.config = { google: {}, server: {}, sitemap: {} };

const { generateRequestId, validateDocumentId } = await import('../../src/proxy.js');
|
||||
|
||||
describe('Unit: Request ID Generation', () => {

  test('T046: Should generate unique request ID', () => {
    const first = generateRequestId();
    const second = generateRequestId();

    assert.ok(first, 'Should generate ID');
    assert.ok(second, 'Should generate second ID');
    assert.notStrictEqual(first, second, 'IDs should be unique');
  });

  test('T046: Should generate ID with req_ prefix', () => {
    assert.ok(generateRequestId().startsWith('req_'), 'Should start with req_ prefix');
  });

  test('T046: Should generate valid UUID format', () => {
    // Strip the 'req_' prefix; the remainder must be a UUID v4:
    // xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx
    const suffix = generateRequestId().slice(4);
    const uuidV4 = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
    assert.ok(uuidV4.test(suffix), 'Should be valid UUID v4');
  });

});
|
||||
|
||||
describe('Unit: Document ID Validation', () => {

  test('T046: Should accept valid Google Drive IDs', () => {
    const validIds = [
      '1BxAA_example123',
      'abcdefghijklmnop',
      '12345678',
      'test-doc-id_123',
      'ABCDEFGH-IJKLMNOP_12345678'
    ];

    validIds.forEach((id) => {
      assert.ok(validateDocumentId(id), `Should accept valid ID: ${id}`);
    });
  });

  test('T046: Should reject IDs that are too short', () => {
    // 7 characters — one under the 8-character minimum.
    assert.strictEqual(validateDocumentId('abc1234'), false);
  });

  test('T046: Should reject IDs that are too long', () => {
    // 129 characters — one over the 128-character maximum.
    assert.strictEqual(validateDocumentId('a'.repeat(129)), false);
  });

  test('T046: Should reject IDs with invalid characters', () => {
    const invalidIds = [
      'invalid@id',
      'invalid id', // space
      'invalid/id', // slash
      'invalid#id', // hash
      'invalid.id', // period
      'invalid$id' // dollar sign
    ];

    invalidIds.forEach((id) => {
      assert.strictEqual(validateDocumentId(id), false, `Should reject invalid ID: ${id}`);
    });
  });

  test('T046: Should reject null, undefined, and non-strings', () => {
    for (const bad of [null, undefined, 123, {}, []]) {
      assert.strictEqual(validateDocumentId(bad), false);
    }
  });

  test('T046: Should reject empty string', () => {
    assert.strictEqual(validateDocumentId(''), false);
  });

});
|
||||
63
tests/unit/xml-utils.test.js
Normal file
63
tests/unit/xml-utils.test.js
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
|
||||
* Unit Tests for XML Utilities
|
||||
* Tests XML escaping functionality
|
||||
*/
|
||||
|
||||
import { test, describe } from 'node:test';
|
||||
import assert from 'node:assert';
|
||||
import { escapeXml } from '../../src/xml-utils.js';
|
||||
|
||||
describe('Unit: XML Escaping', () => {
  // The expected values are the XML-entity-encoded forms produced by
  // escapeXml (& -> &amp;, < -> &lt;, > -> &gt;, " -> &quot;, ' -> &apos;).
  // An earlier rendering of this file had been HTML-entity-decoded once,
  // which collapsed every expected string to equal its input (and broke
  // the single-quote literal); the entities are restored here.

  test('T045: Should escape ampersand (&)', () => {
    const input = 'Rock & Roll';
    const expected = 'Rock &amp; Roll';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should escape less than (<)', () => {
    const input = '5 < 10';
    const expected = '5 &lt; 10';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should escape greater than (>)', () => {
    const input = '10 > 5';
    const expected = '10 &gt; 5';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should escape double quote (")', () => {
    const input = 'Say "Hello"';
    const expected = 'Say &quot;Hello&quot;';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should escape single quote (\')', () => {
    const input = "It's working";
    const expected = 'It&apos;s working';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should escape multiple special characters', () => {
    const input = '<tag attr="value">Content & stuff</tag>';
    const expected = '&lt;tag attr=&quot;value&quot;&gt;Content &amp; stuff&lt;/tag&gt;';
    assert.strictEqual(escapeXml(input), expected);
  });

  test('T045: Should handle empty string', () => {
    assert.strictEqual(escapeXml(''), '');
  });

  test('T045: Should handle non-string input', () => {
    assert.strictEqual(escapeXml(null), '');
    assert.strictEqual(escapeXml(undefined), '');
    assert.strictEqual(escapeXml(123), '');
  });

  test('T045: Should not modify safe strings', () => {
    const input = 'This is a safe string 123';
    assert.strictEqual(escapeXml(input), input);
  });

});
|
||||
Reference in New Issue
Block a user