Merge pull request 'OIDC Authorization implemented' (#1) from 001-oidc-proxy-script into main

Reviewed-on: #1
This commit was merged in pull request #1.
This commit is contained in:
2026-04-22 19:53:32 -05:00
61 changed files with 5371 additions and 38 deletions

View File

@@ -10,6 +10,40 @@ $ARGUMENTS
You **MUST** consider the user input before proceeding (if not empty).
## Pre-Execution Checks
**Check for extension hooks (before analysis)**:
- Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.before_analyze` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Pre-Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Pre-Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
Wait for the result of the hook command before proceeding to the Goal.
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Goal
Identify inconsistencies, duplications, ambiguities, and underspecified items across the three core artifacts (`spec.md`, `plan.md`, `tasks.md`) before implementation. This command MUST run only after `/speckit.tasks` has successfully produced a complete `tasks.md`.
@@ -162,6 +196,37 @@ At end of report, output a concise Next Actions block:
Ask the user: "Would you like me to suggest concrete remediation edits for the top N issues?" (Do NOT apply them automatically.)
### 9. Check for extension hooks
After reporting, check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_analyze` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Operating Principles
### Context Efficiency

View File

@@ -31,6 +31,40 @@ $ARGUMENTS
You **MUST** consider the user input before proceeding (if not empty).
## Pre-Execution Checks
**Check for extension hooks (before checklist generation)**:
- Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.before_checklist` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Pre-Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Pre-Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
Wait for the result of the hook command before proceeding to the Execution Steps.
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Execution Steps
1. **Setup**: Run `.specify/scripts/bash/check-prerequisites.sh --json` from repo root and parse JSON for FEATURE_DIR and AVAILABLE_DOCS list.
@@ -293,3 +327,35 @@ Sample items:
- Correct: Validation of requirement quality
- Wrong: "Does it do X?"
- Correct: "Is X clearly specified?"
## Post-Execution Checks
**Check for extension hooks (after checklist generation)**:
Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_checklist` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently

View File

@@ -14,6 +14,40 @@ $ARGUMENTS
You **MUST** consider the user input before proceeding (if not empty).
## Pre-Execution Checks
**Check for extension hooks (before clarification)**:
- Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.before_clarify` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Pre-Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Pre-Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
Wait for the result of the hook command before proceeding to the Outline.
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Outline
Goal: Detect and reduce ambiguity or missing decision points in the active feature specification and record the clarifications directly in the spec file.
@@ -179,3 +213,35 @@ Behavior rules:
- If quota reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with rationale.
Context for prioritization: $ARGUMENTS
## Post-Execution Checks
**Check for extension hooks (after clarification)**:
Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_clarify` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently

View File

@@ -14,6 +14,40 @@ $ARGUMENTS
You **MUST** consider the user input before proceeding (if not empty).
## Pre-Execution Checks
**Check for extension hooks (before constitution update)**:
- Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.before_constitution` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Pre-Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Pre-Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
Wait for the result of the hook command before proceeding to the Outline.
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Outline
You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
@@ -82,3 +116,35 @@ If the user supplies partial updates (e.g., only one principle revision), still
If critical info missing (e.g., ratification date truly unknown), insert `TODO(<FIELD_NAME>): explanation` and include in the Sync Impact Report under deferred items.
Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
## Post-Execution Checks
**Check for extension hooks (after constitution update)**:
Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_constitution` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently

View File

@@ -0,0 +1,51 @@
---
description: Auto-commit changes after a Spec Kit command completes
---
<!-- Extension: git -->
<!-- Config: .specify/extensions/git/ -->
# Auto-Commit Changes
Automatically stage and commit all changes after a Spec Kit command completes.
## Behavior
This command is invoked as a hook after (or before) core commands. It:
1. Determines the event name from the hook context (e.g., if invoked as an `after_specify` hook, the event is `after_specify`; if `before_plan`, the event is `before_plan`)
2. Checks `.specify/extensions/git/git-config.yml` for the `auto_commit` section
3. Looks up the specific event key to see if auto-commit is enabled
4. Falls back to `auto_commit.default` if no event-specific key exists
5. Uses the per-command `message` if configured, otherwise a default message
6. If enabled and there are uncommitted changes, runs `git add .` + `git commit`
## Execution
Determine the event name from the hook that triggered this command, then run the script:
- **Bash**: `.specify/extensions/git/scripts/bash/auto-commit.sh <event_name>`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/auto-commit.ps1 <event_name>`
Replace `<event_name>` with the actual hook event (e.g., `after_specify`, `before_plan`, `after_implement`).
## Configuration
In `.specify/extensions/git/git-config.yml`:
```yaml
auto_commit:
default: false # Global toggle — set true to enable for all commands
after_specify:
enabled: true # Override per-command
message: "[Spec Kit] Add specification"
after_plan:
enabled: false
message: "[Spec Kit] Add implementation plan"
```
## Graceful Degradation
- If Git is not available or the current directory is not a repository: skips with a warning
- If no config file exists: skips (disabled by default)
- If no changes to commit: skips with a message

View File

@@ -0,0 +1,70 @@
---
description: Create a feature branch with sequential or timestamp numbering
---
<!-- Extension: git -->
<!-- Config: .specify/extensions/git/ -->
# Create Feature Branch
Create and switch to a new git feature branch for the given specification. This command handles **branch creation only** — the spec directory and files are created by the core `/speckit.specify` workflow.
## User Input
```text
$ARGUMENTS
```
You **MUST** consider the user input before proceeding (if not empty).
## Environment Variable Override
If the user explicitly provided `GIT_BRANCH_NAME` (e.g., via environment variable, argument, or in their request), pass it through to the script by setting the `GIT_BRANCH_NAME` environment variable before invoking the script. When `GIT_BRANCH_NAME` is set:
- The script uses the exact value as the branch name, bypassing all prefix/suffix generation
- `--short-name`, `--number`, and `--timestamp` flags are ignored
- `FEATURE_NUM` is extracted from the name if it starts with a numeric prefix, otherwise set to the full branch name
## Prerequisites
- Verify Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, warn the user and skip branch creation
## Branch Numbering Mode
Determine the branch numbering strategy by checking configuration in this order:
1. Check `.specify/extensions/git/git-config.yml` for `branch_numbering` value
2. Check `.specify/init-options.json` for `branch_numbering` value (backward compatibility)
3. Default to `sequential` if neither exists
## Execution
Generate a concise short name (2-4 words) for the branch:
- Analyze the feature description and extract the most meaningful keywords
- Use action-noun format when possible (e.g., "add-user-auth", "fix-payment-bug")
- Preserve technical terms and acronyms (OAuth2, API, JWT, etc.)
Run the appropriate script based on your platform:
- **Bash**: `.specify/extensions/git/scripts/bash/create-new-feature.sh --json --short-name "<short-name>" "<feature description>"`
- **Bash (timestamp)**: `.specify/extensions/git/scripts/bash/create-new-feature.sh --json --timestamp --short-name "<short-name>" "<feature description>"`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/create-new-feature.ps1 -Json -ShortName "<short-name>" "<feature description>"`
- **PowerShell (timestamp)**: `.specify/extensions/git/scripts/powershell/create-new-feature.ps1 -Json -Timestamp -ShortName "<short-name>" "<feature description>"`
**IMPORTANT**:
- Do NOT pass `--number` — the script determines the correct next number automatically
- Always include the JSON flag (`--json` for Bash, `-Json` for PowerShell) so the output can be parsed reliably
- You must only ever run this script once per feature
- The JSON output will contain `BRANCH_NAME` and `FEATURE_NUM`
## Graceful Degradation
If Git is not installed or the current directory is not a Git repository:
- Branch creation is skipped with a warning: `[specify] Warning: Git repository not detected; skipped branch creation`
- The script still outputs `BRANCH_NAME` and `FEATURE_NUM` so the caller can reference them
## Output
The script outputs JSON with:
- `BRANCH_NAME`: The branch name (e.g., `003-user-auth` or `20260319-143022-user-auth`)
- `FEATURE_NUM`: The numeric or timestamp prefix used

View File

@@ -0,0 +1,52 @@
---
description: Initialize a Git repository with an initial commit
---
<!-- Extension: git -->
<!-- Config: .specify/extensions/git/ -->
# Initialize Git Repository
Initialize a Git repository in the current project directory if one does not already exist.
## Execution
Run the appropriate script from the project root:
- **Bash**: `.specify/extensions/git/scripts/bash/initialize-repo.sh`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/initialize-repo.ps1`
If the extension scripts are not found, fall back to:
- **Bash**: `git init && git add . && git commit -m "Initial commit from Specify template"`
- **PowerShell**: `git init; git add .; git commit -m "Initial commit from Specify template"`
The script handles all checks internally:
- Skips if Git is not available
- Skips if already inside a Git repository
- Runs `git init`, `git add .`, and `git commit` with an initial commit message
## Customization
Replace the script to add project-specific Git initialization steps:
- Custom `.gitignore` templates
- Default branch naming (`git config init.defaultBranch`)
- Git LFS setup
- Git hooks installation
- Commit signing configuration
- Git Flow initialization
## Output
On success:
- `✓ Git repository initialized`
## Graceful Degradation
If Git is not installed:
- Warn the user
- Skip repository initialization
- The project continues to function without Git (specs can still be created under `specs/`)
If Git is installed but `git init`, `git add .`, or `git commit` fails:
- Surface the error to the user
- Stop this command rather than continuing with a partially initialized repository

View File

@@ -0,0 +1,48 @@
---
description: Detect Git remote URL for GitHub integration
---
<!-- Extension: git -->
<!-- Config: .specify/extensions/git/ -->
# Detect Git Remote URL
Detect the Git remote URL for integration with GitHub services (e.g., issue creation).
## Prerequisites
- Check if Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, output a warning and return empty:
```
[specify] Warning: Git repository not detected; cannot determine remote URL
```
## Execution
Run the following command to get the remote URL:
```bash
git config --get remote.origin.url
```
## Output
Parse the remote URL and determine:
1. **Repository owner**: Extract from the URL (e.g., `github` from `https://github.com/github/spec-kit.git`)
2. **Repository name**: Extract from the URL (e.g., `spec-kit` from `https://github.com/github/spec-kit.git`)
3. **Is GitHub**: Whether the remote points to a GitHub repository
Supported URL formats:
- HTTPS: `https://github.com/<owner>/<repo>.git`
- SSH: `git@github.com:<owner>/<repo>.git`
> [!CAUTION]
> ONLY report a GitHub repository if the remote URL actually points to github.com.
> Do NOT assume the remote is GitHub if the URL format doesn't match.
## Graceful Degradation
If Git is not installed, the directory is not a Git repository, or no remote is configured:
- Return an empty result
- Do NOT error — other workflows should continue without Git remote information

View File

@@ -0,0 +1,52 @@
---
description: Validate current branch follows feature branch naming conventions
---
<!-- Extension: git -->
<!-- Config: .specify/extensions/git/ -->
# Validate Feature Branch
Validate that the current Git branch follows the expected feature branch naming conventions.
## Prerequisites
- Check if Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, output a warning and skip validation:
```
[specify] Warning: Git repository not detected; skipped branch validation
```
## Validation Rules
Get the current branch name:
```bash
git rev-parse --abbrev-ref HEAD
```
The branch name must match one of these patterns:
1. **Sequential**: `^[0-9]{3,}-` (e.g., `001-feature-name`, `042-fix-bug`, `1000-big-feature`)
2. **Timestamp**: `^[0-9]{8}-[0-9]{6}-` (e.g., `20260319-143022-feature-name`)
## Execution
If on a feature branch (matches either pattern):
- Output: `✓ On feature branch: <branch-name>`
- Check if the corresponding spec directory exists under `specs/`:
- For sequential branches, look for `specs/<prefix>-*` where prefix matches the numeric portion
- For timestamp branches, look for `specs/<prefix>-*` where prefix matches the `YYYYMMDD-HHMMSS` portion
- If spec directory exists: `✓ Spec directory found: <path>`
- If spec directory missing: `⚠ No spec directory found for prefix <prefix>`
If NOT on a feature branch:
- Output: `✗ Not on a feature branch. Current branch: <branch-name>`
- Output: `Feature branches should be named like: 001-feature-name or 20260319-143022-feature-name`
## Graceful Degradation
If Git is not installed or the directory is not a Git repository:
- Check the `SPECIFY_FEATURE` environment variable as a fallback
- If set, validate that value against the naming patterns
- If not set, skip validation with a warning

View File

@@ -139,15 +139,11 @@ You **MUST** consider the user input before proceeding (if not empty).
- Skip if project is purely internal (build scripts, one-off tools, etc.)
3. **Agent context update**:
- Run `.specify/scripts/bash/update-agent-context.sh copilot`
- These scripts detect which AI agent is in use
- Update the appropriate agent-specific context file
- Add only new technology from current plan
- Preserve manual additions between markers
- Update the plan reference between the `<!-- SPECKIT START -->` and `<!-- SPECKIT END -->` markers in `.github/copilot-instructions.md` to point to the plan file created in step 1 (the IMPL_PLAN path)
**Output**: data-model.md, /contracts/*, quickstart.md, agent-specific file
**Output**: data-model.md, /contracts/*, quickstart.md, updated agent context file
## Key rules
- Use absolute paths
- Use absolute paths for filesystem operations; use project-relative paths for references in documentation and agent context files
- ERROR on gate failures or unresolved clarifications

View File

@@ -58,7 +58,7 @@ The text the user typed after `/speckit.specify` in the triggering message **is*
Given that feature description, do this:
1. **Generate a concise short name** (2-4 words) for the branch:
1. **Generate a concise short name** (2-4 words) for the feature:
- Analyze the feature description and extract the most meaningful keywords
- Create a 2-4 word short name that captures the essence of the feature
- Use action-noun format when possible (e.g., "add-user-auth", "fix-payment-bug")
@@ -70,30 +70,47 @@ Given that feature description, do this:
- "Create a dashboard for analytics" → "analytics-dashboard"
- "Fix payment processing timeout bug" → "fix-payment-timeout"
2. **Create the feature branch** by running the script with `--short-name` (and `--json`). In sequential mode, do NOT pass `--number` — the script auto-detects the next available number. In timestamp mode, the script generates a `YYYYMMDD-HHMMSS` prefix automatically:
2. **Branch creation** (optional, via hook):
**Branch numbering mode**: Before running the script, check if `.specify/init-options.json` exists and read the `branch_numbering` value.
- If `"timestamp"`, add `--timestamp` (Bash) or `-Timestamp` (PowerShell) to the script invocation
- If `"sequential"` or absent, do not add any extra flag (default behavior)
If a `before_specify` hook ran successfully in the Pre-Execution Checks above, it will have created/switched to a git branch and output JSON containing `BRANCH_NAME` and `FEATURE_NUM`. Note these values for reference, but the branch name does **not** dictate the spec directory name.
- Bash example: `.specify/scripts/bash/create-new-feature.sh "$ARGUMENTS" --json --short-name "user-auth" "Add user authentication"`
- Bash (timestamp): `.specify/scripts/bash/create-new-feature.sh "$ARGUMENTS" --json --timestamp --short-name "user-auth" "Add user authentication"`
- PowerShell example: `.specify/scripts/powershell/create-new-feature.ps1 "$ARGUMENTS" -Json -ShortName "user-auth" "Add user authentication"`
- PowerShell (timestamp): `.specify/scripts/powershell/create-new-feature.ps1 "$ARGUMENTS" -Json -Timestamp -ShortName "user-auth" "Add user authentication"`
If the user explicitly provided `GIT_BRANCH_NAME`, pass it through to the hook so the branch script uses the exact value as the branch name (bypassing all prefix/suffix generation).
3. **Create the spec feature directory**:
Specs live under the default `specs/` directory unless the user explicitly provides `SPECIFY_FEATURE_DIRECTORY`.
**Resolution order for `SPECIFY_FEATURE_DIRECTORY`**:
1. If the user explicitly provided `SPECIFY_FEATURE_DIRECTORY` (e.g., via environment variable, argument, or configuration), use it as-is
2. Otherwise, auto-generate it under `specs/`:
- Check `.specify/init-options.json` for `branch_numbering`
- If `"timestamp"`: prefix is `YYYYMMDD-HHMMSS` (current timestamp)
- If `"sequential"` or absent: prefix is `NNN` (next available 3-digit number after scanning existing directories in `specs/`)
- Construct the directory name: `<prefix>-<short-name>` (e.g., `003-user-auth` or `20260319-143022-user-auth`)
- Set `SPECIFY_FEATURE_DIRECTORY` to `specs/<directory-name>`
**Create the directory and spec file**:
- `mkdir -p SPECIFY_FEATURE_DIRECTORY`
- Copy `.specify/templates/spec-template.md` to `SPECIFY_FEATURE_DIRECTORY/spec.md` as the starting point
- Set `SPEC_FILE` to `SPECIFY_FEATURE_DIRECTORY/spec.md`
- Persist the resolved path to `.specify/feature.json`:
```json
{
"feature_directory": "<resolved feature dir>"
}
```
Write the actual resolved directory path value (for example, `specs/003-user-auth`), not the literal string `SPECIFY_FEATURE_DIRECTORY`.
This allows downstream commands (`/speckit.plan`, `/speckit.tasks`, etc.) to locate the feature directory without relying on git branch name conventions.
**IMPORTANT**:
- Do NOT pass `--number` — the script determines the correct next number automatically
- Always include the JSON flag (`--json` for Bash, `-Json` for PowerShell) so the output can be parsed reliably
- You must only ever run this script once per feature
- The JSON is provided in the terminal as output - always refer to it to get the actual content you're looking for
- The JSON output will contain BRANCH_NAME and SPEC_FILE paths
- For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\''m Groot' (or double-quote if possible: "I'm Groot")
- You must only create one feature per `/speckit.specify` invocation
- The spec directory name and the git branch name are independent — they may be the same but that is the user's choice
- The spec directory and file are always created by this command, never by the hook
3. Load `.specify/templates/spec-template.md` to understand required sections.
4. Load `.specify/templates/spec-template.md` to understand required sections.
4. Follow this execution flow:
1. Parse user description from Input
5. Follow this execution flow:
1. Parse user description from arguments
If empty: ERROR "No feature description provided"
2. Extract key concepts from description
Identify: actors, actions, data, constraints
@@ -117,11 +134,11 @@ Given that feature description, do this:
7. Identify Key Entities (if data involved)
8. Return: SUCCESS (spec ready for planning)
5. Write the specification to SPEC_FILE using the template structure, replacing placeholders with concrete details derived from the feature description (arguments) while preserving section order and headings.
6. Write the specification to SPEC_FILE using the template structure, replacing placeholders with concrete details derived from the feature description (arguments) while preserving section order and headings.
6. **Specification Quality Validation**: After writing the initial spec, validate it against quality criteria:
7. **Specification Quality Validation**: After writing the initial spec, validate it against quality criteria:
a. **Create Spec Quality Checklist**: Generate a checklist file at `FEATURE_DIR/checklists/requirements.md` using the checklist template structure with these validation items:
a. **Create Spec Quality Checklist**: Generate a checklist file at `SPECIFY_FEATURE_DIRECTORY/checklists/requirements.md` using the checklist template structure with these validation items:
```markdown
# Specification Quality Checklist: [FEATURE NAME]
@@ -211,9 +228,13 @@ Given that feature description, do this:
d. **Update Checklist**: After each validation iteration, update the checklist file with current pass/fail status
7. Report completion with branch name, spec file path, checklist results, and readiness for the next phase (`/speckit.clarify` or `/speckit.plan`).
8. **Report completion** to the user with:
- `SPECIFY_FEATURE_DIRECTORY` — the feature directory path
- `SPEC_FILE` — the spec file path
- Checklist results summary
- Readiness for the next phase (`/speckit.clarify` or `/speckit.plan`)
8. **Check for extension hooks**: After reporting completion, check if `.specify/extensions.yml` exists in the project root.
9. **Check for extension hooks**: After reporting completion, check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_specify` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
@@ -242,7 +263,7 @@ Given that feature description, do this:
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
**NOTE:** The script creates and checks out the new branch and initializes the spec file before writing.
**NOTE:** Branch creation is handled by the `before_specify` hook (git extension). Spec directory and file creation are always handled by this core command.
## Quick Guidelines

View File

@@ -11,6 +11,40 @@ $ARGUMENTS
You **MUST** consider the user input before proceeding (if not empty).
## Pre-Execution Checks
**Check for extension hooks (before tasks-to-issues conversion)**:
- Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.before_taskstoissues` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Pre-Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Pre-Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
Wait for the result of the hook command before proceeding to the Outline.
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently
## Outline
1. Run `.specify/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\''m Groot' (or double-quote if possible: "I'm Groot").
@@ -28,3 +62,35 @@ git config --get remote.origin.url
> [!CAUTION]
> UNDER NO CIRCUMSTANCES EVER CREATE ISSUES IN REPOSITORIES THAT DO NOT MATCH THE REMOTE URL
## Post-Execution Checks
**Check for extension hooks (after tasks-to-issues conversion)**:
Check if `.specify/extensions.yml` exists in the project root.
- If it exists, read it and look for entries under the `hooks.after_taskstoissues` key
- If the YAML cannot be parsed or is invalid, skip hook checking silently and continue normally
- Filter out hooks where `enabled` is explicitly `false`. Treat hooks without an `enabled` field as enabled by default.
- For each remaining hook, do **not** attempt to interpret or evaluate hook `condition` expressions:
- If the hook has no `condition` field, or it is null/empty, treat the hook as executable
- If the hook defines a non-empty `condition`, skip the hook and leave condition evaluation to the HookExecutor implementation
- For each executable hook, output the following based on its `optional` flag:
- **Optional hook** (`optional: true`):
```
## Extension Hooks
**Optional Hook**: {extension}
Command: `/{command}`
Description: {description}
Prompt: {prompt}
To execute: `/{command}`
```
- **Mandatory hook** (`optional: false`):
```
## Extension Hooks
**Automatic Hook**: {extension}
Executing: `/{command}`
EXECUTE_COMMAND: {command}
```
- If no hooks are registered or `.specify/extensions.yml` does not exist, skip silently

View File

@@ -0,0 +1,110 @@
<!-- SPECKIT START -->
For additional context about technologies to be used, project structure,
shell commands, and other important information, read the current plan at
`specs/001-oidc-proxy-script/plan.md`
<!-- SPECKIT END -->
## Project Overview
`kme-content-adapter` is an HTTP proxy adapter (Node.js ≥18, ESM) that searches and exports documents from KME via a single proxy script executed in a VM sandbox.
## Commands
```bash
npm run dev # Start with --watch (auto-restart on file changes)
npm start # Start, piping logs through jq for pretty-printing
npm test # Run all tests
npm run test:unit # Unit tests only
npm run test:integration # Integration tests only
npm run test:contract # Contract tests only
# Run a single test file
node --test tests/unit/my.test.js
```
Tests use the Node.js built-in test runner (`node:test`). No test framework is installed.
Environment overrides: `PORT`, `HOST`, `LOG_LEVEL`.
## Architecture
The server loads `src/proxyScripts/proxy.js` once at startup using `vm.Script`, then executes it in a **fresh isolated VM context per request** via `vm.createContext`. This mirrors the target deployment environment (IVA Studio proxy script).
```
src/
├── proxyScripts/
│ └── proxy.js # ALL business logic lives here
├── globalVariables/
│ ├── *.json # Runtime data injected into VM context
│ └── adapterHelper.js # Pure utility functions (optional)
├── logger.js # Structured JSON logger
└── server.js # HTTP server bootstrap only
config/
└── default.json # Infrastructure settings (port, host, log level)
```
**Context injection** — `server.js` injects these globals into every request context:
| Variable | Source |
|---|---|
| `console` | Custom structured logger (`logger.js`) |
| `crypto` | Node.js Web Crypto API |
| `axios` | HTTP client |
| `jwt` | `jsonwebtoken` |
| `uuidv4` | UUID v4 generator |
| `xmlBuilder` | `xmlbuilder2` `create` function |
| `URLSearchParams`, `URL` | Node.js globals |
| `adapterHelper` | Loaded from `src/globalVariables/adapterHelper.js` (if present) |
| `<name>` | Each JSON/JS file in `src/globalVariables/` (filename → variable name) |
| `req`, `res` | Node.js HTTP request/response |
Routing metadata (`workspaceId`, `branch`, `route`) is attached by `server.js` to `req.params` before invoking the proxy — `proxy.js` must read these from `req.params`, never from `config`.
## Key Conventions
### `src/proxyScripts/proxy.js` — The Only Place for Business Logic
- **ZERO `import`/`export` statements** — the file runs in a VM with no module system access.
- **ZERO access to `config`, `global.config`, or `process.env`** — these are server concerns.
- All dependencies arrive via the injected VM context (see table above).
- Access injected variables directly: `adapter_settings.key`, not `globalThis["adapter_settings"]`.
### `src/globalVariables/adapterHelper.js` — Literal Function Body Pattern
This file contains the **literal body of a function**, not valid standalone JavaScript. `server.js` wraps it at load time:
```javascript
// server.js wraps the file contents like this:
const wrappedCode = `(function() {\n${code}\n})()`;
```
So `adapterHelper.js` must end with a bare `return { ... }` that exports the helpers object. It must have zero imports/exports and contain only pure utilities (validators, formatters, XML helpers, error mappers). Authentication, API calls, and state must stay in `proxy.js`.
### `src/globalVariables/` — Filename Is the Variable Name
Every file loaded from this directory is injected into the VM context using its filename (without extension) as the key:
- `adapter_settings.json` → available as `adapter_settings` in `proxy.js`
- `adapterHelper.js` → available as `adapterHelper` in `proxy.js`
- Files matching `*.example.*` are skipped.
- JSON files are loaded before JS files (so JS modules can reference JSON data).
### Config vs. Secrets
- `config/default.json` — infrastructure only: `server.port`, `server.host`, `logging.level`, `proxy.*`
- Credentials, API keys, and behavioral config → JSON files in `src/globalVariables/`
### Logging
Use the `logger` from `src/logger.js` in server-side code. It accepts either a string or a structured object:
```javascript
logger.info("Simple message");
logger.info({ message: "Structured", requestId: "abc", status: 200 });
```
In `proxy.js`, use the injected `console` object (same API).
### Challenging New Files
Before adding any file to `src/`, verify it belongs to one of the five allowed categories (server.js, logger.js, proxyScripts/proxy.js, globalVariables/*.json, globalVariables/adapterHelper.js). New files require explicit justification against the monolithic architecture constraint.

View File

@@ -0,0 +1,3 @@
---
agent: speckit.git.commit
---

View File

@@ -0,0 +1,3 @@
---
agent: speckit.git.feature
---

View File

@@ -0,0 +1,3 @@
---
agent: speckit.git.initialize
---

View File

@@ -0,0 +1,3 @@
---
agent: speckit.git.remote
---

View File

@@ -0,0 +1,3 @@
---
agent: speckit.git.validate
---

1
.gitignore vendored
View File

@@ -8,6 +8,7 @@ node_modules/
# Service Account credentials (NEVER commit!)
global/*.json
src/globalVariables/kme_CSA_settings.json
# Logs
*.log

148
.specify/extensions.yml Normal file
View File

@@ -0,0 +1,148 @@
installed: []
settings:
auto_execute_hooks: true
hooks:
before_constitution:
- extension: git
command: speckit.git.initialize
enabled: true
optional: false
prompt: Execute speckit.git.initialize?
description: Initialize Git repository before constitution setup
condition: null
before_specify:
- extension: git
command: speckit.git.feature
enabled: true
optional: false
prompt: Execute speckit.git.feature?
description: Create feature branch before specification
condition: null
before_clarify:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before clarification?
description: Auto-commit before spec clarification
condition: null
before_plan:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before planning?
description: Auto-commit before implementation planning
condition: null
before_tasks:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before task generation?
description: Auto-commit before task generation
condition: null
before_implement:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before implementation?
description: Auto-commit before implementation
condition: null
before_checklist:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before checklist?
description: Auto-commit before checklist generation
condition: null
before_analyze:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before analysis?
description: Auto-commit before analysis
condition: null
before_taskstoissues:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit outstanding changes before issue sync?
description: Auto-commit before tasks-to-issues conversion
condition: null
after_constitution:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit constitution changes?
description: Auto-commit after constitution update
condition: null
after_specify:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit specification changes?
description: Auto-commit after specification
condition: null
after_clarify:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit clarification changes?
description: Auto-commit after spec clarification
condition: null
after_plan:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit plan changes?
description: Auto-commit after implementation planning
condition: null
after_tasks:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit task changes?
description: Auto-commit after task generation
condition: null
after_implement:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit implementation changes?
description: Auto-commit after implementation
condition: null
after_checklist:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit checklist changes?
description: Auto-commit after checklist generation
condition: null
after_analyze:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit analysis results?
description: Auto-commit after analysis
condition: null
after_taskstoissues:
- extension: git
command: speckit.git.commit
enabled: true
optional: true
prompt: Commit after syncing issues?
description: Auto-commit after tasks-to-issues conversion
condition: null

View File

@@ -0,0 +1,23 @@
{
"schema_version": "1.0",
"extensions": {
"git": {
"version": "1.0.0",
"source": "local",
"manifest_hash": "sha256:9731aa8143a72fbebfdb440f155038ab42642517c2b2bdbbf67c8fdbe076ed79",
"enabled": true,
"priority": 10,
"registered_commands": {
"copilot": [
"speckit.git.feature",
"speckit.git.validate",
"speckit.git.remote",
"speckit.git.initialize",
"speckit.git.commit"
]
},
"registered_skills": [],
"installed_at": "2026-04-22T23:06:00.985659+00:00"
}
}
}

View File

@@ -0,0 +1,100 @@
# Git Branching Workflow Extension
Git repository initialization, feature branch creation, numbering (sequential/timestamp), validation, remote detection, and auto-commit for Spec Kit.
## Overview
This extension provides Git operations as an optional, self-contained module. It manages:
- **Repository initialization** with configurable commit messages
- **Feature branch creation** with sequential (`001-feature-name`) or timestamp (`20260319-143022-feature-name`) numbering
- **Branch validation** to ensure branches follow naming conventions
- **Git remote detection** for GitHub integration (e.g., issue creation)
- **Auto-commit** after core commands (configurable per-command with custom messages)
## Commands
| Command | Description |
|---------|-------------|
| `speckit.git.initialize` | Initialize a Git repository with a configurable commit message |
| `speckit.git.feature` | Create a feature branch with sequential or timestamp numbering |
| `speckit.git.validate` | Validate current branch follows feature branch naming conventions |
| `speckit.git.remote` | Detect Git remote URL for GitHub integration |
| `speckit.git.commit` | Auto-commit changes (configurable per-command enable/disable and messages) |
## Hooks
| Event | Command | Optional | Description |
|-------|---------|----------|-------------|
| `before_constitution` | `speckit.git.initialize` | No | Init git repo before constitution |
| `before_specify` | `speckit.git.feature` | No | Create feature branch before specification |
| `before_clarify` | `speckit.git.commit` | Yes | Commit outstanding changes before clarification |
| `before_plan` | `speckit.git.commit` | Yes | Commit outstanding changes before planning |
| `before_tasks` | `speckit.git.commit` | Yes | Commit outstanding changes before task generation |
| `before_implement` | `speckit.git.commit` | Yes | Commit outstanding changes before implementation |
| `before_checklist` | `speckit.git.commit` | Yes | Commit outstanding changes before checklist |
| `before_analyze` | `speckit.git.commit` | Yes | Commit outstanding changes before analysis |
| `before_taskstoissues` | `speckit.git.commit` | Yes | Commit outstanding changes before issue sync |
| `after_constitution` | `speckit.git.commit` | Yes | Auto-commit after constitution update |
| `after_specify` | `speckit.git.commit` | Yes | Auto-commit after specification |
| `after_clarify` | `speckit.git.commit` | Yes | Auto-commit after clarification |
| `after_plan` | `speckit.git.commit` | Yes | Auto-commit after planning |
| `after_tasks` | `speckit.git.commit` | Yes | Auto-commit after task generation |
| `after_implement` | `speckit.git.commit` | Yes | Auto-commit after implementation |
| `after_checklist` | `speckit.git.commit` | Yes | Auto-commit after checklist |
| `after_analyze` | `speckit.git.commit` | Yes | Auto-commit after analysis |
| `after_taskstoissues` | `speckit.git.commit` | Yes | Auto-commit after issue sync |
## Configuration
Configuration is stored in `.specify/extensions/git/git-config.yml`:
```yaml
# Branch numbering strategy: "sequential" or "timestamp"
branch_numbering: sequential
# Custom commit message for git init
init_commit_message: "[Spec Kit] Initial commit"
# Auto-commit per command (all disabled by default)
# Example: enable auto-commit after specify
auto_commit:
default: false
after_specify:
enabled: true
message: "[Spec Kit] Add specification"
```
## Installation
```bash
# Install the bundled git extension (no network required)
specify extension add git
```
## Disabling
```bash
# Disable the git extension (spec creation continues without branching)
specify extension disable git
# Re-enable it
specify extension enable git
```
## Graceful Degradation
When Git is not installed or the directory is not a Git repository:
- Spec directories are still created under `specs/`
- Branch creation is skipped with a warning
- Branch validation is skipped with a warning
- Remote detection returns empty results
## Scripts
The extension bundles cross-platform scripts:
- `scripts/bash/create-new-feature.sh` — Bash implementation
- `scripts/bash/git-common.sh` — Shared Git utilities (Bash)
- `scripts/powershell/create-new-feature.ps1` — PowerShell implementation
- `scripts/powershell/git-common.ps1` — Shared Git utilities (PowerShell)

View File

@@ -0,0 +1,48 @@
---
description: "Auto-commit changes after a Spec Kit command completes"
---
# Auto-Commit Changes
Automatically stage and commit all changes after a Spec Kit command completes.
## Behavior
This command is invoked as a hook after (or before) core commands. It:
1. Determines the event name from the hook context (e.g., if invoked as an `after_specify` hook, the event is `after_specify`; if `before_plan`, the event is `before_plan`)
2. Checks `.specify/extensions/git/git-config.yml` for the `auto_commit` section
3. Looks up the specific event key to see if auto-commit is enabled
4. Falls back to `auto_commit.default` if no event-specific key exists
5. Uses the per-command `message` if configured, otherwise a default message
6. If enabled and there are uncommitted changes, runs `git add .` + `git commit`
## Execution
Determine the event name from the hook that triggered this command, then run the script:
- **Bash**: `.specify/extensions/git/scripts/bash/auto-commit.sh <event_name>`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/auto-commit.ps1 <event_name>`
Replace `<event_name>` with the actual hook event (e.g., `after_specify`, `before_plan`, `after_implement`).
## Configuration
In `.specify/extensions/git/git-config.yml`:
```yaml
auto_commit:
default: false # Global toggle — set true to enable for all commands
after_specify:
enabled: true # Override per-command
message: "[Spec Kit] Add specification"
after_plan:
enabled: false
message: "[Spec Kit] Add implementation plan"
```
## Graceful Degradation
- If Git is not available or the current directory is not a repository: skips with a warning
- If no config file exists: skips (disabled by default)
- If no changes to commit: skips with a message

View File

@@ -0,0 +1,67 @@
---
description: "Create a feature branch with sequential or timestamp numbering"
---
# Create Feature Branch
Create and switch to a new git feature branch for the given specification. This command handles **branch creation only** — the spec directory and files are created by the core `/speckit.specify` workflow.
## User Input
```text
$ARGUMENTS
```
You **MUST** consider the user input before proceeding (if not empty).
## Environment Variable Override
If the user explicitly provided `GIT_BRANCH_NAME` (e.g., via environment variable, argument, or in their request), pass it through to the script by setting the `GIT_BRANCH_NAME` environment variable before invoking the script. When `GIT_BRANCH_NAME` is set:
- The script uses the exact value as the branch name, bypassing all prefix/suffix generation
- `--short-name`, `--number`, and `--timestamp` flags are ignored
- `FEATURE_NUM` is extracted from the name if it starts with a numeric prefix, otherwise set to the full branch name
## Prerequisites
- Verify Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, warn the user and skip branch creation
## Branch Numbering Mode
Determine the branch numbering strategy by checking configuration in this order:
1. Check `.specify/extensions/git/git-config.yml` for `branch_numbering` value
2. Check `.specify/init-options.json` for `branch_numbering` value (backward compatibility)
3. Default to `sequential` if neither exists
## Execution
Generate a concise short name (2-4 words) for the branch:
- Analyze the feature description and extract the most meaningful keywords
- Use action-noun format when possible (e.g., "add-user-auth", "fix-payment-bug")
- Preserve technical terms and acronyms (OAuth2, API, JWT, etc.)
Run the appropriate script based on your platform:
- **Bash**: `.specify/extensions/git/scripts/bash/create-new-feature.sh --json --short-name "<short-name>" "<feature description>"`
- **Bash (timestamp)**: `.specify/extensions/git/scripts/bash/create-new-feature.sh --json --timestamp --short-name "<short-name>" "<feature description>"`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/create-new-feature.ps1 -Json -ShortName "<short-name>" "<feature description>"`
- **PowerShell (timestamp)**: `.specify/extensions/git/scripts/powershell/create-new-feature.ps1 -Json -Timestamp -ShortName "<short-name>" "<feature description>"`
**IMPORTANT**:
- Do NOT pass `--number` — the script determines the correct next number automatically
- Always include the JSON flag (`--json` for Bash, `-Json` for PowerShell) so the output can be parsed reliably
- You must only ever run this script once per feature
- The JSON output will contain `BRANCH_NAME` and `FEATURE_NUM`
## Graceful Degradation
If Git is not installed or the current directory is not a Git repository:
- Branch creation is skipped with a warning: `[specify] Warning: Git repository not detected; skipped branch creation`
- The script still outputs `BRANCH_NAME` and `FEATURE_NUM` so the caller can reference them
## Output
The script outputs JSON with:
- `BRANCH_NAME`: The branch name (e.g., `003-user-auth` or `20260319-143022-user-auth`)
- `FEATURE_NUM`: The numeric or timestamp prefix used

View File

@@ -0,0 +1,49 @@
---
description: "Initialize a Git repository with an initial commit"
---
# Initialize Git Repository
Initialize a Git repository in the current project directory if one does not already exist.
## Execution
Run the appropriate script from the project root:
- **Bash**: `.specify/extensions/git/scripts/bash/initialize-repo.sh`
- **PowerShell**: `.specify/extensions/git/scripts/powershell/initialize-repo.ps1`
If the extension scripts are not found, fall back to:
- **Bash**: `git init && git add . && git commit -m "Initial commit from Specify template"`
- **PowerShell**: `git init; git add .; git commit -m "Initial commit from Specify template"`
The script handles all checks internally:
- Skips if Git is not available
- Skips if already inside a Git repository
- Runs `git init`, `git add .`, and `git commit` with an initial commit message
## Customization
Replace the script to add project-specific Git initialization steps:
- Custom `.gitignore` templates
- Default branch naming (`git config init.defaultBranch`)
- Git LFS setup
- Git hooks installation
- Commit signing configuration
- Git Flow initialization
## Output
On success:
- `✓ Git repository initialized`
## Graceful Degradation
If Git is not installed:
- Warn the user
- Skip repository initialization
- The project continues to function without Git (specs can still be created under `specs/`)
If Git is installed but `git init`, `git add .`, or `git commit` fails:
- Surface the error to the user
- Stop this command rather than continuing with a partially initialized repository

View File

@@ -0,0 +1,45 @@
---
description: "Detect Git remote URL for GitHub integration"
---
# Detect Git Remote URL
Detect the Git remote URL for integration with GitHub services (e.g., issue creation).
## Prerequisites
- Check if Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, output a warning and return empty:
```
[specify] Warning: Git repository not detected; cannot determine remote URL
```
## Execution
Run the following command to get the remote URL:
```bash
git config --get remote.origin.url
```
## Output
Parse the remote URL and determine:
1. **Repository owner**: Extract from the URL (e.g., `github` from `https://github.com/github/spec-kit.git`)
2. **Repository name**: Extract from the URL (e.g., `spec-kit` from `https://github.com/github/spec-kit.git`)
3. **Is GitHub**: Whether the remote points to a GitHub repository
Supported URL formats:
- HTTPS: `https://github.com/<owner>/<repo>.git`
- SSH: `git@github.com:<owner>/<repo>.git`
> [!CAUTION]
> ONLY report a GitHub repository if the remote URL actually points to github.com.
> Do NOT assume the remote is GitHub if the URL format doesn't match.
## Graceful Degradation
If Git is not installed, the directory is not a Git repository, or no remote is configured:
- Return an empty result
- Do NOT error — other workflows should continue without Git remote information

View File

@@ -0,0 +1,49 @@
---
description: "Validate current branch follows feature branch naming conventions"
---
# Validate Feature Branch
Validate that the current Git branch follows the expected feature branch naming conventions.
## Prerequisites
- Check if Git is available by running `git rev-parse --is-inside-work-tree 2>/dev/null`
- If Git is not available, output a warning and skip validation:
```
[specify] Warning: Git repository not detected; skipped branch validation
```
## Validation Rules
Get the current branch name:
```bash
git rev-parse --abbrev-ref HEAD
```
The branch name must match one of these patterns:
1. **Sequential**: `^[0-9]{3,}-` (e.g., `001-feature-name`, `042-fix-bug`, `1000-big-feature`)
2. **Timestamp**: `^[0-9]{8}-[0-9]{6}-` (e.g., `20260319-143022-feature-name`)
## Execution
If on a feature branch (matches either pattern):
- Output: `✓ On feature branch: <branch-name>`
- Check if the corresponding spec directory exists under `specs/`:
- For sequential branches, look for `specs/<prefix>-*` where prefix matches the numeric portion
- For timestamp branches, look for `specs/<prefix>-*` where prefix matches the `YYYYMMDD-HHMMSS` portion
- If spec directory exists: `✓ Spec directory found: <path>`
- If spec directory missing: `⚠ No spec directory found for prefix <prefix>`
If NOT on a feature branch:
- Output: `✗ Not on a feature branch. Current branch: <branch-name>`
- Output: `Feature branches should be named like: 001-feature-name or 20260319-143022-feature-name`
## Graceful Degradation
If Git is not installed or the directory is not a Git repository:
- Check the `SPECIFY_FEATURE` environment variable as a fallback
- If set, validate that value against the naming patterns
- If not set, skip validation with a warning

View File

@@ -0,0 +1,62 @@
# Git Branching Workflow Extension Configuration
# Copied to .specify/extensions/git/git-config.yml on install
# Branch numbering strategy: "sequential" (001, 002, ...) or "timestamp" (YYYYMMDD-HHMMSS)
branch_numbering: sequential
# Commit message used by `git commit` during repository initialization
init_commit_message: "[Spec Kit] Initial commit"
# Auto-commit before/after core commands.
# Set "default" to enable for all commands, then override per-command.
# Each key can be true/false. Message is customizable per-command.
auto_commit:
default: false
before_clarify:
enabled: false
message: "[Spec Kit] Save progress before clarification"
before_plan:
enabled: false
message: "[Spec Kit] Save progress before planning"
before_tasks:
enabled: false
message: "[Spec Kit] Save progress before task generation"
before_implement:
enabled: false
message: "[Spec Kit] Save progress before implementation"
before_checklist:
enabled: false
message: "[Spec Kit] Save progress before checklist"
before_analyze:
enabled: false
message: "[Spec Kit] Save progress before analysis"
before_taskstoissues:
enabled: false
message: "[Spec Kit] Save progress before issue sync"
after_constitution:
enabled: false
message: "[Spec Kit] Add project constitution"
after_specify:
enabled: false
message: "[Spec Kit] Add specification"
after_clarify:
enabled: false
message: "[Spec Kit] Clarify specification"
after_plan:
enabled: false
message: "[Spec Kit] Add implementation plan"
after_tasks:
enabled: false
message: "[Spec Kit] Add tasks"
after_implement:
enabled: false
message: "[Spec Kit] Implementation progress"
after_checklist:
enabled: false
message: "[Spec Kit] Add checklist"
after_analyze:
enabled: false
message: "[Spec Kit] Add analysis report"
after_taskstoissues:
enabled: false
message: "[Spec Kit] Sync tasks to issues"

View File

@@ -0,0 +1,140 @@
schema_version: "1.0"
extension:
id: git
name: "Git Branching Workflow"
version: "1.0.0"
description: "Feature branch creation, numbering (sequential/timestamp), validation, and Git remote detection"
author: spec-kit-core
repository: https://github.com/github/spec-kit
license: MIT
requires:
speckit_version: ">=0.2.0"
tools:
- name: git
required: false
provides:
commands:
- name: speckit.git.feature
file: commands/speckit.git.feature.md
description: "Create a feature branch with sequential or timestamp numbering"
- name: speckit.git.validate
file: commands/speckit.git.validate.md
description: "Validate current branch follows feature branch naming conventions"
- name: speckit.git.remote
file: commands/speckit.git.remote.md
description: "Detect Git remote URL for GitHub integration"
- name: speckit.git.initialize
file: commands/speckit.git.initialize.md
description: "Initialize a Git repository with an initial commit"
- name: speckit.git.commit
file: commands/speckit.git.commit.md
description: "Auto-commit changes after a Spec Kit command completes"
config:
- name: "git-config.yml"
template: "config-template.yml"
description: "Git branching configuration"
required: false
hooks:
before_constitution:
command: speckit.git.initialize
optional: false
description: "Initialize Git repository before constitution setup"
before_specify:
command: speckit.git.feature
optional: false
description: "Create feature branch before specification"
before_clarify:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before clarification?"
description: "Auto-commit before spec clarification"
before_plan:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before planning?"
description: "Auto-commit before implementation planning"
before_tasks:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before task generation?"
description: "Auto-commit before task generation"
before_implement:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before implementation?"
description: "Auto-commit before implementation"
before_checklist:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before checklist?"
description: "Auto-commit before checklist generation"
before_analyze:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before analysis?"
description: "Auto-commit before analysis"
before_taskstoissues:
command: speckit.git.commit
optional: true
prompt: "Commit outstanding changes before issue sync?"
description: "Auto-commit before tasks-to-issues conversion"
after_constitution:
command: speckit.git.commit
optional: true
prompt: "Commit constitution changes?"
description: "Auto-commit after constitution update"
after_specify:
command: speckit.git.commit
optional: true
prompt: "Commit specification changes?"
description: "Auto-commit after specification"
after_clarify:
command: speckit.git.commit
optional: true
prompt: "Commit clarification changes?"
description: "Auto-commit after spec clarification"
after_plan:
command: speckit.git.commit
optional: true
prompt: "Commit plan changes?"
description: "Auto-commit after implementation planning"
after_tasks:
command: speckit.git.commit
optional: true
prompt: "Commit task changes?"
description: "Auto-commit after task generation"
after_implement:
command: speckit.git.commit
optional: true
prompt: "Commit implementation changes?"
description: "Auto-commit after implementation"
after_checklist:
command: speckit.git.commit
optional: true
prompt: "Commit checklist changes?"
description: "Auto-commit after checklist generation"
after_analyze:
command: speckit.git.commit
optional: true
prompt: "Commit analysis results?"
description: "Auto-commit after analysis"
after_taskstoissues:
command: speckit.git.commit
optional: true
prompt: "Commit after syncing issues?"
description: "Auto-commit after tasks-to-issues conversion"
tags:
- "git"
- "branching"
- "workflow"
config:
defaults:
branch_numbering: sequential
init_commit_message: "[Spec Kit] Initial commit"

View File

@@ -0,0 +1,62 @@
# Git Branching Workflow Extension Configuration
# Copied to .specify/extensions/git/git-config.yml on install
# Branch numbering strategy: "sequential" (001, 002, ...) or "timestamp" (YYYYMMDD-HHMMSS)
branch_numbering: sequential
# Commit message used by `git commit` during repository initialization
init_commit_message: "[Spec Kit] Initial commit"
# Auto-commit before/after core commands.
# Set "default: true" to enable auto-commit for all commands, then override
# individual commands below. Each command key takes an "enabled" flag
# (true/false) and a customizable commit "message".
auto_commit:
default: false
before_clarify:
enabled: false
message: "[Spec Kit] Save progress before clarification"
before_plan:
enabled: false
message: "[Spec Kit] Save progress before planning"
before_tasks:
enabled: false
message: "[Spec Kit] Save progress before task generation"
before_implement:
enabled: false
message: "[Spec Kit] Save progress before implementation"
before_checklist:
enabled: false
message: "[Spec Kit] Save progress before checklist"
before_analyze:
enabled: false
message: "[Spec Kit] Save progress before analysis"
before_taskstoissues:
enabled: false
message: "[Spec Kit] Save progress before issue sync"
after_constitution:
enabled: false
message: "[Spec Kit] Add project constitution"
after_specify:
enabled: false
message: "[Spec Kit] Add specification"
after_clarify:
enabled: false
message: "[Spec Kit] Clarify specification"
after_plan:
enabled: false
message: "[Spec Kit] Add implementation plan"
after_tasks:
enabled: false
message: "[Spec Kit] Add tasks"
after_implement:
enabled: true
message: "[Spec Kit] Implementation progress"
after_checklist:
enabled: false
message: "[Spec Kit] Add checklist"
after_analyze:
enabled: false
message: "[Spec Kit] Add analysis report"
after_taskstoissues:
enabled: false
message: "[Spec Kit] Sync tasks to issues"

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env bash
# Git extension: auto-commit.sh
# Automatically commit changes after a Spec Kit command completes.
# Checks per-command config keys in git-config.yml before committing.
#
# Usage: auto-commit.sh <event_name>
# e.g.: auto-commit.sh after_specify
set -e

# Lifecycle event being handled, e.g. "after_specify" or "before_plan".
EVENT_NAME="${1:-}"
if [ -z "$EVENT_NAME" ]; then
    echo "Usage: $0 <event_name>" >&2
    exit 1
fi

# Absolute directory of this script (CDPATH cleared so `cd` prints nothing).
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Walk upward from $1 until a directory containing .specify or .git is found;
# print it and return 0, or return 1 once the filesystem root is reached.
_find_project_root() {
    local dir="$1"
    while [ "$dir" != "/" ]; do
        if [ -d "$dir/.specify" ] || [ -d "$dir/.git" ]; then
            echo "$dir"
            return 0
        fi
        dir="$(dirname "$dir")"
    done
    return 1
}

REPO_ROOT=$(_find_project_root "$SCRIPT_DIR") || REPO_ROOT="$(pwd)"
cd "$REPO_ROOT"

# Check if git is available. Auto-commit is best-effort: a missing git binary
# or a non-repository directory produces a warning and a clean exit (0).
if ! command -v git >/dev/null 2>&1; then
    echo "[specify] Warning: Git not found; skipped auto-commit" >&2
    exit 0
fi
if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
    echo "[specify] Warning: Not a Git repository; skipped auto-commit" >&2
    exit 0
fi

# Read per-command config from git-config.yml
_config_file="$REPO_ROOT/.specify/extensions/git/git-config.yml"
_enabled=false
_commit_msg=""
if [ -f "$_config_file" ]; then
    # Parse the auto_commit section for this event.
    # Look for auto_commit.<event_name>.enabled and .message
    # Also check auto_commit.default as fallback.
    # NOTE: this is a hand-rolled, indentation-sensitive scan, not a real YAML
    # parser; it assumes 2-space indent for command keys and 4-space indent
    # for their enabled/message children.
    _in_auto_commit=false
    _in_event=false
    _default_enabled=false
    while IFS= read -r _line; do
        # Detect auto_commit: section
        if echo "$_line" | grep -q '^auto_commit:'; then
            _in_auto_commit=true
            _in_event=false
            continue
        fi
        # Exit auto_commit section on next top-level key
        if $_in_auto_commit && echo "$_line" | grep -Eq '^[a-z]'; then
            break
        fi
        if $_in_auto_commit; then
            # Check default key
            if echo "$_line" | grep -Eq "^[[:space:]]+default:[[:space:]]"; then
                _val=$(echo "$_line" | sed 's/^[^:]*:[[:space:]]*//' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
                [ "$_val" = "true" ] && _default_enabled=true
            fi
            # Detect our event subsection
            if echo "$_line" | grep -Eq "^[[:space:]]+${EVENT_NAME}:"; then
                _in_event=true
                continue
            fi
            # Inside our event subsection
            if $_in_event; then
                # Exit on next sibling key (same indent level as event name)
                if echo "$_line" | grep -Eq '^[[:space:]]{2}[a-z]' && ! echo "$_line" | grep -Eq '^[[:space:]]{4}'; then
                    _in_event=false
                    continue
                fi
                # NOTE(review): the enabled/message matches below are
                # unanchored, so a message value that itself contains
                # "enabled:" could be misparsed — confirm this is acceptable
                # for the simple config format.
                if echo "$_line" | grep -Eq '[[:space:]]+enabled:'; then
                    _val=$(echo "$_line" | sed 's/^[^:]*:[[:space:]]*//' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]')
                    [ "$_val" = "true" ] && _enabled=true
                    [ "$_val" = "false" ] && _enabled=false
                fi
                if echo "$_line" | grep -Eq '[[:space:]]+message:'; then
                    _commit_msg=$(echo "$_line" | sed 's/^[^:]*:[[:space:]]*//' | sed 's/^["'\'']//' | sed 's/["'\'']*$//')
                fi
            fi
        fi
    done < "$_config_file"
    # If event-specific key not found, use default
    if [ "$_enabled" = "false" ] && [ "$_default_enabled" = "true" ]; then
        # Only use default if the event wasn't explicitly set to false
        # Check if event section existed at all
        if ! grep -q "^[[:space:]]*${EVENT_NAME}:" "$_config_file" 2>/dev/null; then
            _enabled=true
        fi
    fi
else
    # No config file — auto-commit disabled by default
    exit 0
fi
if [ "$_enabled" != "true" ]; then
    exit 0
fi

# Check if there are changes to commit: tracked modifications, staged changes,
# or untracked (non-ignored) files.
if git diff --quiet HEAD 2>/dev/null && git diff --cached --quiet 2>/dev/null && [ -z "$(git ls-files --others --exclude-standard 2>/dev/null)" ]; then
    echo "[specify] No changes to commit after $EVENT_NAME" >&2
    exit 0
fi

# Derive a human-readable command name from the event
# e.g., after_specify -> specify, before_plan -> plan
_command_name=$(echo "$EVENT_NAME" | sed 's/^after_//' | sed 's/^before_//')
_phase=$(echo "$EVENT_NAME" | grep -q '^before_' && echo 'before' || echo 'after')

# Use custom message if configured, otherwise default
if [ -z "$_commit_msg" ]; then
    _commit_msg="[Spec Kit] Auto-commit ${_phase} ${_command_name}"
fi

# Stage and commit
_git_out=$(git add . 2>&1) || { echo "[specify] Error: git add failed: $_git_out" >&2; exit 1; }
_git_out=$(git commit -q -m "$_commit_msg" 2>&1) || { echo "[specify] Error: git commit failed: $_git_out" >&2; exit 1; }
echo "[OK] Changes committed ${_phase} ${_command_name}" >&2

View File

@@ -0,0 +1,453 @@
#!/usr/bin/env bash
# Git extension: create-new-feature.sh
# Adapted from core scripts/bash/create-new-feature.sh for extension layout.
# Sources common.sh from the project's installed scripts, falling back to
# git-common.sh for minimal git helpers.
set -e

# Flags and option values collected from the command line.
JSON_MODE=false
DRY_RUN=false
ALLOW_EXISTING=false
SHORT_NAME=""
BRANCH_NUMBER=""
USE_TIMESTAMP=false
ARGS=()

# Index-based option loop: "${!i}" is indirect expansion of positional
# parameter i, which lets value-taking options consume the next argument.
i=1
while [ $i -le $# ]; do
    arg="${!i}"
    case "$arg" in
        --json)
            JSON_MODE=true
            ;;
        --dry-run)
            DRY_RUN=true
            ;;
        --allow-existing-branch)
            ALLOW_EXISTING=true
            ;;
        --short-name)
            # Requires a value, and the value must not look like another option.
            if [ $((i + 1)) -gt $# ]; then
                echo 'Error: --short-name requires a value' >&2
                exit 1
            fi
            i=$((i + 1))
            next_arg="${!i}"
            if [[ "$next_arg" == --* ]]; then
                echo 'Error: --short-name requires a value' >&2
                exit 1
            fi
            SHORT_NAME="$next_arg"
            ;;
        --number)
            if [ $((i + 1)) -gt $# ]; then
                echo 'Error: --number requires a value' >&2
                exit 1
            fi
            i=$((i + 1))
            next_arg="${!i}"
            if [[ "$next_arg" == --* ]]; then
                echo 'Error: --number requires a value' >&2
                exit 1
            fi
            BRANCH_NUMBER="$next_arg"
            if [[ ! "$BRANCH_NUMBER" =~ ^[0-9]+$ ]]; then
                echo 'Error: --number must be a non-negative integer' >&2
                exit 1
            fi
            ;;
        --timestamp)
            USE_TIMESTAMP=true
            ;;
        --help|-h)
            echo "Usage: $0 [--json] [--dry-run] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>"
            echo ""
            echo "Options:"
            echo " --json Output in JSON format"
            echo " --dry-run Compute branch name without creating the branch"
            echo " --allow-existing-branch Switch to branch if it already exists instead of failing"
            echo " --short-name <name> Provide a custom short name (2-4 words) for the branch"
            echo " --number N Specify branch number manually (overrides auto-detection)"
            echo " --timestamp Use timestamp prefix (YYYYMMDD-HHMMSS) instead of sequential numbering"
            echo " --help, -h Show this help message"
            echo ""
            echo "Environment variables:"
            echo " GIT_BRANCH_NAME Use this exact branch name, bypassing all prefix/suffix generation"
            echo ""
            echo "Examples:"
            echo " $0 'Add user authentication system' --short-name 'user-auth'"
            echo " $0 'Implement OAuth2 integration for API' --number 5"
            echo " $0 --timestamp --short-name 'user-auth' 'Add user authentication'"
            echo " GIT_BRANCH_NAME=my-branch $0 'feature description'"
            exit 0
            ;;
        *)
            # Any non-option argument is part of the feature description.
            ARGS+=("$arg")
            ;;
    esac
    i=$((i + 1))
done

FEATURE_DESCRIPTION="${ARGS[*]}"
if [ -z "$FEATURE_DESCRIPTION" ]; then
    echo "Usage: $0 [--json] [--dry-run] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>" >&2
    exit 1
fi
# Trim whitespace and validate description is not empty
# NOTE(review): xargs also collapses internal whitespace and interprets
# quote characters in the description — confirm that is intended.
FEATURE_DESCRIPTION=$(echo "$FEATURE_DESCRIPTION" | xargs)
if [ -z "$FEATURE_DESCRIPTION" ]; then
    echo "Error: Feature description cannot be empty or contain only whitespace" >&2
    exit 1
fi
# Return the largest sequential feature number among subdirectories of the
# given specs directory. Sequential names have a numeric prefix of 3+ digits
# ("001-foo", "0042-bar"); timestamp-style names (YYYYMMDD-HHMMSS-*) are
# ignored. Prints 0 when the directory is missing or holds no matches.
get_highest_from_specs() {
    local specs_dir="$1"
    local best=0
    local entry name num
    if [ -d "$specs_dir" ]; then
        for entry in "$specs_dir"/*; do
            # Skip plain files and non-matching names early.
            [ -d "$entry" ] || continue
            name=$(basename "$entry")
            if echo "$name" | grep -Eq '^[0-9]{3,}-'; then
                if ! echo "$name" | grep -Eq '^[0-9]{8}-[0-9]{6}-'; then
                    num=$(echo "$name" | grep -Eo '^[0-9]+')
                    # Force base-10 so zero-padded prefixes are not read as octal.
                    num=$((10#$num))
                    if [ "$num" -gt "$best" ]; then
                        best=$num
                    fi
                fi
            fi
        done
    fi
    echo "$best"
}
# Return the largest sequential feature number among all local and
# remote-tracking branches. The sed strips the "* " current-branch marker
# and the "remotes/<name>/" prefix before delegating the numeric scan to
# _extract_highest_number. Prints 0 when git fails or nothing matches.
get_highest_from_branches() {
    git branch -a 2>/dev/null | sed 's/^[* ]*//; s|^remotes/[^/]*/||' | _extract_highest_number
}
# Read ref/directory names from stdin (one per line) and print the largest
# sequential numeric prefix found. Names must start with 3+ digits and a
# hyphen; timestamp-style prefixes (YYYYMMDD-HHMMSS-) are excluded.
# Prints 0 when no name qualifies.
_extract_highest_number() {
    local best=0
    local name value
    while IFS= read -r name; do
        # Ignore blank lines and anything without a sequential prefix.
        if [ -z "$name" ]; then
            continue
        fi
        if echo "$name" | grep -Eq '^[0-9]{3,}-' && ! echo "$name" | grep -Eq '^[0-9]{8}-[0-9]{6}-'; then
            value=$(echo "$name" | grep -Eo '^[0-9]+' || echo "0")
            # Base-10 conversion guards against octal parsing of "008" etc.
            value=$((10#$value))
            if [ "$value" -gt "$best" ]; then
                best=$value
            fi
        fi
    done
    echo "$best"
}
# Function to get highest number from remote branches without fetching (side-effect-free)
# Uses `git ls-remote --heads` per configured remote, so no local refs are
# updated. GIT_TERMINAL_PROMPT=0 prevents interactive credential prompts;
# unreachable remotes are silently treated as contributing nothing.
get_highest_from_remote_refs() {
    local highest=0
    for remote in $(git remote 2>/dev/null); do
        local remote_highest
        remote_highest=$(GIT_TERMINAL_PROMPT=0 git ls-remote --heads "$remote" 2>/dev/null | sed 's|.*refs/heads/||' | _extract_highest_number)
        if [ "$remote_highest" -gt "$highest" ]; then
            highest=$remote_highest
        fi
    done
    echo "$highest"
}
# Function to check existing branches and return next available number.
# Considers local branches, remote branches, and specs/ directories, and
# prints max+1. When $2 (skip_fetch) is "true" the remote check is done via
# ls-remote instead of `git fetch`, keeping the call side-effect-free
# (used by --dry-run).
check_existing_branches() {
    local specs_dir="$1"
    local skip_fetch="${2:-false}"
    if [ "$skip_fetch" = true ]; then
        local highest_remote=$(get_highest_from_remote_refs)
        local highest_branch=$(get_highest_from_branches)
        if [ "$highest_remote" -gt "$highest_branch" ]; then
            highest_branch=$highest_remote
        fi
    else
        # Best-effort fetch so remote branches are visible to `git branch -a`.
        git fetch --all --prune >/dev/null 2>&1 || true
        # `local` is function-scoped in bash, so this declaration is still
        # visible after the if/else block ends.
        local highest_branch=$(get_highest_from_branches)
    fi
    local highest_spec=$(get_highest_from_specs "$specs_dir")
    local max_num=$highest_branch
    if [ "$highest_spec" -gt "$max_num" ]; then
        max_num=$highest_spec
    fi
    echo $((max_num + 1))
}
# Normalize a string into a branch-safe slug: lowercase, every run of
# non-alphanumeric characters collapsed to a single hyphen, and hyphens
# trimmed from both ends.
#
# Fix: the previous pipeline used `sed 's/-\+/-/g'`, which relies on the
# GNU-only BRE `\+` operator; on BSD/macOS sed it matches a literal "+" and
# hyphen runs were never collapsed ("user--auth" stayed "user--auth").
# `tr -s '-'` is POSIX and behaves identically on both platforms.
clean_branch_name() {
    local name="$1"
    echo "$name" \
        | tr '[:upper:]' '[:lower:]' \
        | sed 's/[^a-z0-9]/-/g' \
        | tr -s '-' \
        | sed 's/^-//; s/-$//'
}
# ---------------------------------------------------------------------------
# Source common.sh for resolve_template, json_escape, get_repo_root, has_git.
#
# Search locations in priority order:
# 1. .specify/scripts/bash/common.sh under the project root (installed project)
# 2. scripts/bash/common.sh under the project root (source checkout fallback)
# 3. git-common.sh next to this script (minimal fallback — lacks resolve_template)
# ---------------------------------------------------------------------------
# Absolute directory of this script (CDPATH cleared so `cd` prints nothing).
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Walk upward from the given directory until one containing a .specify or
# .git entry is found; print that directory and succeed, or fail once the
# filesystem root is reached without a match.
_find_project_root() {
    local current="$1"
    until [ "$current" = "/" ]; do
        if [ -d "$current/.specify" ]; then
            echo "$current"
            return 0
        fi
        if [ -d "$current/.git" ]; then
            echo "$current"
            return 0
        fi
        current="$(dirname "$current")"
    done
    return 1
}
# Locate and source the shared helper library. Priority: installed project
# scripts (.specify/), then a source checkout (scripts/), then the minimal
# git-common.sh shipped next to this script. Abort if none is found.
_common_loaded=false
_PROJECT_ROOT=$(_find_project_root "$SCRIPT_DIR") || true
if [ -n "$_PROJECT_ROOT" ] && [ -f "$_PROJECT_ROOT/.specify/scripts/bash/common.sh" ]; then
    source "$_PROJECT_ROOT/.specify/scripts/bash/common.sh"
    _common_loaded=true
elif [ -n "$_PROJECT_ROOT" ] && [ -f "$_PROJECT_ROOT/scripts/bash/common.sh" ]; then
    source "$_PROJECT_ROOT/scripts/bash/common.sh"
    _common_loaded=true
elif [ -f "$SCRIPT_DIR/git-common.sh" ]; then
    source "$SCRIPT_DIR/git-common.sh"
    _common_loaded=true
fi
if [ "$_common_loaded" != "true" ]; then
    echo "Error: Could not locate common.sh or git-common.sh. Please ensure the Specify core scripts are installed." >&2
    exit 1
fi

# Resolve repository root
# Prefer the sourced helper's get_repo_root, then git itself, then the
# directory-walk result from above.
if type get_repo_root >/dev/null 2>&1; then
    REPO_ROOT=$(get_repo_root)
elif git rev-parse --show-toplevel >/dev/null 2>&1; then
    REPO_ROOT=$(git rev-parse --show-toplevel)
elif [ -n "$_PROJECT_ROOT" ]; then
    REPO_ROOT="$_PROJECT_ROOT"
else
    echo "Error: Could not determine repository root." >&2
    exit 1
fi

# Check if git is available at this repo root
# Uses the sourced has_git helper when present, otherwise asks git directly.
if type has_git >/dev/null 2>&1; then
    if has_git "$REPO_ROOT"; then
        HAS_GIT=true
    else
        HAS_GIT=false
    fi
elif git -C "$REPO_ROOT" rev-parse --is-inside-work-tree >/dev/null 2>&1; then
    HAS_GIT=true
else
    HAS_GIT=false
fi

cd "$REPO_ROOT"
SPECS_DIR="$REPO_ROOT/specs"
# Function to generate branch name with stop word filtering.
# Produces a short hyphenated slug (up to 3 words, or 4 when exactly 4
# survive filtering) from a free-text feature description.
generate_branch_name() {
    local description="$1"
    # English stop words excluded from the slug so only meaningful words remain.
    local stop_words="^(i|a|an|the|to|for|of|in|on|at|by|with|from|is|are|was|were|be|been|being|have|has|had|do|does|did|will|would|should|could|can|may|might|must|shall|this|that|these|those|my|your|our|their|want|need|add|get|set)$"
    local clean_name=$(echo "$description" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/ /g')
    local meaningful_words=()
    for word in $clean_name; do
        [ -z "$word" ] && continue
        if ! echo "$word" | grep -qiE "$stop_words"; then
            if [ ${#word} -ge 3 ]; then
                meaningful_words+=("$word")
            elif echo "$description" | grep -qw -- "${word^^}"; then
                # Keep a short word only when the original text contains it in
                # ALL CAPS (likely an acronym, e.g. "API"). ${word^^} is a
                # bash 4+ uppercasing expansion.
                meaningful_words+=("$word")
            fi
        fi
    done
    if [ ${#meaningful_words[@]} -gt 0 ]; then
        # Use up to 3 words; allow all 4 when exactly 4 survived filtering.
        local max_words=3
        if [ ${#meaningful_words[@]} -eq 4 ]; then max_words=4; fi
        local result=""
        local count=0
        for word in "${meaningful_words[@]}"; do
            if [ $count -ge $max_words ]; then break; fi
            if [ -n "$result" ]; then result="$result-"; fi
            result="$result$word"
            count=$((count + 1))
        done
        echo "$result"
    else
        # Nothing survived filtering: fall back to the first 3 hyphenated
        # tokens of the cleaned description.
        local cleaned=$(clean_branch_name "$description")
        echo "$cleaned" | tr '-' '\n' | grep -v '^$' | head -3 | tr '\n' '-' | sed 's/-$//'
    fi
}
# Check for GIT_BRANCH_NAME env var override (exact branch name, no prefix/suffix)
if [ -n "${GIT_BRANCH_NAME:-}" ]; then
    BRANCH_NAME="$GIT_BRANCH_NAME"
    # Extract FEATURE_NUM from the branch name if it starts with a numeric prefix
    # Check timestamp pattern first (YYYYMMDD-HHMMSS-) since it also matches the simpler ^[0-9]+ pattern
    if echo "$BRANCH_NAME" | grep -Eq '^[0-9]{8}-[0-9]{6}-'; then
        FEATURE_NUM=$(echo "$BRANCH_NAME" | grep -Eo '^[0-9]{8}-[0-9]{6}')
        BRANCH_SUFFIX="${BRANCH_NAME#${FEATURE_NUM}-}"
    elif echo "$BRANCH_NAME" | grep -Eq '^[0-9]+-'; then
        FEATURE_NUM=$(echo "$BRANCH_NAME" | grep -Eo '^[0-9]+')
        BRANCH_SUFFIX="${BRANCH_NAME#${FEATURE_NUM}-}"
    else
        # No numeric prefix: reuse the whole name for both fields.
        FEATURE_NUM="$BRANCH_NAME"
        BRANCH_SUFFIX="$BRANCH_NAME"
    fi
else
    # Generate branch name
    if [ -n "$SHORT_NAME" ]; then
        BRANCH_SUFFIX=$(clean_branch_name "$SHORT_NAME")
    else
        BRANCH_SUFFIX=$(generate_branch_name "$FEATURE_DESCRIPTION")
    fi
    # Warn if --number and --timestamp are both specified
    if [ "$USE_TIMESTAMP" = true ] && [ -n "$BRANCH_NUMBER" ]; then
        >&2 echo "[specify] Warning: --number is ignored when --timestamp is used"
        BRANCH_NUMBER=""
    fi
    # Determine branch prefix
    if [ "$USE_TIMESTAMP" = true ]; then
        FEATURE_NUM=$(date +%Y%m%d-%H%M%S)
        BRANCH_NAME="${FEATURE_NUM}-${BRANCH_SUFFIX}"
    else
        # Auto-number when --number was not given. Dry runs use the
        # fetch-free path; non-git repos scan specs/ only.
        if [ -z "$BRANCH_NUMBER" ]; then
            if [ "$DRY_RUN" = true ] && [ "$HAS_GIT" = true ]; then
                BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR" true)
            elif [ "$DRY_RUN" = true ]; then
                HIGHEST=$(get_highest_from_specs "$SPECS_DIR")
                BRANCH_NUMBER=$((HIGHEST + 1))
            elif [ "$HAS_GIT" = true ]; then
                BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR")
            else
                HIGHEST=$(get_highest_from_specs "$SPECS_DIR")
                BRANCH_NUMBER=$((HIGHEST + 1))
            fi
        fi
        FEATURE_NUM=$(printf "%03d" "$((10#$BRANCH_NUMBER))")
        BRANCH_NAME="${FEATURE_NUM}-${BRANCH_SUFFIX}"
    fi
fi

# GitHub enforces a 244-byte limit on branch names
MAX_BRANCH_LENGTH=244
# Byte (not character) length, computed under the C locale.
_byte_length() { printf '%s' "$1" | LC_ALL=C wc -c | tr -d ' '; }
BRANCH_BYTE_LEN=$(_byte_length "$BRANCH_NAME")
if [ -n "${GIT_BRANCH_NAME:-}" ] && [ "$BRANCH_BYTE_LEN" -gt $MAX_BRANCH_LENGTH ]; then
    # An explicit override is never truncated; fail instead.
    >&2 echo "Error: GIT_BRANCH_NAME must be 244 bytes or fewer in UTF-8. Provided value is ${BRANCH_BYTE_LEN} bytes."
    exit 1
elif [ "$BRANCH_BYTE_LEN" -gt $MAX_BRANCH_LENGTH ]; then
    # Truncate the generated suffix to fit, dropping a trailing hyphen.
    PREFIX_LENGTH=$(( ${#FEATURE_NUM} + 1 ))
    MAX_SUFFIX_LENGTH=$((MAX_BRANCH_LENGTH - PREFIX_LENGTH))
    TRUNCATED_SUFFIX=$(echo "$BRANCH_SUFFIX" | cut -c1-$MAX_SUFFIX_LENGTH)
    TRUNCATED_SUFFIX=$(echo "$TRUNCATED_SUFFIX" | sed 's/-$//')
    ORIGINAL_BRANCH_NAME="$BRANCH_NAME"
    BRANCH_NAME="${FEATURE_NUM}-${TRUNCATED_SUFFIX}"
    # NOTE(review): ${#var} counts characters, not bytes, so the "(N bytes)"
    # labels below can under-report for multi-byte names — confirm intent.
    >&2 echo "[specify] Warning: Branch name exceeded GitHub's 244-byte limit"
    >&2 echo "[specify] Original: $ORIGINAL_BRANCH_NAME (${#ORIGINAL_BRANCH_NAME} bytes)"
    >&2 echo "[specify] Truncated to: $BRANCH_NAME (${#BRANCH_NAME} bytes)"
fi
# Create (or switch to) the branch unless this is a dry run.
if [ "$DRY_RUN" != true ]; then
    if [ "$HAS_GIT" = true ]; then
        branch_create_error=""
        if ! branch_create_error=$(git checkout -q -b "$BRANCH_NAME" 2>&1); then
            # Creation failed: distinguish "branch already exists" from other errors.
            current_branch="$(git rev-parse --abbrev-ref HEAD 2>/dev/null || true)"
            if git branch --list "$BRANCH_NAME" | grep -q .; then
                if [ "$ALLOW_EXISTING" = true ]; then
                    if [ "$current_branch" = "$BRANCH_NAME" ]; then
                        : # Already on the target branch; nothing to do.
                    elif ! switch_branch_error=$(git checkout -q "$BRANCH_NAME" 2>&1); then
                        >&2 echo "Error: Failed to switch to existing branch '$BRANCH_NAME'. Please resolve any local changes or conflicts and try again."
                        if [ -n "$switch_branch_error" ]; then
                            >&2 printf '%s\n' "$switch_branch_error"
                        fi
                        exit 1
                    fi
                elif [ "$USE_TIMESTAMP" = true ]; then
                    >&2 echo "Error: Branch '$BRANCH_NAME' already exists. Rerun to get a new timestamp or use a different --short-name."
                    exit 1
                else
                    >&2 echo "Error: Branch '$BRANCH_NAME' already exists. Please use a different feature name or specify a different number with --number."
                    exit 1
                fi
            else
                >&2 echo "Error: Failed to create git branch '$BRANCH_NAME'."
                if [ -n "$branch_create_error" ]; then
                    >&2 printf '%s\n' "$branch_create_error"
                else
                    >&2 echo "Please check your git configuration and try again."
                fi
                exit 1
            fi
        fi
    else
        >&2 echo "[specify] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME"
    fi
    printf '# To persist: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME" >&2
fi

# Emit results. JSON mode prefers jq, falling back to printf with
# json_escape when the sourced helpers provide it (plain interpolation
# otherwise). Non-JSON mode prints KEY: value lines.
if $JSON_MODE; then
    if command -v jq >/dev/null 2>&1; then
        if [ "$DRY_RUN" = true ]; then
            jq -cn \
                --arg branch_name "$BRANCH_NAME" \
                --arg feature_num "$FEATURE_NUM" \
                '{BRANCH_NAME:$branch_name,FEATURE_NUM:$feature_num,DRY_RUN:true}'
        else
            jq -cn \
                --arg branch_name "$BRANCH_NAME" \
                --arg feature_num "$FEATURE_NUM" \
                '{BRANCH_NAME:$branch_name,FEATURE_NUM:$feature_num}'
        fi
    else
        if type json_escape >/dev/null 2>&1; then
            _je_branch=$(json_escape "$BRANCH_NAME")
            _je_num=$(json_escape "$FEATURE_NUM")
        else
            _je_branch="$BRANCH_NAME"
            _je_num="$FEATURE_NUM"
        fi
        if [ "$DRY_RUN" = true ]; then
            printf '{"BRANCH_NAME":"%s","FEATURE_NUM":"%s","DRY_RUN":true}\n' "$_je_branch" "$_je_num"
        else
            printf '{"BRANCH_NAME":"%s","FEATURE_NUM":"%s"}\n' "$_je_branch" "$_je_num"
        fi
    fi
else
    echo "BRANCH_NAME: $BRANCH_NAME"
    echo "FEATURE_NUM: $FEATURE_NUM"
    if [ "$DRY_RUN" != true ]; then
        printf '# To persist in your shell: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME"
    fi
fi

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# Git-specific common functions for the git extension.
# Extracted from scripts/bash/common.sh — contains only git-specific
# branch validation and detection logic.
# True when the given repo root (default: the current directory) contains a
# .git entry (directory, or file for worktrees/submodules), the git binary
# is on PATH, and git itself confirms the directory is inside a work tree.
has_git() {
    local root="${1:-$(pwd)}"
    if [ ! -d "$root/.git" ] && [ ! -f "$root/.git" ]; then
        return 1
    fi
    command -v git >/dev/null 2>&1 || return 1
    git -C "$root" rev-parse --is-inside-work-tree >/dev/null 2>&1
}
# Strip a single optional path segment (e.g. gitflow "feat/004-name" ->
# "004-name"). Only applies when the name is exactly two non-empty,
# slash-free segments; any other shape is printed unchanged.
spec_kit_effective_branch_name() {
    local raw="$1"
    case "$raw" in
        */*/*)
            # Two or more slashes: not a simple prefix/name pair.
            printf '%s\n' "$raw"
            ;;
        ?*/?*)
            # Exactly one slash with text on both sides: drop the prefix.
            printf '%s\n' "${raw#*/}"
            ;;
        *)
            printf '%s\n' "$raw"
            ;;
    esac
}
# Validate that a branch name matches the expected feature branch pattern.
# Accepts sequential (###-* with >=3 digits) or timestamp (YYYYMMDD-HHMMSS-*) formats.
# Logic aligned with scripts/bash/common.sh check_feature_branch after effective-name normalization.
# Args: $1 = raw branch name, $2 = "true" when inside a git repo.
# Returns 0 (with a warning for non-git repos) when acceptable, 1 otherwise.
check_feature_branch() {
    local raw="$1"
    local has_git_repo="$2"
    # For non-git repos, we can't enforce branch naming but still provide output
    if [[ "$has_git_repo" != "true" ]]; then
        echo "[specify] Warning: Git repository not detected; skipped branch validation" >&2
        return 0
    fi
    local branch
    branch=$(spec_kit_effective_branch_name "$raw")
    # Accept sequential prefix (3+ digits) but exclude malformed timestamps
    # Malformed: 7-or-8 digit date + 6-digit time with no trailing slug (e.g. "2026031-143022" or "20260319-143022")
    local is_sequential=false
    if [[ "$branch" =~ ^[0-9]{3,}- ]] && [[ ! "$branch" =~ ^[0-9]{7}-[0-9]{6}- ]] && [[ ! "$branch" =~ ^[0-9]{7,8}-[0-9]{6}$ ]]; then
        is_sequential=true
    fi
    # Valid when sequential, or a well-formed timestamp prefix followed by a slug.
    if [[ "$is_sequential" != "true" ]] && [[ ! "$branch" =~ ^[0-9]{8}-[0-9]{6}- ]]; then
        echo "ERROR: Not on a feature branch. Current branch: $raw" >&2
        echo "Feature branches should be named like: 001-feature-name, 1234-feature-name, or 20260319-143022-feature-name" >&2
        return 1
    fi
    return 0
}

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env bash
# Git extension: initialize-repo.sh
# Initialize a Git repository with an initial commit.
# Customizable — replace this script to add .gitignore templates,
# default branch config, git-flow, LFS, signing, etc.
set -e

# Absolute directory of this script (CDPATH cleared so `cd` prints nothing).
SCRIPT_DIR="$(CDPATH="" cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Find project root
# Walk upward from $1 until a directory containing .specify or .git is found.
_find_project_root() {
    local dir="$1"
    while [ "$dir" != "/" ]; do
        if [ -d "$dir/.specify" ] || [ -d "$dir/.git" ]; then
            echo "$dir"
            return 0
        fi
        dir="$(dirname "$dir")"
    done
    return 1
}
REPO_ROOT=$(_find_project_root "$SCRIPT_DIR") || REPO_ROOT="$(pwd)"
cd "$REPO_ROOT"

# Read commit message from extension config, fall back to default
COMMIT_MSG="[Spec Kit] Initial commit"
_config_file="$REPO_ROOT/.specify/extensions/git/git-config.yml"
if [ -f "$_config_file" ]; then
    # Strip the key, then any surrounding quotes, from the configured value.
    _msg=$(grep '^init_commit_message:' "$_config_file" 2>/dev/null | sed 's/^init_commit_message:[[:space:]]*//' | sed 's/^["'\'']//' | sed 's/["'\'']*$//')
    if [ -n "$_msg" ]; then
        COMMIT_MSG="$_msg"
    fi
fi

# Check if git is available; initialization is best-effort (exit 0 on skip).
if ! command -v git >/dev/null 2>&1; then
    echo "[specify] Warning: Git not found; skipped repository initialization" >&2
    exit 0
fi
# Check if already a git repo
if git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
    echo "[specify] Git repository already initialized; skipping" >&2
    exit 0
fi
# Initialize
# --allow-empty lets the initial commit succeed even in an empty directory.
_git_out=$(git init -q 2>&1) || { echo "[specify] Error: git init failed: $_git_out" >&2; exit 1; }
_git_out=$(git add . 2>&1) || { echo "[specify] Error: git add failed: $_git_out" >&2; exit 1; }
_git_out=$(git commit --allow-empty -q -m "$COMMIT_MSG" 2>&1) || { echo "[specify] Error: git commit failed: $_git_out" >&2; exit 1; }
echo "✓ Git repository initialized" >&2

View File

@@ -0,0 +1,169 @@
#!/usr/bin/env pwsh
# Git extension: auto-commit.ps1
# Automatically commit changes after a Spec Kit command completes.
# Checks per-command config keys in git-config.yml before committing.
#
# Usage: auto-commit.ps1 <event_name>
# e.g.: auto-commit.ps1 after_specify
param(
    [Parameter(Position = 0, Mandatory = $true)]
    [string]$EventName
)
$ErrorActionPreference = 'Stop'

function Find-ProjectRoot {
    # Walk upward from $StartDir until a directory containing .specify or
    # .git is found; returns $null once the filesystem root is reached.
    param([string]$StartDir)
    $current = Resolve-Path $StartDir
    while ($true) {
        foreach ($marker in @('.specify', '.git')) {
            if (Test-Path (Join-Path $current $marker)) {
                return $current
            }
        }
        $parent = Split-Path $current -Parent
        if ($parent -eq $current) { return $null }
        $current = $parent
    }
}

$repoRoot = Find-ProjectRoot -StartDir $PSScriptRoot
if (-not $repoRoot) { $repoRoot = Get-Location }
Set-Location $repoRoot

# Check if git is available; auto-commit is best-effort (exit 0 on skip).
if (-not (Get-Command git -ErrorAction SilentlyContinue)) {
    Write-Warning "[specify] Warning: Git not found; skipped auto-commit"
    exit 0
}

# Temporarily relax ErrorActionPreference so git stderr warnings
# (e.g. CRLF notices on Windows) do not become terminating errors.
$savedEAP = $ErrorActionPreference
$ErrorActionPreference = 'Continue'
try {
    git rev-parse --is-inside-work-tree 2>$null | Out-Null
    $isRepo = $LASTEXITCODE -eq 0
} finally {
    $ErrorActionPreference = $savedEAP
}
if (-not $isRepo) {
    Write-Warning "[specify] Warning: Not a Git repository; skipped auto-commit"
    exit 0
}

# Read per-command config from git-config.yml
$configFile = Join-Path $repoRoot ".specify/extensions/git/git-config.yml"
$enabled = $false
$commitMsg = ""
if (Test-Path $configFile) {
    # Parse YAML to find auto_commit section.
    # NOTE: this is a hand-rolled, indentation-sensitive scan, not a real
    # YAML parser; it assumes 2-space indent for command keys and 4-space
    # indent for their enabled/message children.
    $inAutoCommit = $false
    $inEvent = $false
    $defaultEnabled = $false
    foreach ($line in Get-Content $configFile) {
        # Detect auto_commit: section
        if ($line -match '^auto_commit:') {
            $inAutoCommit = $true
            $inEvent = $false
            continue
        }
        # Exit auto_commit section on next top-level key
        if ($inAutoCommit -and $line -match '^[a-z]') {
            break
        }
        if ($inAutoCommit) {
            # Check default key
            if ($line -match '^\s+default:\s*(.+)$') {
                $val = $matches[1].Trim().ToLower()
                if ($val -eq 'true') { $defaultEnabled = $true }
            }
            # Detect our event subsection
            if ($line -match "^\s+${EventName}:") {
                $inEvent = $true
                continue
            }
            # Inside our event subsection
            if ($inEvent) {
                # Exit on next sibling key (2-space indent, not 4+)
                if ($line -match '^\s{2}[a-z]' -and $line -notmatch '^\s{4}') {
                    $inEvent = $false
                    continue
                }
                if ($line -match '\s+enabled:\s*(.+)$') {
                    $val = $matches[1].Trim().ToLower()
                    if ($val -eq 'true') { $enabled = $true }
                    if ($val -eq 'false') { $enabled = $false }
                }
                if ($line -match '\s+message:\s*(.+)$') {
                    # Strip one leading and one trailing quote (single or double).
                    $commitMsg = $matches[1].Trim() -replace '^["'']' -replace '["'']$'
                }
            }
        }
    }
    # If event-specific key not found, use default
    if (-not $enabled -and $defaultEnabled) {
        $hasEventKey = Select-String -Path $configFile -Pattern "^\s*${EventName}:" -Quiet
        if (-not $hasEventKey) {
            $enabled = $true
        }
    }
} else {
    # No config file — auto-commit disabled by default
    exit 0
}
if (-not $enabled) {
    exit 0
}

# Check if there are changes to commit: tracked modifications, staged
# changes, or untracked (non-ignored) files.
# Relax ErrorActionPreference so CRLF warnings on stderr do not terminate.
$savedEAP = $ErrorActionPreference
$ErrorActionPreference = 'Continue'
try {
    git diff --quiet HEAD 2>$null; $d1 = $LASTEXITCODE
    git diff --cached --quiet 2>$null; $d2 = $LASTEXITCODE
    $untracked = git ls-files --others --exclude-standard 2>$null
} finally {
    $ErrorActionPreference = $savedEAP
}
if ($d1 -eq 0 -and $d2 -eq 0 -and -not $untracked) {
    Write-Host "[specify] No changes to commit after $EventName" -ForegroundColor DarkGray
    exit 0
}

# Derive a human-readable command name from the event
# e.g., after_specify -> specify, before_plan -> plan
$commandName = $EventName -replace '^after_', '' -replace '^before_', ''
$phase = if ($EventName -match '^before_') { 'before' } else { 'after' }

# Use custom message if configured, otherwise default
if (-not $commitMsg) {
    $commitMsg = "[Spec Kit] Auto-commit $phase $commandName"
}

# Stage and commit
# Relax ErrorActionPreference so CRLF warnings on stderr do not terminate,
# while still allowing redirected error output to be captured for diagnostics.
$savedEAP = $ErrorActionPreference
$ErrorActionPreference = 'Continue'
try {
    $out = git add . 2>&1 | Out-String
    if ($LASTEXITCODE -ne 0) { throw "git add failed: $out" }
    $out = git commit -q -m $commitMsg 2>&1 | Out-String
    if ($LASTEXITCODE -ne 0) { throw "git commit failed: $out" }
} catch {
    Write-Warning "[specify] Error: $_"
    exit 1
} finally {
    $ErrorActionPreference = $savedEAP
}
Write-Host "[OK] Changes committed $phase $commandName"

View File

@@ -0,0 +1,403 @@
#!/usr/bin/env pwsh
# Git extension: create-new-feature.ps1
# Adapted from core scripts/powershell/create-new-feature.ps1 for extension layout.
# Sources common.ps1 from the project's installed scripts, falling back to
# git-common.ps1 for minimal git helpers.
[CmdletBinding()]
param(
[switch]$Json,
[switch]$AllowExistingBranch,
[switch]$DryRun,
[string]$ShortName,
[Parameter()]
[long]$Number = 0,
[switch]$Timestamp,
[switch]$Help,
[Parameter(Position = 0, ValueFromRemainingArguments = $true)]
[string[]]$FeatureDescription
)
$ErrorActionPreference = 'Stop'
if ($Help) {
Write-Host "Usage: ./create-new-feature.ps1 [-Json] [-DryRun] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
Write-Host ""
Write-Host "Options:"
Write-Host " -Json Output in JSON format"
Write-Host " -DryRun Compute branch name without creating the branch"
Write-Host " -AllowExistingBranch Switch to branch if it already exists instead of failing"
Write-Host " -ShortName <name> Provide a custom short name (2-4 words) for the branch"
Write-Host " -Number N Specify branch number manually (overrides auto-detection)"
Write-Host " -Timestamp Use timestamp prefix (YYYYMMDD-HHMMSS) instead of sequential numbering"
Write-Host " -Help Show this help message"
Write-Host ""
Write-Host "Environment variables:"
Write-Host " GIT_BRANCH_NAME Use this exact branch name, bypassing all prefix/suffix generation"
Write-Host ""
exit 0
}
if (-not $FeatureDescription -or $FeatureDescription.Count -eq 0) {
Write-Error "Usage: ./create-new-feature.ps1 [-Json] [-DryRun] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
exit 1
}
$featureDesc = ($FeatureDescription -join ' ').Trim()
if ([string]::IsNullOrWhiteSpace($featureDesc)) {
Write-Error "Error: Feature description cannot be empty or contain only whitespace"
exit 1
}
function Get-HighestNumberFromSpecs {
    # Return the largest sequential numeric prefix (3+ digits) among the
    # subdirectories of $SpecsDir; timestamp-style names
    # (YYYYMMDD-HHMMSS-*) are ignored. Returns 0 when nothing matches or
    # the directory does not exist.
    param([string]$SpecsDir)
    [long]$best = 0
    if (-not (Test-Path $SpecsDir)) { return $best }
    foreach ($dir in (Get-ChildItem -Path $SpecsDir -Directory)) {
        if ($dir.Name -match '^\d{8}-\d{6}-') { continue }
        if ($dir.Name -match '^(\d{3,})-') {
            [long]$value = 0
            if ([long]::TryParse($matches[1], [ref]$value) -and $value -gt $best) {
                $best = $value
            }
        }
    }
    return $best
}
function Get-HighestNumberFromNames {
    # Scan a list of branch/directory names and return the largest
    # sequential numeric prefix (3+ digits). Timestamp-prefixed names
    # (YYYYMMDD-HHMMSS-) are skipped; returns 0 when nothing qualifies.
    param([string[]]$Names)
    [long]$best = 0
    foreach ($candidate in $Names) {
        if ($candidate -match '^\d{8}-\d{6}-') { continue }
        if ($candidate -match '^(\d{3,})-') {
            [long]$value = 0
            if ([long]::TryParse($matches[1], [ref]$value) -and $value -gt $best) {
                $best = $value
            }
        }
    }
    return $best
}
function Get-HighestNumberFromBranches {
    # List all local and remote-tracking branches, strip the "* "
    # current-branch marker and "remotes/<name>/" prefixes, then return
    # the largest sequential numeric prefix. Returns 0 when git is
    # unavailable, fails, or nothing matches.
    param()
    try {
        $branches = git branch -a 2>$null
        if ($LASTEXITCODE -eq 0 -and $branches) {
            $cleanNames = $branches | ForEach-Object {
                $_.Trim() -replace '^\*?\s+', '' -replace '^remotes/[^/]+/', ''
            }
            return Get-HighestNumberFromNames -Names $cleanNames
        }
    } catch {
        Write-Verbose "Could not check Git branches: $_"
    }
    return 0
}
function Get-HighestNumberFromRemoteRefs {
    # Query every configured remote via `git ls-remote --heads` (no fetch,
    # so local refs are untouched) and return the highest sequential
    # branch-number prefix found. Returns 0 on any failure.
    [long]$highest = 0
    try {
        $remotes = git remote 2>$null
        if ($remotes) {
            foreach ($remote in $remotes) {
                # Disable interactive credential prompts for the query.
                # NOTE(review): the previous value of GIT_TERMINAL_PROMPT is
                # not saved/restored — it is simply removed afterwards.
                $env:GIT_TERMINAL_PROMPT = '0'
                $refs = git ls-remote --heads $remote 2>$null
                $env:GIT_TERMINAL_PROMPT = $null
                if ($LASTEXITCODE -eq 0 -and $refs) {
                    $refNames = $refs | ForEach-Object {
                        if ($_ -match 'refs/heads/(.+)$') { $matches[1] }
                    } | Where-Object { $_ }
                    $remoteHighest = Get-HighestNumberFromNames -Names $refNames
                    if ($remoteHighest -gt $highest) { $highest = $remoteHighest }
                }
            }
        }
    } catch {
        Write-Verbose "Could not query remote refs: $_"
    }
    return $highest
}
function Get-NextBranchNumber {
    # Compute the next available sequential branch number (max+1) from
    # branches and specs/ directories. With -SkipFetch, remotes are
    # queried via ls-remote instead of `git fetch`, keeping the call
    # side-effect-free (used by -DryRun).
    param(
        [string]$SpecsDir,
        [switch]$SkipFetch
    )
    if ($SkipFetch) {
        $highestBranch = Get-HighestNumberFromBranches
        $highestRemote = Get-HighestNumberFromRemoteRefs
        $highestBranch = [Math]::Max($highestBranch, $highestRemote)
    } else {
        # Best-effort fetch so remote branches show up in `git branch -a`.
        try {
            git fetch --all --prune 2>$null | Out-Null
        } catch { }
        $highestBranch = Get-HighestNumberFromBranches
    }
    $highestSpec = Get-HighestNumberFromSpecs -SpecsDir $SpecsDir
    $maxNum = [Math]::Max($highestBranch, $highestSpec)
    return $maxNum + 1
}
function ConvertTo-CleanBranchName {
    # Normalize an arbitrary string into a git-safe slug: lowercase, every
    # run of non-alphanumerics collapsed to a single hyphen, and no leading
    # or trailing hyphen.
    param([string]$Name)
    $slug = $Name.ToLower()
    $slug = $slug -replace '[^a-z0-9]', '-'
    $slug = $slug -replace '-{2,}', '-'
    # After collapsing, at most one hyphen can remain at each end.
    return $slug.Trim('-')
}
# ---------------------------------------------------------------------------
# Source common.ps1 from the project's installed scripts.
# Search locations in priority order:
# 1. .specify/scripts/powershell/common.ps1 under the project root
# 2. scripts/powershell/common.ps1 under the project root (source checkout)
# 3. git-common.ps1 next to this script (minimal fallback)
# ---------------------------------------------------------------------------
function Find-ProjectRoot {
    # Walk upward from $StartDir until a directory containing a project
    # marker (.specify or .git) is found; return that directory, or $null
    # when the walk reaches the filesystem root without finding one.
    param([string]$StartDir)
    $dir = Resolve-Path $StartDir
    while ($true) {
        $found = ('.specify', '.git') | Where-Object { Test-Path (Join-Path $dir $_) }
        if ($found) { return $dir }
        $next = Split-Path $dir -Parent
        # Split-Path returns the same path at the root — stop there.
        if ($next -eq $dir) { return $null }
        $dir = $next
    }
}
# Locate the project root and dot-source the shared helper script from the
# first candidate that exists: installed copy, source checkout, or the
# minimal git-common.ps1 fallback shipped next to this script.
$projectRoot = Find-ProjectRoot -StartDir $PSScriptRoot
$commonLoaded = $false
if ($projectRoot) {
    $candidates = @(
        (Join-Path $projectRoot ".specify/scripts/powershell/common.ps1"),
        (Join-Path $projectRoot "scripts/powershell/common.ps1")
    )
    foreach ($candidate in $candidates) {
        if (Test-Path $candidate) {
            . $candidate
            $commonLoaded = $true
            break
        }
    }
}
if (-not $commonLoaded -and (Test-Path "$PSScriptRoot/git-common.ps1")) {
    # Fallback: minimal git helpers bundled with this extension.
    . "$PSScriptRoot/git-common.ps1"
    $commonLoaded = $true
}
if (-not $commonLoaded) {
    throw "Unable to locate common script file. Please ensure the Specify core scripts are installed."
}
# Resolve repository root, preferring the helper from the sourced script.
if (Get-Command Get-RepoRoot -ErrorAction SilentlyContinue) {
    $repoRoot = Get-RepoRoot
} elseif ($projectRoot) {
    $repoRoot = $projectRoot
} else {
    throw "Could not determine repository root."
}
# Check if git is available
if (Get-Command Test-HasGit -ErrorAction SilentlyContinue) {
    # Call without parameters for compatibility with core common.ps1 (no -RepoRoot param)
    # and git-common.ps1 (has -RepoRoot param with default).
    $hasGit = Test-HasGit
} else {
    # No Test-HasGit helper was sourced: ask git directly.
    try {
        git -C $repoRoot rev-parse --is-inside-work-tree 2>$null | Out-Null
        $hasGit = ($LASTEXITCODE -eq 0)
    } catch {
        $hasGit = $false
    }
}
# All later git commands and relative paths assume the repo root as CWD.
Set-Location $repoRoot
$specsDir = Join-Path $repoRoot 'specs'
function Get-BranchName {
    <#
    .SYNOPSIS
        Derive a short, hyphenated branch suffix from a feature description.
    .DESCRIPTION
        Lowercases the description, strips punctuation, drops common stop
        words, and joins the first 3 surviving words (or 4 when exactly 4
        survive). Words of 1-2 characters are kept only when they appear as
        uppercase acronyms in the original description (e.g. "DB", "UI").
        Falls back to a generic slug when no meaningful word survives.
    #>
    param([string]$Description)
    $stopWords = @(
        'i', 'a', 'an', 'the', 'to', 'for', 'of', 'in', 'on', 'at', 'by', 'with', 'from',
        'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
        'do', 'does', 'did', 'will', 'would', 'should', 'could', 'can', 'may', 'might', 'must', 'shall',
        'this', 'that', 'these', 'those', 'my', 'your', 'our', 'their',
        'want', 'need', 'add', 'get', 'set'
    )
    $cleanName = $Description.ToLower() -replace '[^a-z0-9\s]', ' '
    $words = $cleanName -split '\s+' | Where-Object { $_ }
    $meaningfulWords = @()
    foreach ($word in $words) {
        if ($stopWords -contains $word) { continue }
        if ($word.Length -ge 3) {
            $meaningfulWords += $word
        } elseif ($Description -cmatch "\b$($word.ToUpper())\b") {
            # BUGFIX: -match is case-insensitive in PowerShell, so this
            # "appears as an uppercase acronym" check previously passed for
            # every short word. -cmatch keeps a 1-2 char word only when the
            # original description really contains it uppercased (e.g. "DB").
            $meaningfulWords += $word
        }
    }
    if ($meaningfulWords.Count -gt 0) {
        # Allow 4 words only when exactly 4 survive; otherwise cap at 3.
        $maxWords = if ($meaningfulWords.Count -eq 4) { 4 } else { 3 }
        $result = ($meaningfulWords | Select-Object -First $maxWords) -join '-'
        return $result
    } else {
        # Nothing meaningful survived: fall back to the first three tokens of
        # a generic slug of the whole description.
        $result = ConvertTo-CleanBranchName -Name $Description
        $fallbackWords = ($result -split '-') | Where-Object { $_ } | Select-Object -First 3
        return [string]::Join('-', $fallbackWords)
    }
}
# Check for GIT_BRANCH_NAME env var override (exact branch name, no prefix/suffix)
if ($env:GIT_BRANCH_NAME) {
    $branchName = $env:GIT_BRANCH_NAME
    # Check 244-byte limit (UTF-8) for override names
    $branchNameUtf8ByteCount = [System.Text.Encoding]::UTF8.GetByteCount($branchName)
    if ($branchNameUtf8ByteCount -gt 244) {
        throw "GIT_BRANCH_NAME must be 244 bytes or fewer in UTF-8. Provided value is $branchNameUtf8ByteCount bytes; please supply a shorter override branch name."
    }
    # Extract FEATURE_NUM from the branch name if it starts with a numeric prefix
    # Check timestamp pattern first (YYYYMMDD-HHMMSS-) since it also matches the simpler ^\d+ pattern
    if ($branchName -match '^(\d{8}-\d{6})-') {
        $featureNum = $matches[1]
    } elseif ($branchName -match '^(\d+)-') {
        $featureNum = $matches[1]
    } else {
        # No numeric prefix at all: reuse the whole override as the feature id.
        $featureNum = $branchName
    }
} else {
    # Build the descriptive suffix: an explicit -ShortName wins, otherwise
    # derive one from the feature description.
    if ($ShortName) {
        $branchSuffix = ConvertTo-CleanBranchName -Name $ShortName
    } else {
        $branchSuffix = Get-BranchName -Description $featureDesc
    }
    if ($Timestamp -and $Number -ne 0) {
        # Timestamp mode generates its own prefix; an explicit -Number cannot apply.
        Write-Warning "[specify] Warning: -Number is ignored when -Timestamp is used"
        $Number = 0
    }
    if ($Timestamp) {
        $featureNum = Get-Date -Format 'yyyyMMdd-HHmmss'
        $branchName = "$featureNum-$branchSuffix"
    } else {
        if ($Number -eq 0) {
            # Auto-number: find the next free sequential number. Dry-run with
            # git avoids `git fetch` via -SkipFetch; without git only the
            # specs directory can be consulted.
            if ($DryRun -and $hasGit) {
                $Number = Get-NextBranchNumber -SpecsDir $specsDir -SkipFetch
            } elseif ($DryRun) {
                $Number = (Get-HighestNumberFromSpecs -SpecsDir $specsDir) + 1
            } elseif ($hasGit) {
                $Number = Get-NextBranchNumber -SpecsDir $specsDir
            } else {
                $Number = (Get-HighestNumberFromSpecs -SpecsDir $specsDir) + 1
            }
        }
        # Zero-pad to at least three digits (e.g. 7 -> "007").
        $featureNum = ('{0:000}' -f $Number)
        $branchName = "$featureNum-$branchSuffix"
    }
}
# Enforce GitHub's 244-byte branch-name limit by truncating the suffix while
# keeping the numeric prefix intact.
# NOTE(review): this compares .Length (UTF-16 char count) although the
# warnings say "bytes". Generated suffixes are ASCII-only (both
# ConvertTo-CleanBranchName and Get-BranchName strip non-alphanumerics), so
# chars == bytes in practice — confirm before ever allowing non-ASCII here.
$maxBranchLength = 244
if ($branchName.Length -gt $maxBranchLength) {
    # Budget left for the suffix after "<featureNum>-".
    $prefixLength = $featureNum.Length + 1
    $maxSuffixLength = $maxBranchLength - $prefixLength
    $truncatedSuffix = $branchSuffix.Substring(0, [Math]::Min($branchSuffix.Length, $maxSuffixLength))
    # Avoid ending the branch name on a dangling hyphen.
    $truncatedSuffix = $truncatedSuffix -replace '-$', ''
    $originalBranchName = $branchName
    $branchName = "$featureNum-$truncatedSuffix"
    Write-Warning "[specify] Branch name exceeded GitHub's 244-byte limit"
    Write-Warning "[specify] Original: $originalBranchName ($($originalBranchName.Length) bytes)"
    Write-Warning "[specify] Truncated to: $branchName ($($branchName.Length) bytes)"
}
if (-not $DryRun) {
    if ($hasGit) {
        # Create and switch to the new branch in one step.
        $branchCreated = $false
        $branchCreateError = ''
        try {
            $branchCreateError = git checkout -q -b $branchName 2>&1 | Out-String
            if ($LASTEXITCODE -eq 0) {
                $branchCreated = $true
            }
        } catch {
            $branchCreateError = $_.Exception.Message
        }
        if (-not $branchCreated) {
            # Creation failed: distinguish "branch already exists" from other
            # git failures so the user gets an actionable message.
            $currentBranch = ''
            try { $currentBranch = (git rev-parse --abbrev-ref HEAD 2>$null).Trim() } catch {}
            $existingBranch = git branch --list $branchName 2>$null
            if ($existingBranch) {
                if ($AllowExistingBranch) {
                    if ($currentBranch -eq $branchName) {
                        # Already on the target branch
                    } else {
                        # Reuse the existing branch by switching to it.
                        $switchBranchError = git checkout -q $branchName 2>&1 | Out-String
                        if ($LASTEXITCODE -ne 0) {
                            if ($switchBranchError) {
                                Write-Error "Error: Branch '$branchName' exists but could not be checked out.`n$($switchBranchError.Trim())"
                            } else {
                                Write-Error "Error: Branch '$branchName' exists but could not be checked out. Resolve any uncommitted changes or conflicts and try again."
                            }
                            exit 1
                        }
                    }
                } elseif ($Timestamp) {
                    Write-Error "Error: Branch '$branchName' already exists. Rerun to get a new timestamp or use a different -ShortName."
                    exit 1
                } else {
                    Write-Error "Error: Branch '$branchName' already exists. Please use a different feature name or specify a different number with -Number."
                    exit 1
                }
            } else {
                # Not an existing-branch collision: surface git's own error text if captured.
                if ($branchCreateError) {
                    Write-Error "Error: Failed to create git branch '$branchName'.`n$($branchCreateError.Trim())"
                } else {
                    Write-Error "Error: Failed to create git branch '$branchName'. Please check your git configuration and try again."
                }
                exit 1
            }
        }
    } else {
        # No git repo: warn on stderr when emitting JSON so stdout stays parseable.
        if ($Json) {
            [Console]::Error.WriteLine("[specify] Warning: Git repository not detected; skipped branch creation for $branchName")
        } else {
            Write-Warning "[specify] Warning: Git repository not detected; skipped branch creation for $branchName"
        }
    }
    # Downstream speckit tooling reads the active feature from this variable.
    $env:SPECIFY_FEATURE = $branchName
}
if ($Json) {
    # Machine-readable result for callers that parse this script's output.
    $obj = [PSCustomObject]@{
        BRANCH_NAME = $branchName
        FEATURE_NUM = $featureNum
        HAS_GIT = $hasGit
    }
    if ($DryRun) {
        $obj | Add-Member -NotePropertyName 'DRY_RUN' -NotePropertyValue $true
    }
    $obj | ConvertTo-Json -Compress
} else {
    Write-Output "BRANCH_NAME: $branchName"
    Write-Output "FEATURE_NUM: $featureNum"
    Write-Output "HAS_GIT: $hasGit"
    if (-not $DryRun) {
        Write-Output "SPECIFY_FEATURE environment variable set to: $branchName"
    }
}

View File

@@ -0,0 +1,51 @@
#!/usr/bin/env pwsh
# Git-specific common functions for the git extension.
# Extracted from scripts/powershell/common.ps1 — contains only git-specific
# branch validation and detection logic.
function Test-HasGit {
    # True only when all three hold: $RepoRoot contains a .git entry, the
    # git CLI is on PATH, and git confirms the directory is inside a work
    # tree. Any failure or exception yields $false.
    param([string]$RepoRoot = (Get-Location))
    try {
        $gitDirPresent = Test-Path (Join-Path $RepoRoot '.git')
        $gitCliPresent = [bool](Get-Command git -ErrorAction SilentlyContinue)
        if (-not ($gitDirPresent -and $gitCliPresent)) { return $false }
        git -C $RepoRoot rev-parse --is-inside-work-tree 2>$null | Out-Null
        return ($LASTEXITCODE -eq 0)
    } catch {
        return $false
    }
}
function Get-SpecKitEffectiveBranchName {
    # For a one-level namespaced branch ("user/feature"), return the part
    # after the slash; any other shape (no slash, multiple slashes, empty
    # side) is returned unchanged.
    param([string]$Branch)
    $parts = $Branch -split '/'
    if ($parts.Count -eq 2 -and $parts[0] -and $parts[1]) {
        return $parts[1]
    }
    return $Branch
}
function Test-FeatureBranch {
    # Validate that $Branch follows a speckit feature-branch naming scheme:
    # either a sequential prefix ("001-name", 3+ digits) or a full timestamp
    # prefix ("20260319-143022-name"). Returns $true/$false; on failure the
    # details are written to stderr.
    param(
        [string]$Branch,
        [bool]$HasGit = $true
    )
    # For non-git repos, we can't enforce branch naming but still provide output
    if (-not $HasGit) {
        Write-Warning "[specify] Warning: Git repository not detected; skipped branch validation"
        return $true
    }
    $raw = $Branch
    # Namespaced branches ("user/001-name") are validated by their short name.
    $Branch = Get-SpecKitEffectiveBranchName $raw
    # Accept sequential prefix (3+ digits) but exclude malformed timestamps
    # Malformed: 7-or-8 digit date + 6-digit time with no trailing slug (e.g. "2026031-143022" or "20260319-143022")
    $hasMalformedTimestamp = ($Branch -match '^[0-9]{7}-[0-9]{6}-') -or ($Branch -match '^(?:\d{7}|\d{8})-\d{6}$')
    $isSequential = ($Branch -match '^[0-9]{3,}-') -and (-not $hasMalformedTimestamp)
    # A name is valid when it is sequential OR carries a well-formed timestamp prefix.
    if (-not $isSequential -and $Branch -notmatch '^\d{8}-\d{6}-') {
        [Console]::Error.WriteLine("ERROR: Not on a feature branch. Current branch: $raw")
        [Console]::Error.WriteLine("Feature branches should be named like: 001-feature-name, 1234-feature-name, or 20260319-143022-feature-name")
        return $false
    }
    return $true
}

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env pwsh
# Git extension: initialize-repo.ps1
# Initialize a Git repository with an initial commit.
# Customizable — replace this script to add .gitignore templates,
# default branch config, git-flow, LFS, signing, etc.
$ErrorActionPreference = 'Stop'
# Find project root
function Find-ProjectRoot {
    # Ascend from $StartDir looking for a project marker (.specify or .git).
    # Returns the first directory containing one, or $null when the walk
    # reaches the filesystem root without a match.
    param([string]$StartDir)
    $dir = Resolve-Path $StartDir
    while ($true) {
        if ((Test-Path (Join-Path $dir '.specify')) -or (Test-Path (Join-Path $dir '.git'))) {
            return $dir
        }
        $parent = Split-Path $dir -Parent
        # At the root, Split-Path yields the same path — give up there.
        if ($parent -eq $dir) { return $null }
        $dir = $parent
    }
}
# Run from the project root; fall back to the current directory when no
# marker is found anywhere above this script.
$repoRoot = Find-ProjectRoot -StartDir $PSScriptRoot
if (-not $repoRoot) { $repoRoot = Get-Location }
Set-Location $repoRoot
# Read commit message from extension config, fall back to default
$commitMsg = "[Spec Kit] Initial commit"
$configFile = Join-Path $repoRoot ".specify/extensions/git/git-config.yml"
if (Test-Path $configFile) {
    # Minimal YAML scan: use the first init_commit_message entry found,
    # stripping at most one surrounding quote from each end.
    foreach ($line in Get-Content $configFile) {
        if ($line -match '^init_commit_message:\s*(.+)$') {
            $val = $matches[1].Trim() -replace '^["'']' -replace '["'']$'
            if ($val) { $commitMsg = $val }
            break
        }
    }
}
# Check if git is available
if (-not (Get-Command git -ErrorAction SilentlyContinue)) {
    Write-Warning "[specify] Warning: Git not found; skipped repository initialization"
    exit 0
}
# Check if already a git repo
try {
    git rev-parse --is-inside-work-tree 2>$null | Out-Null
    if ($LASTEXITCODE -eq 0) {
        Write-Warning "[specify] Git repository already initialized; skipping"
        exit 0
    }
} catch { }
# Initialize
try {
    $out = git init -q 2>&1 | Out-String
    if ($LASTEXITCODE -ne 0) { throw "git init failed: $out" }
    $out = git add . 2>&1 | Out-String
    if ($LASTEXITCODE -ne 0) { throw "git add failed: $out" }
    # --allow-empty lets the initial commit succeed even when `git add .`
    # staged nothing (e.g. an empty directory).
    $out = git commit --allow-empty -q -m $commitMsg 2>&1 | Out-String
    if ($LASTEXITCODE -ne 0) { throw "git commit failed: $out" }
} catch {
    Write-Warning "[specify] Error: $_"
    exit 1
}
Write-Host "✓ Git repository initialized"

3
.specify/feature.json Normal file
View File

@@ -0,0 +1,3 @@
{
"feature_directory": "specs/001-oidc-proxy-script"
}

View File

@@ -1,11 +1,9 @@
{
"ai": "copilot",
"ai_commands_dir": null,
"ai_skills": false,
"branch_numbering": "sequential",
"context_file": ".github/copilot-instructions.md",
"here": true,
"offline": false,
"preset": null,
"integration": "copilot",
"script": "sh",
"speckit_version": "0.4.3"
"speckit_version": "0.7.6.dev0"
}

View File

@@ -0,0 +1,4 @@
{
"integration": "copilot",
"version": "0.7.6.dev0"
}

View File

@@ -0,0 +1,25 @@
{
"integration": "copilot",
"version": "0.7.6.dev0",
"installed_at": "2026-04-22T23:06:00.770484+00:00",
"files": {
".github/agents/speckit.analyze.agent.md": "699032fdd49afe31d23c7191f3fe7bcb1d14b081fbc94c2287e6ba3a57574fda",
".github/agents/speckit.checklist.agent.md": "d7d691689fe45427c868dcf18ade4df500f0c742a6c91923fefba405d6466dde",
".github/agents/speckit.clarify.agent.md": "0cc766dcc5cab233ccdf3bc4cfb5759a6d7d1e13e29f611083046f818f5812bb",
".github/agents/speckit.constitution.agent.md": "58d35eb026f56bb7364d91b8b0382d5dd1249ded6c1449a2b69546693afb85f7",
".github/agents/speckit.implement.agent.md": "83628415c86ba487b3a083c7a2c0f016c9073abd02c1c7f4a30cff949b6602c0",
".github/agents/speckit.plan.agent.md": "2ad128b81ccd8f5bfa78b3b43101f377dfddd8f800fa0856f85bf53b1489b783",
".github/agents/speckit.specify.agent.md": "5bbb5270836cc9a3286ce3ed96a500f3d383a54abb06aa11b01a2d2f76dbf39b",
".github/agents/speckit.tasks.agent.md": "a58886f29f75e1a14840007772ddd954742aafb3e03d9d1231bee033e6c1626b",
".github/agents/speckit.taskstoissues.agent.md": "e84794f7a839126defb364ca815352c5c2b2d20db2d6da399fa53e4ddbb7b3ee",
".github/prompts/speckit.analyze.prompt.md": "bb93dbbafa96d07b7cd07fc7061d8adb0c6b26cb772a52d0dce263b1ca2b9b77",
".github/prompts/speckit.checklist.prompt.md": "c3aea7526c5cbfd8665acc9508ad5a9a3f71e91a63c36be7bed13a834c3a683c",
".github/prompts/speckit.clarify.prompt.md": "ce79b3437ca918d46ac858eb4b8b44d3b0a02c563660c60d94c922a7b5d8d4f4",
".github/prompts/speckit.constitution.prompt.md": "38f937279de14387601422ddfda48365debdbaf47b2d513527b8f6d8a27d499d",
".github/prompts/speckit.implement.prompt.md": "5053a17fb9238338c63b898ee9c80b2cb4ad1a90c6071fe3748de76864ac6a80",
".github/prompts/speckit.plan.prompt.md": "2098dae6bd9277335f31cb150b78bfb1de539c0491798e5cfe382c89ab0bcd0e",
".github/prompts/speckit.specify.prompt.md": "7b2cc4dc6462da1c96df46bac4f60e53baba3097f4b24ac3f9b684194458aa98",
".github/prompts/speckit.tasks.prompt.md": "88fc57c289f99d5e9d35c255f3e2683f73ecb0a5155dcb4d886f82f52b11841f",
".github/prompts/speckit.taskstoissues.prompt.md": "2f9636d4f312a1470f000747cb62677fec0655d8b4e2357fa4fbf238965fa66d"
}
}

View File

@@ -0,0 +1,6 @@
{
"integration": "speckit",
"version": "0.7.6.dev0",
"installed_at": "2026-04-22T23:06:00.788253+00:00",
"files": {}
}

View File

@@ -0,0 +1,63 @@
schema_version: "1.0"
workflow:
id: "speckit"
name: "Full SDD Cycle"
version: "1.0.0"
author: "GitHub"
description: "Runs specify → plan → tasks → implement with review gates"
requires:
speckit_version: ">=0.7.2"
integrations:
any: ["copilot", "claude", "gemini"]
inputs:
spec:
type: string
required: true
prompt: "Describe what you want to build"
integration:
type: string
default: "copilot"
prompt: "Integration to use (e.g. claude, copilot, gemini)"
scope:
type: string
default: "full"
enum: ["full", "backend-only", "frontend-only"]
steps:
- id: specify
command: speckit.specify
integration: "{{ inputs.integration }}"
input:
args: "{{ inputs.spec }}"
- id: review-spec
type: gate
message: "Review the generated spec before planning."
options: [approve, reject]
on_reject: abort
- id: plan
command: speckit.plan
integration: "{{ inputs.integration }}"
input:
args: "{{ inputs.spec }}"
- id: review-plan
type: gate
message: "Review the plan before generating tasks."
options: [approve, reject]
on_reject: abort
- id: tasks
command: speckit.tasks
integration: "{{ inputs.integration }}"
input:
args: "{{ inputs.spec }}"
- id: implement
command: speckit.implement
integration: "{{ inputs.integration }}"
input:
args: "{{ inputs.spec }}"

View File

@@ -0,0 +1,13 @@
{
"schema_version": "1.0",
"workflows": {
"speckit": {
"name": "Full SDD Cycle",
"version": "1.0.0",
"description": "Runs specify \u2192 plan \u2192 tasks \u2192 implement with review gates",
"source": "bundled",
"installed_at": "2026-04-22T23:06:01.031168+00:00",
"updated_at": "2026-04-22T23:06:01.031175+00:00"
}
}
}

99
package-lock.json generated
View File

@@ -11,6 +11,7 @@
"dependencies": {
"axios": "^1.13.6",
"jsonwebtoken": "^9.0.3",
"redis": "^5.12.1",
"uuid": "^13.0.0",
"xmlbuilder2": "^4.0.3"
},
@@ -66,6 +67,79 @@
"node": ">=20.0"
}
},
"node_modules/@redis/bloom": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-5.12.1.tgz",
"integrity": "sha512-PUUfv+ms7jgPSBVoo/DN4AkPHj4D5TZSd6SbJX7egzBplkYUcKmHRE8RKia7UtZ8bSQbLguLvxVO+asKtQfZWA==",
"license": "MIT",
"engines": {
"node": ">= 18.19.0"
},
"peerDependencies": {
"@redis/client": "^5.12.1"
}
},
"node_modules/@redis/client": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/@redis/client/-/client-5.12.1.tgz",
"integrity": "sha512-7aPGWeqA3uFm43o19umzdl16CEjK/JQGtSXVPevplTaOU3VJA/rseBC1QvYUz9lLDIMBimc4SW/zrW4S89BaCA==",
"license": "MIT",
"peer": true,
"dependencies": {
"cluster-key-slot": "1.1.2"
},
"engines": {
"node": ">= 18.19.0"
},
"peerDependencies": {
"@node-rs/xxhash": "^1.1.0",
"@opentelemetry/api": ">=1 <2"
},
"peerDependenciesMeta": {
"@node-rs/xxhash": {
"optional": true
},
"@opentelemetry/api": {
"optional": true
}
}
},
"node_modules/@redis/json": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/@redis/json/-/json-5.12.1.tgz",
"integrity": "sha512-eOze75esLve4vfqDel7aMX08CNaiLLQS2fV8mpRN9NxPe1rVR4vQyYiW/OgtGUysF6QOr9ANhfxABKNOJfXdKg==",
"license": "MIT",
"engines": {
"node": ">= 18.19.0"
},
"peerDependencies": {
"@redis/client": "^5.12.1"
}
},
"node_modules/@redis/search": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/@redis/search/-/search-5.12.1.tgz",
"integrity": "sha512-ItlxbxC9cKI6IU1TLWoczwJCRb6TdmkEpWv05UrPawqaAnWGRu3rcIqsc5vN483T2fSociuyV1UkWIL5I4//2w==",
"license": "MIT",
"engines": {
"node": ">= 18.19.0"
},
"peerDependencies": {
"@redis/client": "^5.12.1"
}
},
"node_modules/@redis/time-series": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-5.12.1.tgz",
"integrity": "sha512-c6JL6E3EcZJuNqKFz+KM+l9l5mpcQiKvTwgA3blt5glWJ8hjDk0yeHN3beE/MpqYIQ8UEX44ItQzgkE/gCBELQ==",
"license": "MIT",
"engines": {
"node": ">= 18.19.0"
},
"peerDependencies": {
"@redis/client": "^5.12.1"
}
},
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -108,6 +182,15 @@
"node": ">= 0.4"
}
},
"node_modules/cluster-key-slot": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
@@ -469,6 +552,22 @@
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==",
"license": "MIT"
},
"node_modules/redis": {
"version": "5.12.1",
"resolved": "https://registry.npmjs.org/redis/-/redis-5.12.1.tgz",
"integrity": "sha512-LDsoVvb/CpoV9EN3FXvgvSHNJWuCIzl9MiO3ppOevuGLpSGJhwfQjpEwfFJcQvNSddHADDdZaWx0HnmMxRXG7g==",
"license": "MIT",
"dependencies": {
"@redis/bloom": "5.12.1",
"@redis/client": "5.12.1",
"@redis/json": "5.12.1",
"@redis/search": "5.12.1",
"@redis/time-series": "5.12.1"
},
"engines": {
"node": ">= 18.19.0"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",

View File

@@ -27,6 +27,7 @@
"dependencies": {
"axios": "^1.13.6",
"jsonwebtoken": "^9.0.3",
"redis": "^5.12.1",
"uuid": "^13.0.0",
"xmlbuilder2": "^4.0.3"
}

View File

@@ -0,0 +1,38 @@
# Specification Quality Checklist: OIDC Proxy Script Authentication
**Purpose**: Validate specification completeness and quality before proceeding to planning
**Created**: 2025-07-16
**Feature**: [spec.md](../spec.md)
## Content Quality
- [x] No implementation details (languages, frameworks, APIs)
- [x] Focused on user value and business needs
- [x] Written for non-technical stakeholders
- [x] All mandatory sections completed
## Requirement Completeness
- [x] No [NEEDS CLARIFICATION] markers remain
- [x] Requirements are testable and unambiguous
- [x] Success criteria are measurable
- [x] Success criteria are technology-agnostic (no implementation details)
- [x] All acceptance scenarios are defined
- [x] Edge cases are identified
- [x] Scope is clearly bounded
- [x] Dependencies and assumptions identified
## Feature Readiness
- [x] All functional requirements have clear acceptance criteria
- [x] User scenarios cover primary flows
- [x] Feature meets measurable outcomes defined in Success Criteria
- [x] No implementation details leak into specification
## Notes
All checklist items pass. The spec is ready for `/speckit.plan` or `/speckit.clarify`.
Reviewer notes:
- FR-009 through FR-011 reference specific file paths and injected dependencies. These are treated as architecture constraints (not implementation choices), as they are dictated by the VM sandbox execution model defined in the project constitution.
- The `expires_in` interpretation (absolute epoch vs relative duration) is documented as an assumption based on the example value `1532618185`. If this assumption is incorrect, FR-006 should be updated accordingly before planning.

View File

@@ -0,0 +1,113 @@
# Contract: Proxy HTTP Responses
**Feature**: 001-oidc-proxy-script
**File**: `src/proxyScripts/proxy.js`
**Endpoint**: Any path handled by the adapter (all requests delegated to proxy.js by server.js)
**Date**: 2025-07-17
---
## Overview
`proxy.js` responds to every inbound HTTP request with exactly one of two outcomes:
a success response (authentication succeeded) or an error response (authentication failed for
any reason). The contract defines the exact shape of both outcomes.
---
## Success Response
**Trigger**: OIDC token successfully obtained (fresh fetch or valid cached token).
```
HTTP/1.1 200 OK
Content-Type: text/plain
Authorized
```
| Property | Value |
|----------|-------|
| Status code | `200` |
| Status text | `OK` |
| `Content-Type` header | `text/plain` |
| Body | Literal string `Authorized` (no trailing newline) |
**Acceptance test** (FR-007, SC-001, SC-002):
```javascript
assert.strictEqual(res.statusCode, 200);
assert.strictEqual(res.body, 'Authorized');
```
---
## Error Response
**Trigger**: Any of the following (FR-008, SC-004):
- Token service returns HTTP 4xx or 5xx
- Token service is unreachable (network error)
- Token request times out after 5 seconds (FR-014)
- Token service response is missing `id_token` or `expires_in`
- `adapter_settings` is missing a required field
```
HTTP/1.1 401 Unauthorized
Content-Type: text/plain
Unauthorized: <descriptive message>
```
| Property | Value |
|----------|-------|
| Status code | `401` |
| Status text | `Unauthorized` |
| `Content-Type` header | `text/plain` |
| Body prefix | `Unauthorized: ` (literal, followed by the error message) |
| Body | Never empty; always includes a human-readable description |
**Example bodies by error cause**:
| Cause | Example body |
|-------|-------------|
| Invalid credentials (401 from token service) | `Unauthorized: HTTP 401` |
| Token service unavailable | `Unauthorized: connect ECONNREFUSED 127.0.0.1:443` |
| 5-second timeout | `Unauthorized: token service timeout` |
| Response missing `id_token` | `Unauthorized: id_token missing from response` |
| Response missing `expires_in` | `Unauthorized: expires_in missing from response` |
| Missing `tokenUrl` in settings | `Unauthorized: missing required field: tokenUrl` |
**Acceptance test** (FR-008):
```javascript
assert.strictEqual(res.statusCode, 401);
assert.match(res.body, /^Unauthorized: .+/);
```
---
## Invariants
These MUST hold for every request, regardless of outcome:
1. **One response per request**: `res.writeHead()` MUST be called exactly once;
`res.end()` MUST be called exactly once.
2. **Never 500**: `proxy.js` MUST NOT emit a 500 or leave the connection open. All
errors, including unexpected runtime errors, MUST result in a `401` (not a crash or hang).
3. **No imports/exports** (FR-009): The script MUST contain zero `import` or `export`
statements — verified by static analysis.
4. **No forbidden globals** (FR-010): No `config`, `global.config`, or `process.env`
references — verified by static analysis.
5. **Response within 5 seconds** (SC-001, FR-014): The HTTP timeout on the token POST
is 5000 ms. Combined with synchronous error handling, every request resolves within
5 seconds under normal network conditions.
---
## Out of Scope
- The proxy script does NOT validate the inbound request (method, path, headers, body).
Its sole responsibility is OIDC authentication.
- The response does NOT include the OIDC token in the body. The `200 OK / Authorized`
body is sufficient to confirm authentication succeeded (spec assumption, line 106).
- No `Authorization` response header is set. The adapter's caller does not require it.

View File

@@ -0,0 +1,222 @@
# Contract: VM Context Dependencies
**Feature**: 001-oidc-proxy-script
**File**: `src/proxyScripts/proxy.js`
**Injector**: `src/server.js` via `vm.createContext()`
**Date**: 2025-07-17
---
## Overview
`proxy.js` runs in a Node.js VM sandbox. It has **zero access** to the Node.js module system
(`require`, `import`). All dependencies are injected by `server.js` through the
`vm.createContext()` object. This document specifies exactly what `proxy.js` depends on and
what `server.js` must provide.
---
## Required Context Variables
These MUST be present in every `vm.createContext()` call. Their absence will cause a
runtime `ReferenceError` inside the sandbox.
### `adapter_settings` — OIDC Configuration
Loaded from `src/globalVariables/adapter_settings.json` by `loadGlobalVariables()`.
| Property | Type | Required | Used for |
|----------|------|----------|---------|
| `tokenUrl` | `string` | ✅ | OIDC token endpoint URL (POST target) |
| `username` | `string` | ✅ | ROPC grant — resource owner username |
| `password` | `string` | ✅ | ROPC grant — resource owner password |
| `clientId` | `string` | ✅ | OAuth 2.0 client_id parameter |
| `scope` | `string` | ✅ | OAuth 2.0 scope parameter |
| `_pendingFetch` | `Promise \| null \| undefined` | — | In-flight stampede guard, initialised by proxy.js at runtime |
The `_pendingFetch` property is NOT in the JSON file; proxy.js adds it at runtime. Token and expiry are stored in Redis, not on this object.
### `redis` — Redis Client
Injected as a global. Used for persistent token caching across VM invocations.
| Method used | Signature | Purpose |
|------------|-----------|---------|
| `redis.hSet(key, field, value)` | `Promise<number>` | Store token or expiry in hash |
| `redis.hGet(key, field)` | `Promise<string \| null>` | Read token or expiry from hash |
Usage pattern in proxy.js:
```javascript
// Write
await redis.hSet('authorization', 'token', idToken);
await redis.hSet('authorization', 'expiry', String(expiresIn));
// Read
const token = await redis.hGet('authorization', 'token');
const expiry = parseFloat(await redis.hGet('authorization', 'expiry') ?? '0');
```
### `axios` — HTTP Client
Standard `axios` instance from the `axios` npm package (v1.x).
| Method used | Signature | Purpose |
|------------|-----------|---------|
| `axios.post(url, data, config)` | `Promise<AxiosResponse>` | POST to token endpoint |
Config properties used by proxy.js:
```javascript
{
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
timeout: 5000 // milliseconds — FR-014
}
```
### `URLSearchParams` — Form Body Builder
Global Web API (Node.js 18+ built-in).
| Usage | Purpose |
|-------|---------|
| `new URLSearchParams({ ... })` | Build `application/x-www-form-urlencoded` body |
### `console` — Structured Logger
Custom logger from `src/logger.js` (injected as `console` so proxy.js uses it transparently).
| Method | Used when |
|--------|----------|
| `console.error({ message, error })` | Authentication failure or unexpected error |
### `req` — HTTP Request Object
Node.js `http.IncomingMessage`, fresh per request.
| Property | Type | Usage |
|----------|------|-------|
| *(none)* | — | `proxy.js` does not read any request properties in this feature; `req` is present by convention |
### `res` — HTTP Response Object
Node.js `http.ServerResponse`, fresh per request.
| Method | Signature | Called when |
|--------|-----------|-------------|
| `res.writeHead(statusCode, headers)` | `void` | Before sending body |
| `res.end(body)` | `void` | Send response body and close |
---
## Optional / Unused Context Variables
These are injected by `server.js` (`globalVMContext`) and are available to proxy.js but
**not used** by this feature's implementation:
| Variable | Type | Reason unused |
|----------|------|--------------|
| `URL` | Web API | URL parsing not needed |
| `crypto` | Web Crypto API | No UUID or crypto ops in this script |
| `jwt` | jsonwebtoken | No JWT signing/verification needed |
| `uuidv4` | uuid function | No request-ID generation needed |
| `xmlBuilder` | xmlbuilder2 | No XML output |
---
## Injection Pattern (server.js)
```javascript
// Static VM context (compiled once at startup)
const globalVMContext = {
URLSearchParams, // ← used by proxy.js
URL,
console: logger, // ← used by proxy.js
crypto,
axios, // ← used by proxy.js
uuidv4,
jwt,
xmlBuilder,
redis, // ← used by proxy.js (token cache)
};
// Dynamic data (loaded from src/globalVariables/ at startup)
let globalVariableContext = {};
loadGlobalVariables(); // populates globalVariableContext.adapter_settings
// Per-request context (fresh sandbox each time)
const context = vm.createContext({
...globalVMContext, // spread — object refs, not clones
...globalVariableContext, // includes adapter_settings ← used by proxy.js
req, // fresh per request
res, // fresh per request
});
script.runInContext(context);
```
---
## State Ownership
| State | Lives on | Lifetime | Owned by |
|-------|----------|---------|---------|
| OIDC credentials | `adapter_settings` (JSON properties) | Process lifetime | `server.js` (loads from file) |
| Cached token | Redis hash `authorization`, field `token` | Until expiry / Redis flush | `proxy.js` (writes on fetch) |
| Token expiry | Redis hash `authorization`, field `expiry` | Until expiry / Redis flush | `proxy.js` (writes on fetch) |
| In-flight fetch promise | `adapter_settings._pendingFetch` (runtime property) | Duration of one fetch | `proxy.js` |
**Key invariant**: `adapter_settings` is the *same JS object reference* in every
`vm.createContext()` call. The `_pendingFetch` property written by `proxy.js` persists
across requests for stampede guarding. Token data is read from and written to Redis,
making it durable across adapter restarts.
---
## Test Contract (Minimal Fake Context)
A test context satisfying this contract:
```javascript
// tests/unit/proxy.test.js
import vm from 'node:vm';
function makeContext(t, overrides = {}) {
  // Fake Redis hash store: composite "<hash>:<field>" keys in a Map.
  const backing = new Map();
  const redis = {
    hSet: t.mock.fn(async (key, field, value) => {
      backing.set(`${key}:${field}`, value);
      return 1;
    }),
    hGet: t.mock.fn(async (key, field) => {
      const stored = backing.get(`${key}:${field}`);
      return stored ?? null;
    }),
  };

  // Capturing fake response object: exposes recorded status/body via getters.
  let capturedStatus = null;
  let capturedBody = '';
  const res = {
    writeHead: t.mock.fn((code) => {
      capturedStatus = code;
    }),
    end: t.mock.fn((chunk = '') => {
      capturedBody += String(chunk);
    }),
    get statusCode() {
      return capturedStatus;
    },
    get body() {
      return capturedBody;
    },
  };

  // Minimal settings satisfying the AdapterSettings contract.
  const adapter_settings = {
    tokenUrl: 'https://auth.example.com/token',
    username: 'testuser', password: 'testpass',
    clientId: 'test-client', scope: 'openid',
  };

  // Sandbox globals mirror what server.js injects; overrides win last.
  const sandbox = {
    URLSearchParams,
    console,
    axios: {
      post: t.mock.fn(async () => ({
        data: { id_token: 'mock-token', expires_in: 9_999_999_999 },
      })),
    },
    redis,
    adapter_settings,
    req: { url: '/', method: 'GET', headers: {} },
    res,
    ...overrides,
  };
  return vm.createContext(sandbox);
}
```

View File

@@ -0,0 +1,197 @@
# Data Model: OIDC Proxy Script Authentication
**Feature**: 001-oidc-proxy-script
**Phase**: 1 — Design
**Date**: 2025-07-17
---
## Entities
### 1. AdapterSettings
Persisted as `src/globalVariables/adapter_settings.json`. Loaded at server startup by
`loadGlobalVariables()` and injected into every VM context as the `adapter_settings` variable.
| Field | Type | Required | Description |
|-------|------|----------|-------------|
| `tokenUrl` | `string` | ✅ | Full HTTPS URL of the OIDC token endpoint |
| `username` | `string` | ✅ | Resource owner username (ROPC grant) |
| `password` | `string` | ✅ | Resource owner password |
| `clientId` | `string` | ✅ | OAuth 2.0 client identifier |
| `scope` | `string` | ✅ | Space-separated scopes, e.g. `"openid tags content_entitlements"` |
**JSON Schema**:
```json
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"required": ["tokenUrl", "username", "password", "clientId", "scope"],
"additionalProperties": false,
"properties": {
"tokenUrl": { "type": "string", "format": "uri" },
"username": { "type": "string", "minLength": 1 },
"password": { "type": "string", "minLength": 1 },
"clientId": { "type": "string", "minLength": 1 },
"scope": { "type": "string", "minLength": 1 }
}
}
```
**Example** (`src/globalVariables/adapter_settings.json`):
```json
{
"tokenUrl": "https://auth.kme.example.com/protocol/openid-connect/token",
"username": "service-account@example.com",
"password": "s3cr3t",
"clientId": "kme-content-adapter",
"scope": "openid tags content_entitlements"
}
```
> **Security**: This file MUST be excluded from version control (`.gitignore`).
> Provide `src/globalVariables/adapter_settings.json.example` with placeholder values.
---
### 2. TokenCache (Redis-backed)
Persisted in Redis as a hash under the key `authorization`. Written by `proxy.js` after
a successful token fetch; read on every request to check validity.
| Redis field | Type (stored) | Description |
|-------------|--------------|-------------|
| `token` | `string` | The cached `id_token` value |
| `expiry` | `string` (numeric) | Absolute Unix epoch timestamp (seconds) from `expires_in`; `"0"` or absent means expired |
**Initialisation**: No pre-seeding required. `redis.hGet` returns `null` for absent fields,
which proxy.js treats as expired.
**In-process stampede guard** (not in Redis):
```javascript
// adapter_settings._pendingFetch — set by proxy.js at runtime
adapter_settings._pendingFetch = fetchPromise; // set before fetch
adapter_settings._pendingFetch = null; // cleared in finally block
```
**Read pattern** (proxy.js):
```javascript
const token = await redis.hGet('authorization', 'token');
const expiry = parseFloat(await redis.hGet('authorization', 'expiry') ?? '0');
const isValid = token !== null && Date.now() / 1000 < expiry;
```
**Write pattern** (proxy.js, after successful fetch):
```javascript
await redis.hSet('authorization', 'token', idToken);
await redis.hSet('authorization', 'expiry', String(expiresIn));
```
**State transitions**:
```
┌──────────────────────────────────────────────────────────┐
│ │
┌────────────▼──────────────┐ fetch starts ┌──────────────────┐│
│ EMPTY / EXPIRED │─────────────────────▶│ FETCHING ││
│ token: null / expiry ≤ now│ │ pendingFetch: P ││
└───────────────────────────┘ └──────┬───────────┘│
▲ │ │
│ token expires fetch settles│ │
│ (Date.now()/1000 ≥ expiry) ┌───────────┴──────────┐ │
│ │ │ │
│ success │ failure │ │
│ ▼ ▼ │
│ ┌──────────────────┐ ┌──────────────┐│
│ │ VALID │ │ ERROR ││
└──────────────────────│ token: "abc…" │ │ token: null ││
│ expiry: 99999… │ │ expiry: 0 ││
└──────────────────┘ └──────────────┘│
│ │
└─────────────────────────────┘
(back to EMPTY / EXPIRED)
```
**Validity rule** (FR-006):
```javascript
const token = await redis.hGet('authorization', 'token');
const expiry = parseFloat(await redis.hGet('authorization', 'expiry') ?? '0');
const isValid = token !== null && Date.now() / 1000 < expiry;
```
---
### 3. OidcTokenResponse (external — token service)
Returned by the OIDC token service in response to a successful POST. Only the fields used by
`proxy.js` are listed; additional fields may be present and are ignored.
| Field | Type | Description |
|-------|------|-------------|
| `id_token` | `string` | Bearer token to cache and use for authentication (FR-004) |
| `expires_in` | `number` | Absolute Unix epoch timestamp (seconds) when token expires (FR-006) |
**Error response** (HTTP 4xx from token service): Not parsed; the HTTP status code alone is
sufficient to trigger a 401 response to the caller.
---
### 4. ProxyResponse (outbound HTTP)
The HTTP response sent by `proxy.js` back to the caller via the injected `res` object.
| Scenario | Status | Content-Type | Body |
|----------|--------|-------------|------|
| Authentication success | `200 OK` | `text/plain` | `Authorized` |
| Auth failure (any cause) | `401 Unauthorized` | `text/plain` | `Unauthorized: <message>` |
---
## Relationships
```
server.js
├── loadGlobalVariables()
│ └── reads adapter_settings.json ──────────► AdapterSettings
├── redis (injected into VM context) ───────────► Redis Store
│ └── hash: authorization
│ ├── token
│ └── expiry
└── vm.createContext({ ...globalVariableContext, redis, req, res })
└── script.runInContext(context) [proxy.js]
├── reads adapter_settings ─────────► AdapterSettings
├── reads/writes redis ──────────────► Redis Store (token + expiry)
├── sets adapter_settings._pendingFetch (stampede guard, in-process)
├── POST tokenUrl ────────────────────► OIDC Token Service
│ └── receives ─────────────────► OidcTokenResponse
└── writes res ──────────────────────► ProxyResponse
```
---
## Validation Rules
| Rule | Location | Behaviour on violation |
|------|----------|----------------------|
| `tokenUrl` present | proxy.js startup | 401 with `'missing required field: tokenUrl'` |
| `username` present | proxy.js startup | 401 with `'missing required field: username'` |
| `password` present | proxy.js startup | 401 with `'missing required field: password'` |
| `clientId` present | proxy.js startup | 401 with `'missing required field: clientId'` |
| `scope` present | proxy.js startup | 401 with `'missing required field: scope'` |
| `id_token` present in response | proxy.js after fetch | 401 with `'id_token missing from response'` |
| `expires_in` present in response | proxy.js after fetch | 401 with `'expires_in missing from response'` |
| Redis `hSet` / `hGet` available | proxy.js on every request | `ReferenceError` (server.js must inject `redis`) |
> Validation errors are caught by the top-level `catch` block in proxy.js and result in a
> `401 Unauthorized` response, never a process crash (SC-004).

View File

@@ -0,0 +1,108 @@
# Implementation Plan: OIDC Proxy Script Authentication
**Branch**: `001-oidc-proxy-script` | **Date**: 2025-07-17 | **Spec**: [spec.md](spec.md)
**Input**: Feature specification from `specs/001-oidc-proxy-script/spec.md`
## Summary
Create `src/globalVariables/adapter_settings.json` with OIDC credentials and implement
`src/proxyScripts/proxy.js` — a zero-import/export Node.js VM-sandbox script that:
- Reads OIDC credentials exclusively from the injected `adapter_settings` context variable (FR-001)
- POSTs to the configured `tokenUrl` with `application/x-www-form-urlencoded` body and a 5-second
timeout (FR-003, FR-014)
- Extracts the bearer token from the `id_token` field of the JSON response (FR-004)
- Caches the token in **Redis** (`redis.hSet('authorization', 'token', ...)` / `redis.hSet('authorization', 'expiry', ...)`) using `expires_in` as an **absolute Unix epoch timestamp** (FR-005, FR-006)
- Queues concurrent callers on a shared promise to prevent token-fetch stampedes (FR-013)
- Returns `200 OK / Authorized` on success and `401 Unauthorized` with a descriptive message on any
failure (FR-007, FR-008)
No modifications to `server.js` are required. The existing `loadGlobalVariables()` pattern
automatically picks up `adapter_settings.json` and injects it as `adapter_settings` into every VM
context.
## Technical Context
**Language/Version**: Node.js 18+ (ES Modules; `"type": "module"` in package.json)
**Primary Dependencies**: `axios` ^1.13.6, `uuid` ^13.0.0, `jsonwebtoken` ^9.0.3,
`xmlbuilder2` ^4.0.3 — all already present in `package.json`; no new packages required
**Storage**: Redis — token and expiry stored in hash key `authorization` via `redis.hSet`/`hGet`; `redis` is injected into the VM context as a global. An in-process `adapter_settings._pendingFetch` guards against stampede (Promises cannot be serialised to Redis).
**Testing**: Node.js built-in `node:test` runner (`node --test tests/**/*.test.js`);
`t.mock.fn()`, `t.mock.timers` for fakes; no external test framework needed
**Target Platform**: Linux/macOS, long-running Node.js 18+ server process
**Project Type**: HTTP proxy adapter — VM-sandboxed script (IVA Studio proxy script pattern)
**Performance Goals**: Every request responds within 5 s (SC-001); zero token-service round-trips
when a valid cached token exists (SC-002)
**Constraints**: `proxy.js` MUST have zero `import`/`export` statements; MUST NOT reference
`config`, `global.config`, or `process.env`; all dependencies injected via `vm.createContext()`
**Scale/Scope**: Single-process, single-tenant; one OIDC token shared across all concurrent
requests
## Constitution Check
*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*
| Principle | Status | Notes |
|-----------|--------|-------|
| **I. Monolithic Architecture** | ✅ PASS | All auth + cache logic in `proxy.js`; no helper extraction |
| **I. Zero Imports/Exports** | ✅ PASS | `proxy.js` uses only VM-injected globals; zero `import`/`export` |
| **I.0 Forbidden Globals** | ✅ PASS | No `config`, `global.config`, or `process.env` in `proxy.js` |
| **I.I What MUST be in proxy.js** | ✅ PASS | Authentication, token cache, stampede queue all in `proxy.js` |
| **I.II Allowed Separate Files** | ✅ PASS | Only adding `adapter_settings.json` to `src/globalVariables/` |
| **I.IV Configuration** | ✅ PASS | Credentials in `src/globalVariables/adapter_settings.json`, not `config/default.json` |
| **I.V VM Context Injection** | ✅ PASS | `adapter_settings` auto-loaded by existing `loadGlobalVariables()` — no server.js changes |
| **II. API-First Design** | ✅ PASS | HTTP response contract and VM context contract documented before implementation |
| **III. Test-First Development** | ✅ PASS | Test scenarios defined; tests written before implementation code |
**No violations. Complexity Tracking not required.**
### Post-Design Re-check
| Principle | Status | Notes |
|-----------|--------|-------|
| **Cache state isolation** | ✅ PASS | Token/expiry stored in Redis hash `authorization`; in-process `adapter_settings._pendingFetch` holds stampede guard (Promise not serialisable to Redis) |
| **Cross-realm Promise** | ✅ PASS | Stampede guard uses duck-type check (`typeof .then === 'function'`) rather than `instanceof Promise`; `await` uses the Promises/A+ thenable protocol which is cross-realm safe |
| **Async error containment** | ✅ PASS | `script.runInContext()` is not awaited by server.js; proxy.js top-level async IIFE catches all errors internally and always sends a response |
## Project Structure
### Documentation (this feature)
```text
specs/001-oidc-proxy-script/
├── plan.md # This file
├── research.md # Phase 0 — resolved unknowns
├── data-model.md # Phase 1 — entities and state transitions
├── quickstart.md # Phase 1 — setup and testing guide
├── contracts/
│ ├── proxy-http.md # HTTP response contract (success + error)
│ └── vm-context.md # VM dependency injection contract
└── tasks.md # Phase 2 — task list (/speckit.tasks, not created here)
```
### Source Code (repository root)
```text
src/
├── proxyScripts/
│ └── proxy.js # NEW — OIDC authentication proxy (VM sandbox)
├── globalVariables/
│ └── adapter_settings.json # NEW — OIDC credentials and token endpoint
├── logger.js # EXISTING — no changes
└── server.js # EXISTING — no changes required
tests/
├── unit/
│ └── proxy.test.js # NEW — unit tests: cache, expiry, stampede, error paths
└── contract/
└── proxy-http.test.js # NEW — contract tests: HTTP 200/401 response shape
```
**Structure Decision**: Single-project layout (Option 1). Two new source files, two new test
files. No new directories (both `src/proxyScripts/` and `src/globalVariables/` already exist as
defined directories in the constitution; `tests/unit/` and `tests/contract/` match the existing
`package.json` test scripts).
## Complexity Tracking
> No constitution violations — this section is intentionally empty.

View File

@@ -0,0 +1,196 @@
# Quickstart: OIDC Proxy Script Authentication
**Feature**: 001-oidc-proxy-script
**Date**: 2025-07-17
---
## Prerequisites
- Node.js 18.0.0 or later (`node --version`)
- Access to a KME OIDC token service endpoint
- Project dependencies installed (`npm install`)
---
## Step 1 — Create `adapter_settings.json`
```bash
cd /path/to/kme-content-adapter
cp src/globalVariables/adapter_settings.json.example \
src/globalVariables/adapter_settings.json
```
Edit `src/globalVariables/adapter_settings.json` with your real credentials:
```json
{
"tokenUrl": "https://auth.kme.example.com/protocol/openid-connect/token",
"username": "your-service-account@example.com",
"password": "your-password",
"clientId": "your-client-id",
"scope": "openid tags content_entitlements"
}
```
> **Security**: `adapter_settings.json` is in `.gitignore`. Never commit credentials.
> The `.example` file (with placeholder values) IS committed and serves as the template.
---
## Step 2 — Start the Adapter
```bash
npm start
# or for development with auto-reload:
npm run dev
```
Expected startup log (structured JSON):
```json
{"message": "Loaded global data: adapter_settings", "keys": ["tokenUrl","username","password","clientId","scope"]}
{"message": "Loaded 1 global variables", "json": 1, "js": 0}
{"message": "Configuration loaded", "port": 3000, "host": "0.0.0.0", ...}
{"message": "Configuration validated successfully"}
{"message": "Server listening", "port": 3000, "host": "0.0.0.0"}
```
---
## Step 3 — Send a Test Request
```bash
curl -v http://localhost:3000/ProxyScript/run/67bca862210071627d32ef12/current/kmeAdapter
```
**Expected response on first request** (token fetched from OIDC service):
```
HTTP/1.1 200 OK
Content-Type: text/plain
Authorized
```
**Expected response on subsequent requests** (token served from cache):
```
HTTP/1.1 200 OK
Content-Type: text/plain
Authorized
```
No visible difference from the caller's perspective; the adapter log will show no new
axios call for the second request (cache hit).
**Expected response with invalid credentials**:
```
HTTP/1.1 401 Unauthorized
Content-Type: text/plain
Unauthorized: HTTP 401
```
---
## Step 4 — Run Tests
### Unit tests (fast, no network, no server required)
```bash
npm run test:unit
# runs: node --test tests/unit/proxy.test.js
```
Exercises: cache hit, cache miss, token expiry, stampede prevention, timeout handling,
missing `id_token`, missing required fields, HTTP error from token service.
### Contract tests (starts real HTTP server with mock token endpoint)
```bash
npm run test:contract
# runs: node --test tests/contract/proxy-http.test.js
```
Exercises: end-to-end `200 OK / Authorized`, end-to-end `401 Unauthorized`, verifies
response headers and exact body strings.
### All tests
```bash
npm test
# runs: node --test tests/**/*.test.js
```
---
## Architecture Summary
```
Inbound HTTP request
server.js (http.createServer)
│ creates fresh vm.createContext({
│ ...globalVMContext, ← axios, URLSearchParams, console, ...
│ ...globalVariableContext, ← adapter_settings (from JSON)
│ req, res ← fresh per request
│ })
proxy.js (vm.Script, compiled once)
 ├─ reads Redis hash `authorization` (fields: token, expiry)
 │ ├─ CACHE HIT: token valid → 200 OK / Authorized
 │ └─ CACHE MISS / EXPIRED:
 │ ├─ FETCHING (stampede guard): queue on adapter_settings._pendingFetch → 200/401
 │ └─ NEW FETCH:
 │ POST tokenUrl (timeout: 5s)
 │ ├─ SUCCESS: write token + expiry to Redis → 200 OK / Authorized
 │ └─ FAILURE: → 401 Unauthorized: &lt;message&gt;
HTTP response to caller
```
---
## Key Files
| File | Status | Purpose |
|------|--------|---------|
| `src/globalVariables/adapter_settings.json` | **Create** | OIDC credentials (gitignored) |
| `src/globalVariables/adapter_settings.json.example` | **Create** | Template with placeholder values |
| `src/proxyScripts/proxy.js` | **Create** | OIDC authentication proxy (VM sandbox) |
| `tests/unit/proxy.test.js` | **Create** | Unit tests (no network, no server) |
| `tests/contract/proxy-http.test.js` | **Create** | HTTP response contract tests |
| `src/server.js` | **No change** | Existing infrastructure; auto-loads adapter_settings.json |
| `config/default.json` | **No change** | Infrastructure settings only (port, host, log level) |
---
## Token Lifecycle
| Phase | What happens |
|-------|-------------|
| **First request** | Redis `token` field is absent → fresh token fetch → cache `id_token` + `expires_in` in Redis hash `authorization` |
| **Subsequent requests (valid token)** | `Date.now()/1000 < expiry` (read from Redis) → return `Authorized` immediately, no network call |
| **After token expiry** | `Date.now()/1000 ≥ expiry` → fresh token fetch (transparent to caller) |
| **Concurrent requests during fetch** | All requests `await` the shared `adapter_settings._pendingFetch` promise; only ONE HTTP call made |
| **Auth failure** | Clear Redis `token` and `expiry` fields → respond `401 Unauthorized: <reason>` |
| **Timeout (5 s)** | axios `ECONNABORTED` error → treated as auth failure → `401 Unauthorized` |
---
## Troubleshooting
| Symptom | Likely cause | Fix |
|---------|-------------|-----|
| `401 Unauthorized: HTTP 401` | Wrong credentials | Check `username`, `password`, `clientId` in `adapter_settings.json` |
| `401 Unauthorized: connect ECONNREFUSED` | Token service unreachable | Check `tokenUrl` is correct and reachable |
| `401 Unauthorized: token service timeout` | Network slow or token service down | Verify connectivity; check token service health |
| `401 Unauthorized: id_token missing from response` | Token service returns `access_token` only | Ensure `openid` is in `scope` and the service issues `id_token` |
| Server fails to start with `adapter_settings` not found | JSON file missing | Run Step 1 above |
| `SyntaxError` in proxy.js at startup | `import` or `export` statement in proxy.js | Remove all `import`/`export` — proxy.js must be pure VM sandbox code |

View File

@@ -0,0 +1,280 @@
# Research: OIDC Proxy Script Authentication
**Feature**: 001-oidc-proxy-script
**Phase**: 0 — Resolved unknowns
**Date**: 2025-07-17
---
## R-001 — Token Cache Persistence in VM Sandbox
**Unknown**: How can `proxy.js` maintain a token cache that survives across requests when
`server.js` creates a fresh `vm.createContext()` per request (resetting all bare `let` variables)?
**Decision**: Store the cache as a property on the `adapter_settings` object
(`adapter_settings._cache`). *(Superseded in Phase 1: token and expiry now live in the
Redis hash `authorization`; only the in-flight fetch promise remains in-process, as
`adapter_settings._pendingFetch` — see plan.md and data-model.md.)*
**Rationale**: `server.js` loads `adapter_settings.json` into `globalVariableContext.adapter_settings`
once at startup. The per-request spread `vm.createContext({ ...globalVariableContext })` copies the
*object reference* — not a clone — into each sandbox. Any mutation to `adapter_settings._cache`
modifies the same underlying heap object and is therefore visible to every subsequent request.
Bare `let`/`const` variables at the top level of `proxy.js` do **not** persist; they are
sandbox-local and are reset to `undefined` on every invocation.
```javascript
// proxy.js — top of script (safe, no import/export)
// adapter_settings is the same JS object reference every invocation:
const _cache = adapter_settings._cache ||
(adapter_settings._cache = { token: null, expiry: 0, pendingFetch: null });
```
**Alternatives considered**:
| Alternative | Why rejected |
|-------------|-------------|
| Bare `let cachedToken` in proxy.js | Resets on every `vm.createContext()` invocation |
| Add dedicated `_cache` to `globalVariableContext` in server.js | Correct but requires server.js modification; feature spec says no server.js changes needed |
| External Redis / file cache | Introduces infrastructure dependency; spec assumption explicitly rules this out |
| Attach to `axios` or another injected object | Correct mechanism but semantically wrong; `adapter_settings` is the natural owner |
**Verification**: `globalVariableContext` is a module-level `let` in `server.js` (line 27).
The spread in `vm.createContext` (lines 172–177) copies each property value by reference for
objects. `adapter_settings` is an object, so the sandbox and `globalVariableContext` share the
same reference. Confirmed by reading `src/server.js`.
---
## R-002 — Token Stampede Prevention (Promise Sharing)
**Unknown**: When multiple concurrent requests arrive while `_cache.token` is null, how do we
ensure only one HTTP request is sent to the token service?
**Decision**: Store the in-flight fetch promise on `_cache.pendingFetch`. Subsequent requests
detect a non-null `pendingFetch` (via duck-type check) and `await` the same promise. A `finally`
block clears `pendingFetch` after settlement. *(In the final Phase 1 design this property is
`adapter_settings._pendingFetch`; see data-model.md.)*
**Rationale**: The `pendingFetch` property is on the shared `adapter_settings._cache` object
(same reference as R-001). All concurrent requests therefore see the same pending Promise.
**Cross-realm safety**: Each `vm.createContext()` creates a new V8 realm with its own
`Promise` constructor. `instanceof Promise` checks fail across realms. The Promises/A+ thenable
protocol (`await` and `.then()`) works via duck-typing and is cross-realm safe.
```javascript
// Stampede guard — duck-type check, NOT instanceof
if (_cache.pendingFetch !== null &&
typeof _cache.pendingFetch.then === 'function') {
// Queue on the existing fetch — await is thenable-protocol safe across V8 realms
try {
await _cache.pendingFetch;
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Authorized');
} catch (err) {
res.writeHead(401, { 'Content-Type': 'text/plain' });
res.end('Unauthorized: ' + err.message);
}
return;
}
// This invocation wins the race — build and share the fetch promise
_cache.pendingFetch = (async () => {
// ... axios.post ...
_cache.token = id_token;
_cache.expiry = expires_in; // absolute Unix epoch seconds (FR-006)
})();
try {
await _cache.pendingFetch;
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Authorized');
} catch (err) {
_cache.token = null;
_cache.expiry = 0;
res.writeHead(401, { 'Content-Type': 'text/plain' });
res.end('Unauthorized: ' + err.message);
} finally {
_cache.pendingFetch = null; // clear after all awaiters have resolved/rejected
}
```
**Why `finally` is safe for clearing `pendingFetch`**: By the time `finally` runs, all callers
that `await`-ed `_cache.pendingFetch` have already received the settled value. Setting
`pendingFetch = null` only affects future requests arriving after settlement.
**Alternatives considered**:
| Alternative | Why rejected |
|-------------|-------------|
| `instanceof Promise` guard | Fails across V8 realms — each sandbox has its own `Promise` constructor |
| Mutex / lock via integer flag | More complex; promise sharing is idiomatic in async JS |
| Separate request queue (array + callbacks) | Overkill; promise sharing achieves the same result with fewer lines |
---
## R-003 — Async Proxy Script in Synchronous `runInContext`
**Unknown**: `server.js` calls `script.runInContext(context)` synchronously without `await`.
How can `proxy.js` do async work (HTTP call) without leaving unhandled promise rejections?
**Decision**: Wrap all proxy logic in a top-level immediately-invoked async function expression
(IIFE). Catch all errors inside the IIFE and always send a response before the IIFE resolves.
**Rationale**: The async IIFE returns a Promise but `server.js` does not `await` it. The
outer `try/catch` in `server.js` only catches synchronous throws. All async errors must be
handled within `proxy.js` itself.
```javascript
// proxy.js — entire body wrapped in async IIFE
(async () => {
try {
// ... token logic ...
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Authorized');
} catch (err) {
console.error({ message: 'Auth failed', error: err.message });
res.writeHead(401, { 'Content-Type': 'text/plain' });
res.end('Unauthorized: ' + err.message);
}
})();
```
**Constraint**: `res.end()` MUST be called in both the `try` and `catch` paths. If `res.end()`
is not called, the HTTP connection hangs for the caller.
---
## R-004 — Absolute Epoch Expiry Check
**Unknown**: The spec states `expires_in` is an absolute Unix epoch timestamp (not a
relative duration). What is the correct expiry check?
**Decision**: `Date.now() / 1000 < _cache.expiry` — token is valid when current Unix time
(seconds) is strictly less than the stored `expires_in` value.
**Rationale**: Confirmed in spec (line 99): "Expiry check: `Date.now() / 1000 < expires_in`".
Example value `1532618185` is a Unix timestamp, not a duration.
```javascript
// A token counts as valid only when one is cached AND the current Unix time
// (seconds) is strictly before the stored absolute epoch expiry (FR-006).
function isTokenValid() {
  if (_cache.token === null) {
    return false;
  }
  const nowSeconds = Date.now() / 1000;
  return nowSeconds < _cache.expiry;
}
```
**Edge cases**:
- `expires_in` already in the past on receipt → `isTokenValid()` returns `false` immediately →
fresh token fetched (FR-006 compliance)
- `expires_in` is `0` or negative → treated as expired
- No safety buffer is applied; the spec does not require one
---
## R-005 — axios OIDC POST Pattern
**Decision**: Pass `URLSearchParams` instance as body; set `Content-Type` header explicitly for
clarity; use `timeout: 5000` for the 5-second limit (FR-014).
```javascript
const params = new URLSearchParams({
grant_type: 'password',
username: adapter_settings.username,
password: adapter_settings.password,
client_id: adapter_settings.clientId,
scope: adapter_settings.scope,
});
const response = await axios.post(adapter_settings.tokenUrl, params, {
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
timeout: 5000,
});
```
**Content-Type note**: axios v1.x auto-detects `URLSearchParams` and sets
`application/x-www-form-urlencoded` automatically. The explicit header is belt-and-suspenders
for readability in the VM sandbox context.
---
## R-006 — axios Error Differentiation
**Decision**: Treat **all** axios errors as authentication failures (FR-008). Derive the
error message from the error type for clarity in the 401 body:
| Error condition | `error.response` | `error.code` | Message strategy |
|-----------------|-----------------|--------------|-----------------|
| HTTP 4xx/5xx from token service | Populated | — | `HTTP ${error.response.status}` |
| Timeout (5 s) | `undefined` | `'ECONNABORTED'` / `'ERR_CANCELED'` | `'token service timeout'` |
| Network failure (DNS, TCP) | `undefined` | `'ERR_NETWORK'` | `error.message` |
| Missing `id_token` in response | — | — | `'id_token missing from response'` |
| Missing `adapter_settings` fields | — | — | `'missing required field: <name>'` |
All cases route to the same 401 response path, satisfying FR-008 and SC-004.
---
## R-007 — Testing VM-Sandboxed Code with `node:test`
**Decision**: Two test layers, no external test framework:
1. **Unit tests** (`tests/unit/proxy.test.js`): Compile `proxy.js` once with `new vm.Script()`.
Each test creates a controlled fake context (mock `axios`, controllable `_cache`, mock `res`).
Use `await script.runInContext(ctx)` to drive the async IIFE.
Use `t.mock.fn()` for call-count assertions; `t.mock.timers` for time-travel expiry tests.
2. **Contract tests** (`tests/contract/proxy-http.test.js`): Start an actual HTTP server with
a mock token endpoint (using `http.createServer`) and assert real HTTP responses. Validates
end-to-end behaviour including `server.js` context injection.
**Key insight**: Because `proxy.js` receives **all** dependencies via the VM context object,
dependency injection IS the test seam. No module-level mocking (`jest.mock` equivalent) is
needed or available — the context object serves the same role.
```javascript
// tests/unit/proxy.test.js — shared setup pattern
import { test, describe } from 'node:test';
import assert from 'node:assert/strict';
import vm from 'node:vm';
import { readFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
const __dirname = dirname(fileURLToPath(import.meta.url));
const proxyCode = readFileSync(
join(__dirname, '../../src/proxyScripts/proxy.js'), 'utf-8'
);
const script = new vm.Script(proxyCode, { filename: 'proxy.js' });
function makeContext(t, overrides = {}) {
  // Shared mutable cache attached to adapter_settings (R-001 persistence pattern).
  const _cache = { token: null, expiry: 0, pendingFetch: null };

  // Capturing fake res: records what proxy.js writes, exposed via getters.
  let recordedStatus = null;
  let recordedBody = '';
  const res = {
    writeHead: t.mock.fn((code) => {
      recordedStatus = code;
    }),
    end: t.mock.fn((b = '') => {
      recordedBody += b;
    }),
    get statusCode() {
      return recordedStatus;
    },
    get body() {
      return recordedBody;
    },
  };

  // Mock token service: always succeeds with a far-future expiry.
  const axios = {
    post: t.mock.fn(async () => ({
      data: { id_token: 'test-token', expires_in: 9_999_999_999 }
    }))
  };

  const adapter_settings = {
    tokenUrl: 'https://auth.example.com/token',
    username: 'user',
    password: 'pass',
    clientId: 'client',
    scope: 'openid',
    _cache,
  };

  // Overrides are spread last so individual tests can swap any injected global.
  return vm.createContext({
    URLSearchParams,
    URL,
    console,
    axios,
    adapter_settings,
    req: { url: '/proxy', method: 'GET', headers: {} },
    res,
    ...overrides,
  });
}
```
**Run command**: `node --test tests/unit/proxy.test.js`

View File

@@ -0,0 +1,110 @@
# Feature Specification: OIDC Proxy Script Authentication
**Feature Branch**: `001-oidc-proxy-script`
**Created**: 2025-07-16
**Status**: Clarified
## User Scenarios & Testing *(mandatory)*
### User Story 1 - Successful Authenticated Request (Priority: P1)
A system operator or integration consumer sends an HTTP request through the kme-content-adapter. The proxy script transparently authenticates against the KME OIDC token service using stored credentials and, upon success, returns a confirmation response to the caller.
**Why this priority**: This is the core behaviour of the feature — without successful authentication, the proxy script provides no value. All other stories depend on this flow working correctly.
**Independent Test**: Can be tested in isolation by sending any HTTP request to the adapter's proxy endpoint and confirming a `200 OK` response with an "Authorized" body is returned, verifiable without any downstream system interaction.
**Acceptance Scenarios**:
1. **Given** the adapter is running and `adapter_settings.json` contains valid credentials and a reachable token URL, **When** an HTTP request arrives at the proxy endpoint, **Then** the script obtains a valid OIDC token and responds with HTTP `200 OK` and the body `Authorized`.
2. **Given** a valid OIDC token has already been cached and has not expired, **When** a subsequent HTTP request arrives, **Then** the script reuses the cached token without making a new authentication request, and responds with HTTP `200 OK` and the body `Authorized`.
---
### User Story 2 - Token Expiry and Refresh (Priority: P2)
The system manages token lifetime transparently. When a previously cached token has expired, the proxy script automatically obtains a fresh token before responding.
**Why this priority**: Without expiry handling, the proxy fails silently after the token lifetime ends, causing all requests to break until the adapter is restarted.
**Independent Test**: Can be tested by simulating a cached token with a past expiry time and confirming the script fetches a new token and still returns `200 OK`.
**Acceptance Scenarios**:
1. **Given** a cached token whose expiry time has passed, **When** a new HTTP request arrives, **Then** the script discards the expired token, fetches a fresh one from the token service, and responds with HTTP `200 OK` and the body `Authorized`.
2. **Given** a cached token that is still valid, **When** checking expiry, **Then** no new token request is made to the token service.
---
### User Story 3 - Authentication Failure Handling (Priority: P3)
If the token service rejects the credentials or is unreachable, the proxy script communicates the failure clearly to the caller rather than hanging or returning a misleading success.
**Why this priority**: Proper error surfacing prevents silent failures that are difficult to diagnose in production.
**Independent Test**: Can be tested by providing invalid credentials in `adapter_settings.json` and confirming the proxy returns an appropriate HTTP error response.
**Acceptance Scenarios**:
1. **Given** the credentials in `adapter_settings.json` are invalid, **When** an HTTP request arrives, **Then** the proxy script responds with HTTP `401 Unauthorized` and an error message without crashing the adapter process.
2. **Given** the token service URL is unreachable, **When** an HTTP request arrives, **Then** the proxy script responds with HTTP `401 Unauthorized` and a descriptive error message.
---
### Edge Cases
- What happens when `adapter_settings.json` is missing the `tokenUrl`, `username`, or `password` fields?
- How does the system handle a token service response that omits the `id_token` field?
- **[RESOLVED]** If `expires_in` is already in the past on arrival, treat as expired and fetch a fresh token immediately.
- **[RESOLVED]** When two concurrent requests arrive while no valid token is cached (token stampede), only one token fetch is made; all others queue and share the result.
## Requirements *(mandatory)*
### Functional Requirements
- **FR-001**: The proxy script MUST read `tokenUrl`, `username`, `password`, `clientId`, and `scope` exclusively from the `adapter_settings` injected variable (sourced from `src/globalVariables/adapter_settings.json`).
- **FR-002**: `src/globalVariables/adapter_settings.json` MUST contain the fields `tokenUrl`, `username`, `password`, `clientId`, and `scope`.
- **FR-003**: The proxy script MUST authenticate by sending a `POST` request to the configured `tokenUrl` with a `Content-Type: application/x-www-form-urlencoded` body containing `grant_type=password`, `username`, `password`, `client_id`, and `scope`.
- **FR-004**: The proxy script MUST extract the bearer token from the `id_token` field of the token service's JSON response.
- **FR-005**: The proxy script MUST cache the obtained token in Redis using `redis.hSet('authorization', 'token', token)` and `redis.hSet('authorization', 'expiry', String(expires_in))`, and reuse it for subsequent requests until it expires.
- **FR-006**: The proxy script MUST determine token expiry by reading `redis.hGet('authorization', 'expiry')` and comparing to `Date.now() / 1000`. If the stored expiry is already in the past on read, the token MUST be treated as expired and a fresh token fetched immediately.
- **FR-007**: The proxy script MUST respond with HTTP `200 OK` and the plain-text body `Authorized` when authentication succeeds.
- **FR-008**: The proxy script MUST respond with HTTP `401 Unauthorized` and a descriptive plain-text message when authentication fails (invalid credentials, unreachable token service, or malformed response).
- **FR-009**: The proxy script file (`src/proxyScripts/proxy.js`) MUST contain zero `import` or `export` statements, as it executes inside a Node.js VM sandbox.
- **FR-010**: The proxy script MUST NOT reference `config`, `global.config`, or `process.env` for any configuration or credential values.
- **FR-011**: The proxy script MUST use only dependencies injected via the VM context: `axios`, `console`, `crypto`, `jwt`, `uuidv4`, `xmlBuilder`, `URLSearchParams`, `URL`, and `redis`.
- **FR-012**: `req` and `res` must be treated as the injected Node.js HTTP request and response objects; no other I/O mechanism may be used.
- **FR-013**: When two or more concurrent requests arrive while no valid token is cached, only one token fetch request MUST be made to the token service; all other requests MUST queue and share the result of that single fetch.
- **FR-014**: The token POST request to the OIDC service MUST apply a 5-second HTTP timeout; a timeout error MUST be treated as an authentication failure (FR-008).
### Key Entities
- **adapter_settings**: Configuration object loaded from `src/globalVariables/adapter_settings.json` and injected into the VM context. Contains `tokenUrl`, `username`, `password`, `clientId`, and `scope`.
- **OIDC Token**: A short-lived bearer token issued by the KME OIDC token service. Identified by its `id_token` field; lifetime communicated via `expires_in` (an absolute Unix epoch timestamp in seconds, not a relative duration — see Assumptions).
- **Token Cache**: Token and expiry stored in Redis under the hash key `authorization` (fields `token` and `expiry`). An in-process `_pendingFetch` property on `adapter_settings` guards against stampede within the single adapter process (Promises cannot be serialised to Redis).
- **Proxy Request/Response**: The Node.js `req` and `res` HTTP objects injected into the VM, representing the inbound caller and the outbound reply.
## Success Criteria *(mandatory)*
### Measurable Outcomes
- **SC-001**: Every inbound request to the proxy endpoint receives a response (success or error) within 5 seconds under normal network conditions.
- **SC-002**: After the first successful authentication, all subsequent requests with a valid cached token complete without contacting the token service, reducing per-request authentication overhead to zero network round-trips.
- **SC-003**: Token refresh occurs automatically with no manual intervention required when a cached token expires; the caller receives `200 OK` transparently.
- **SC-004**: 100% of authentication failures (bad credentials, network errors, malformed responses) result in HTTP `401 Unauthorized` rather than an unhandled exception or process crash.
- **SC-005**: The proxy script introduces no new global state, file system access, or environment variable reads — verifiable by static inspection of the file (zero import/export/process.env/config references).
## Assumptions
- The KME OIDC token service's `expires_in` field represents an **absolute Unix epoch timestamp in seconds** (not a relative duration in seconds), as implied by the example value `1532618185`. Expiry check: `Date.now() / 1000 < expires_in`. If the value is already past on receipt, the token is immediately considered expired.
- Token and expiry are persisted in Redis (`redis.hSet / hGet` on hash key `authorization`), surviving adapter restarts and shared across any future processes.
- The in-process `_pendingFetch` stampede guard is retained on `adapter_settings` because Promises cannot be serialised to Redis; this is appropriate for the current single-process deployment.
- Concurrent requests during a token fetch are queued; only one fetch is in-flight at any time (no stampede).
- Authentication failures return HTTP `401 Unauthorized` with a plain-text error body.
- The token POST request carries a 5-second timeout.
- The adapter process remains long-running; the in-process `_pendingFetch` stampede guard is therefore effective across multiple requests, while the token and expiry values themselves are persisted externally in Redis.
- Only one token is needed at a time; there is no multi-tenant or per-user token requirement for this proxy script.
- The `scope` value `openid tags content_entitlements` is fixed and not expected to vary per request.
- The caller of the proxy endpoint does not require the actual OIDC token in the response body; the `200 OK / Authorized` reply is sufficient to confirm authentication succeeded.
- Error responses should be plain text to keep the script simple; no structured error body format is required.
- The VM context is always initialised with all listed dependencies (`axios`, `console`, `crypto`, `jwt`, `uuidv4`, `xmlBuilder`, `URLSearchParams`, `URL`, `redis`) before the script executes.

View File

@@ -0,0 +1,365 @@
# Tasks: OIDC Proxy Script Authentication
**Feature**: `001-oidc-proxy-script`
**Input**: `specs/001-oidc-proxy-script/` — spec.md, plan.md, data-model.md, research.md, contracts/, quickstart.md
**Prerequisites**: Node.js 18+, `npm install` already run, Redis available at default port
**TDD**: Tests are written **before** each implementation task per the project constitution (plan.md §Constitution Check III). Confirm each test **fails** before writing the implementation that makes it pass.
## Format: `[ID] [P?] [Story?] Description`
- **[P]**: Parallelisable — different files, no dependencies on incomplete tasks in the same phase
- **[US1/2/3]**: Maps to User Story in spec.md
- All paths relative to repository root (`/Users/peter.morton/kme-content-adapter/`)
---
## Phase 1: Setup
**Purpose**: Create the OIDC settings files and protect credentials from being committed.
- [X] T001 Create `src/globalVariables/adapter_settings.json.example` — JSON object with the five required fields as placeholder strings: `tokenUrl`, `username`, `password`, `clientId`, `scope` (use `"https://auth.kme.example.com/protocol/openid-connect/token"`, `"service-account@example.com"`, `"changeme"`, `"kme-content-adapter"`, `"openid tags content_entitlements"`)
- [X] T002 [P] Create `src/globalVariables/adapter_settings.json` by copying `adapter_settings.json.example` — replace placeholder values with real credentials (this file is gitignored and MUST NOT be committed; real values required for contract tests and manual smoke test)
- [X] T003 [P] Add `src/globalVariables/adapter_settings.json` to `.gitignore` — append the line `src/globalVariables/adapter_settings.json`; confirm `src/globalVariables/adapter_settings.json.example` is **not** excluded; run `git status` to verify the `.json` file is untracked/ignored and the `.example` file is visible to git
---
## Phase 2: Foundational — Redis Client Wiring
**Purpose**: `proxy.js` calls `redis.hSet()` and `redis.hGet()` (instance methods on a connected client). The current `src/server.js` injects the bare `redis` module, not a connected client. This must be fixed before any proxy.js code can run.
**⚠️ CRITICAL**: No proxy.js work can be executed end-to-end until this phase is complete.
- [X] T004 Update `src/server.js` — wire up a connected Redis client and inject it into `globalVMContext`:
1. Change `import redis from "redis"``import { createClient } from "redis"`
2. After the import block, add: `const redisClient = createClient(); await redisClient.connect();` (move inside `startServer()` before `loadGlobalVariables()`, since `startServer` is already `async`)
3. In `globalVMContext`, replace `redis` with `redis: redisClient`
4. Start the server (`npm start`) and confirm no Redis connection error in the startup log
**Checkpoint**: `globalVMContext.redis` is now a live client with `hSet`/`hGet`. User story implementation can begin.
---
## Phase 3: User Story 1 — Successful Authenticated Request (Priority: P1) 🎯 MVP
**Goal**: An inbound HTTP request produces `200 OK / Authorized` whether the token must be freshly fetched or served from the Redis cache.
**Independent Test**: Send any request to the proxy endpoint; confirm `200 OK` with body `Authorized`. Verifiable with `curl http://localhost:3000/<proxy-path>` after completing T002 with real credentials.
### Tests for User Story 1 (write first — must FAIL before T007)
- [X] T005 [P] [US1] Create `tests/unit/proxy.test.js` — skeleton + two US1 tests using `node:test` and `node:vm`:
1. **`makeContext(t, overrides)`** helper: in-memory Redis fake (`_store = {}`; `hSet` and `hGet` backed by `_store[key:field]`, both `t.mock.fn()`); mock `axios.post` returning `{ data: { id_token: 'test-token', expires_in: 9_999_999_999 } }`; mock `res` with `writeHead` / `end` as `t.mock.fn()`, exposing `statusCode` and `body` getters; `adapter_settings` with all five fields set; inject `URLSearchParams`, `console`
2. **Test "cache miss → fresh fetch → 200 OK"**: empty Redis store, run `await script.runInContext(ctx)`, assert `res.statusCode === 200`, `res.body === 'Authorized'`, `axios.post.mock.calls.length === 1`
3. **Test "cache hit → no fetch → 200 OK"**: pre-seed `_store` with `'authorization:token' = 'cached-tok'` and `'authorization:expiry' = '9999999999'`, run script, assert `res.statusCode === 200`, `res.body === 'Authorized'`, `axios.post.mock.calls.length === 0`
4. Run `node --test tests/unit/proxy.test.js` — confirm both tests **fail** (`ReferenceError: proxy.js not found` is expected)
- [X] T006 [P] [US1] Create `tests/contract/proxy-http.test.js` — one US1 contract test using `node:test`, `node:http`, `node:vm`, `node:fs`:
1. Spin up an `http.createServer` mock token endpoint that responds `200` with `JSON.stringify({ id_token: 'contract-token', expires_in: 9_999_999_999 })` on any POST
2. Build a VM context with real `URLSearchParams`, `console`, the mock `axios` pointed at the mock server URL (use a real `axios` instance), a real in-memory Redis fake, and `adapter_settings` pointing `tokenUrl` at the mock server
3. Compile `proxy.js` once via `new vm.Script(readFileSync(...))` and run in context
4. Assert `res.statusCode === 200` and `res.body === 'Authorized'`; assert `Content-Type` header set to `'text/plain'`
5. Run `node --test tests/contract/proxy-http.test.js` — confirm test **fails** (`proxy.js does not exist` is expected)
### Implementation for User Story 1
- [X] T007 [US1] Create `src/proxyScripts/proxy.js` — zero `import`/`export`; full async IIFE:
```
(async () => {
try {
// 1. Validate required adapter_settings fields
// For each of ['tokenUrl','username','password','clientId','scope']:
// if (!adapter_settings[field]) throw new Error('missing required field: ' + field)
// 2. Read token cache from Redis
// const token = await redis.hGet('authorization', 'token')
// const expiry = parseFloat(await redis.hGet('authorization', 'expiry') ?? '0')
// const isValid = token !== null && Date.now() / 1000 < expiry
// 3. Cache HIT → respond immediately
// if (isValid) { res.writeHead(200, {'Content-Type':'text/plain'}); res.end('Authorized'); return; }
// 4. Cache MISS → fetch fresh token
// const params = new URLSearchParams({ grant_type:'password', username, password,
// client_id: clientId, scope })
// const response = await axios.post(tokenUrl, params,
// { headers:{'Content-Type':'application/x-www-form-urlencoded'}, timeout: 5000 })
// const { id_token, expires_in } = response.data
// if (!id_token) throw new Error('id_token missing from response')
// if (!expires_in) throw new Error('expires_in missing from response')
// 5. Write to Redis cache
// await redis.hSet('authorization', 'token', id_token)
// await redis.hSet('authorization', 'expiry', String(expires_in))
// 6. Respond success
// res.writeHead(200, {'Content-Type':'text/plain'}); res.end('Authorized')
} catch (err) {
console.error({ message: 'Auth failed', error: err.message })
res.writeHead(401, {'Content-Type':'text/plain'})
res.end('Unauthorized: ' + err.message)
}
})()
```
Run `npm run test:unit` and `npm run test:contract` — T005 and T006 tests must pass.
**Checkpoint**: US1 fully functional. `curl` the proxy endpoint → `200 OK / Authorized`.
---
## Phase 4: User Story 2 — Token Expiry and Refresh (Priority: P2)
**Goal**: An expired cached token is automatically discarded and a fresh one fetched transparently; a still-valid token is never re-fetched.
**Independent Test**: Pre-seed Redis `authorization` hash with an expiry timestamp in the past (e.g. `'1'`); send a request; confirm `200 OK / Authorized` is returned and a new token fetch occurred.
### Tests for User Story 2 (write first — must FAIL or explicitly verified against T007)
- [X] T008 [US2] Add expiry tests to `tests/unit/proxy.test.js`:
1. **Test "expired token → re-fetch → 200 OK"**: pre-seed `_store` with `'authorization:token' = 'old-tok'` and `'authorization:expiry' = '1'` (epoch far in the past); run script; assert `axios.post.mock.calls.length === 1`, `res.statusCode === 200`, `res.body === 'Authorized'`; assert Redis `hSet` was called to write new token and expiry
2. **Test "future expiry → no re-fetch → 200 OK"**: pre-seed with valid future expiry `'9999999999'`; run script; assert `axios.post.mock.calls.length === 0` and `res.statusCode === 200`
3. Run `node --test tests/unit/proxy.test.js` — if both tests **pass already** (T007's `Date.now()/1000 < expiry` check covers them), record as verified; if either **fails**, proceed to T009
### Implementation for User Story 2
- [X] T009 [US2] Verify expiry logic in `src/proxyScripts/proxy.js` — confirm `parseFloat(await redis.hGet('authorization', 'expiry') ?? '0')` and the validity check `token !== null && Date.now() / 1000 < expiry` are implemented exactly as specified; if T008 tests failed, correct the validity expression until both pass; run `npm run test:unit` to confirm green
**Checkpoint**: US1 + US2 both independently functional and tested.
---
## Phase 5: User Story 3 — Authentication Failure Handling (Priority: P3)
**Goal**: Any authentication failure (bad credentials, timeout, unreachable service, malformed response, missing config) produces `401 Unauthorized` with a descriptive message — never a crash or hang.
**Independent Test**: Provide an invalid `tokenUrl` or wrong `password` in `adapter_settings`; send a request; confirm `401 Unauthorized` with body starting `Unauthorized: `.
### Tests for User Story 3 (write first — must FAIL before T012 error differentiation)
- [X] T010 [P] [US3] Add failure unit tests to `tests/unit/proxy.test.js`:
1. **Test "HTTP 401 from token service"**: `axios.post` rejects with `{ response: { status: 401 } }`; assert `res.statusCode === 401`, `res.body === 'Unauthorized: HTTP 401'`
2. **Test "timeout (ECONNABORTED)"**: `axios.post` rejects with `{ code: 'ECONNABORTED' }`; assert `res.statusCode === 401`, `res.body === 'Unauthorized: token service timeout'`
3. **Test "timeout (ERR_CANCELED)"**: same as above but `{ code: 'ERR_CANCELED' }`; assert body `'Unauthorized: token service timeout'`
4. **Test "missing id_token in response"**: `axios.post` resolves with `{ data: { expires_in: 9999 } }` (no `id_token`); assert `res.statusCode === 401`, `res.body === 'Unauthorized: id_token missing from response'`
5. **Test "missing tokenUrl in adapter_settings"**: override `adapter_settings` with `tokenUrl: ''`; assert `res.statusCode === 401`, body matches `'Unauthorized: missing required field: tokenUrl'`
6. **Test "missing username"**: `username: undefined`; assert `401`, body `'Unauthorized: missing required field: username'`
7. Run `node --test tests/unit/proxy.test.js` — confirm failure tests FAIL (timeout test may fail because current catch sends `err.message` directly, not `'token service timeout'`)
- [X] T011 [P] [US3] Add 401 contract test to `tests/contract/proxy-http.test.js`:
1. Mock token endpoint returns HTTP `401` with empty body
2. Run proxy in VM context pointing at mock server
3. Assert `res.statusCode === 401` and `res.body` matches `/^Unauthorized: /`
4. Run `node --test tests/contract/proxy-http.test.js` — confirm test behaviour (may already pass if catch sends `err.message`; verify exact body format)
### Implementation for User Story 3
- [X] T012 [US3] Extend catch block in `src/proxyScripts/proxy.js` — refine error message derivation:
```
} catch (err) {
let message
if (err.response) {
message = 'HTTP ' + err.response.status
} else if (err.code === 'ECONNABORTED' || err.code === 'ERR_CANCELED') {
message = 'token service timeout'
} else {
message = err.message
}
console.error({ message: 'Auth failed', error: message })
res.writeHead(401, { 'Content-Type': 'text/plain' })
res.end('Unauthorized: ' + message)
}
```
Confirm field-validation `throw new Error('missing required field: ...')` and `id_token`/`expires_in` absence throws are already caught by this same block (they have no `.response` and no `.code`, so `err.message` is used — correct).
Run `npm run test:unit` and `npm run test:contract` — all T010 and T011 tests must pass.
**Checkpoint**: US1 + US2 + US3 all independently functional and tested.
---
## Phase 6: Stampede Guard (FR-013 — Cross-Cutting)
**Goal**: When two or more concurrent requests arrive with no valid cached token, exactly **one** token fetch is made to the OIDC service; all other requests queue on the same Promise and share the result.
**Independent Test**: Fire two simultaneous `script.runInContext()` calls; assert `axios.post` was called exactly once and both responses are `200 / Authorized`.
### Test for Stampede Guard (write first)
- [X] T013 [US1] Add concurrent-request unit test to `tests/unit/proxy.test.js`:
1. Create a **shared** `adapter_settings` object (no pre-seeded Redis token); slow down `axios.post` mock by 50 ms using `new Promise(resolve => setTimeout(resolve, 50))` before returning the token response
2. Fire two `script.runInContext()` calls with contexts that share the same `adapter_settings` reference (separate `res` objects for each): `const [r1, r2] = await Promise.all([run(ctx1), run(ctx2)])`
3. Assert `mockAxios.post.mock.calls.length === 1` (stampede guard prevented second fetch)
4. Assert both `res1.statusCode === 200` and `res2.statusCode === 200`
5. Run `node --test tests/unit/proxy.test.js` — confirm stampede test **fails** (currently two fetches are made)
### Implementation for Stampede Guard
- [X] T014 [US1] Extend `src/proxyScripts/proxy.js` — add `_pendingFetch` guard between the cache-miss detection and the `axios.post` call:
```
// After isValid check returns false and before axios.post:
// Queue on in-flight fetch if one is already running
if (adapter_settings._pendingFetch !== null &&
typeof adapter_settings._pendingFetch?.then === 'function') {
try {
await adapter_settings._pendingFetch
res.writeHead(200, { 'Content-Type': 'text/plain' })
res.end('Authorized')
} catch (err) {
res.writeHead(401, { 'Content-Type': 'text/plain' })
res.end('Unauthorized: ' + (err.response ? 'HTTP ' + err.response.status
: (err.code === 'ECONNABORTED' || err.code === 'ERR_CANCELED')
? 'token service timeout' : err.message))
}
return
}
// This invocation wins the race — build and share the fetch promise
adapter_settings._pendingFetch = (async () => {
// ... axios.post + Redis hSet ...
})()
try {
await adapter_settings._pendingFetch
res.writeHead(200, { 'Content-Type': 'text/plain' })
res.end('Authorized')
} catch (err) {
// ... 401 as above ...
} finally {
adapter_settings._pendingFetch = null
}
```
Use duck-type check (`typeof .then === 'function'`) — **not** `instanceof Promise` (fails across V8 realms per research.md R-002).
Run `npm run test:unit` — T013 stampede test must pass.
**Checkpoint**: All user stories complete and independently tested. Full `npm test` should be green.
---
## Final Phase: Polish & Verification
- [X] T015 [P] Static analysis of `src/proxyScripts/proxy.js` — run the following; all must return zero matches:
```bash
grep -n 'import\|export' src/proxyScripts/proxy.js # FR-009
grep -n 'process\.env\|config\b\|global\.config' src/proxyScripts/proxy.js # FR-010
```
If any match is found, remove the offending line and re-run the full test suite.
- [X] T016 [P] Verify `.gitignore` and file tracking — run:
```bash
git status src/globalVariables/
```
Confirm `adapter_settings.json` shows as **ignored** (not staged, not untracked in output) and `adapter_settings.json.example` is **tracked** (shows in `git ls-files src/globalVariables/`). If `adapter_settings.json` is untracked (not ignored), verify T003 was applied to the correct `.gitignore` path.
- [X] T017 Run full test suite `npm test` — all unit and contract tests pass with zero failures; if any test fails, fix the root cause in `src/proxyScripts/proxy.js` or the relevant test file before marking complete
- [ ] T018 Validate quickstart — follow `specs/001-oidc-proxy-script/quickstart.md`:
1. `npm start` — confirm structured JSON startup log includes `"Loaded global data: adapter_settings"` with keys `["tokenUrl","username","password","clientId","scope"]`
2. `curl -v http://localhost:3000/<proxy-path>` — confirm `HTTP/1.1 200 OK` and body `Authorized`
3. Send a second request — confirm adapter log shows **no** new `axios.post` call (cache hit)
4. If response is `401`, check credentials in `adapter_settings.json` and Redis connectivity
---
## Dependencies & Execution Order
### Phase Dependencies
```
Phase 1 (T001-T003) ──────────────────────────► no dependencies; start immediately
Phase 2 (T004) ──── after T001 ──────────► BLOCKS all proxy.js execution
Phase 3 (T005-T007) ──── after T001, T004 ────► US1 MVP; T005 ∥ T006 before T007
Phase 4 (T008-T009) ──── after T007 ──────────► US2 expiry; adds to test file from T005
Phase 5 (T010-T012) ──── after T007 ──────────► US3 failures; T010 ∥ T011 before T012
Phase 6 (T013-T014) ──── after T007 ──────────► Stampede guard; T013 before T014
Final Phase (T015-T018) ── after T014 ──────────► Polish; T015 ∥ T016 before T017 → T018
```
### User Story Dependencies
| Story | Depends on | Independent of |
|-------|-----------|---------------|
| **US1** (P1, T005-T007) | Phase 2 complete | US2, US3 |
| **US2** (P2, T008-T009) | T007 complete | US3 (fully independent) |
| **US3** (P3, T010-T012) | T007 complete | US2 (fully independent) |
| **Stampede** (T013-T014) | T007 complete | US2, US3 |
Phases 4, 5, and 6 can all begin in parallel once T007 is merged (they add to different test blocks; use separate git worktrees or feature branches if pairing).
### Within Each Phase
- Tests **before** implementation (constitutional requirement)
- Test files in same story can be written in parallel (T005 ∥ T006, T010 ∥ T011)
- Implementation tasks within a phase are sequential (single `proxy.js` file)
---
## Parallel Execution Examples
### Phase 3 (US1): Tests in parallel
```
Task A: T005 — Write tests/unit/proxy.test.js (makeContext + cache-hit/miss tests)
Task B: T006 — Write tests/contract/proxy-http.test.js (200 OK contract test)
↓ both confirm FAIL
Task C: T007 — Implement src/proxyScripts/proxy.js (makes both pass)
```
### Phase 5 (US3): Tests in parallel
```
Task A: T010 — Add unit failure tests to tests/unit/proxy.test.js
Task B: T011 — Add 401 contract test to tests/contract/proxy-http.test.js
↓ both confirm FAIL (or partial pass)
Task C: T012 — Extend proxy.js catch block (makes both pass)
```
### Final Phase: Polish in parallel
```
Task A: T015 — Static analysis (grep check)
Task B: T016 — .gitignore verification
↓ both must clear before
Task C: T017 — npm test (full suite)
Task D: T018 — Quickstart smoke test
```
---
## Implementation Strategy
### MVP (User Story 1 Only)
1. Complete **Phase 1** (T001-T003): Setup files
2. Complete **Phase 2** (T004): Wire Redis client in server.js
3. Complete **Phase 3** (T005-T007): US1 tests + proxy.js core implementation
4. **STOP and VALIDATE**: `curl` the proxy endpoint → `200 OK / Authorized`
5. Run `npm run test:unit && npm run test:contract` — US1 green
6. Deploy / demo if ready
### Incremental Delivery
1. MVP above → US1 shipped
2. Add **Phase 4** (T008-T009) → expiry/refresh tested → US2 shipped
3. Add **Phase 5** (T010-T012) → failure handling tested → US3 shipped
4. Add **Phase 6** (T013-T014) → stampede guard tested → concurrent safety shipped
5. **Final Phase** (T015-T018) → static analysis + smoke test → feature complete
---
## Notes
- **proxy.js has zero `import`/`export`** — all dependencies arrive via `vm.createContext()`; tested by T015 grep
- **Redis stores token + expiry** (hash key `authorization`, fields `token` + `expiry`); `adapter_settings._pendingFetch` holds the stampede guard (Promises cannot be serialised to Redis)
- **`expires_in` is an absolute Unix epoch timestamp** (not a relative duration); validity: `Date.now() / 1000 < expiry`
- **All auth failures → `401 Unauthorized`**, never a crash (SC-004); top-level catch in async IIFE handles every error path
- **`t.mock.fn()`** from `node:test` provides call-count assertions without any external test framework
- **Duck-type stampede check** (`typeof .then === 'function'`) is required — `instanceof Promise` fails across V8 realms (research.md R-002)
- Commit after each task or logical group; each checkpoint is a safe stopping point

View File

@@ -0,0 +1,7 @@
{
"tokenUrl": "https://auth.kme.example.com/protocol/openid-connect/token",
"username": "service-account@example.com",
"password": "changeme",
"clientId": "kme-content-adapter",
"scope": "openid tags content_entitlements"
}

View File

@@ -0,0 +1,100 @@
/**
 * OIDC authorization proxy for the KME Content Source Adapter.
 *
 * Runs inside a vm context: kme_CSA_settings, redis, axios, console, req,
 * res and URLSearchParams are all injected by the host server, so this file
 * intentionally contains no import/export statements.
 *
 * Contract: every request ends in exactly one response — 200 "Authorized"
 * when a valid token is cached or freshly fetched, otherwise
 * 401 "Unauthorized: <reason>" via the top-level catch (never a crash).
 */
(async () => {
  try {
    // 1. Validate required kme_CSA_settings fields
    const requiredFields = ['tokenUrl', 'username', 'password', 'clientId', 'scope'];
    for (const field of requiredFields) {
      if (!kme_CSA_settings[field]) {
        throw new Error('missing required field: ' + field);
      }
    }
    const { tokenUrl, username, clientId, scope } = kme_CSA_settings;
    // 2. Read token cache from Redis — the two hash fields are independent
    //    reads, so issue them in parallel instead of two sequential round-trips.
    console.debug({ message: 'Checking token cache', url: req.url, method: req.method });
    const [token, rawExpiry] = await Promise.all([
      redis.hGet('authorization', 'token'),
      redis.hGet('authorization', 'expiry'),
    ]);
    // `expiry` is an absolute Unix epoch in seconds (not a relative TTL).
    const expiry = parseFloat(rawExpiry ?? '0');
    const isValid = token !== null && Date.now() / 1000 < expiry;
    // 3. Cache HIT → respond immediately
    if (isValid) {
      console.debug({ message: 'Token cache hit', expiresIn: Math.round(expiry - Date.now() / 1000) + 's' });
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      res.end('Authorized');
      return;
    }
    // 4. Stampede guard — if a fetch is already in flight, queue on it.
    //    Duck-typed thenable check: `instanceof Promise` fails across vm realms.
    if (kme_CSA_settings._pendingFetch && typeof kme_CSA_settings._pendingFetch.then === 'function') {
      console.debug({ message: 'Token fetch in flight, queuing request' });
      await kme_CSA_settings._pendingFetch;
      console.debug({ message: 'Queued request unblocked, responding' });
      res.writeHead(200, { 'Content-Type': 'text/plain' });
      res.end('Authorized');
      return;
    }
    // 5. Cache MISS → fetch fresh token
    console.info({ message: 'Token cache miss, fetching fresh token', tokenUrl });
    const params = new URLSearchParams({
      grant_type: 'password',
      username,
      password: kme_CSA_settings.password,
      client_id: clientId,
      scope,
    });
    // Set up stampede guard before fetching so concurrent requests queue on it
    let resolvePending;
    let rejectPending;
    kme_CSA_settings._pendingFetch = new Promise((resolve, reject) => {
      resolvePending = resolve;
      rejectPending = reject;
    });
    // Prevent an unhandled-rejection when no concurrent request is waiting on this promise
    kme_CSA_settings._pendingFetch.catch(() => {});
    try {
      console.debug({ message: 'Requesting new token', url: tokenUrl, method: 'POST' });
      const response = await axios.post(tokenUrl, params, {
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        timeout: 5000,
      });
      const { id_token, expires_in } = response.data;
      if (!id_token) throw new Error('id_token missing from response');
      if (!expires_in) throw new Error('expires_in missing from response');
      // 6. Write to Redis cache — independent field writes, issued in parallel
      await Promise.all([
        redis.hSet('authorization', 'token', id_token),
        redis.hSet('authorization', 'expiry', String(expires_in)),
      ]);
      console.info({ message: 'Token fetched and cached', expiresAt: new Date(expires_in * 1000).toISOString() });
      // Resolve the pending fetch promise so waiting requests can proceed
      resolvePending();
    } catch (fetchErr) {
      console.error({ message: 'Token fetch failed', error: fetchErr.message, code: fetchErr.code });
      // Wake any queued requests with the failure, then rethrow to the outer catch
      rejectPending(fetchErr);
      throw fetchErr;
    } finally {
      // Always clear the guard so the next request can retry after a failure
      kme_CSA_settings._pendingFetch = null;
    }
    // 7. Respond success
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end('Authorized');
  } catch (err) {
    // Map every failure mode to a stable, human-readable reason string
    let message;
    if (err.response) {
      message = 'HTTP ' + err.response.status; // token service replied with an error status
    } else if (err.code === 'ECONNABORTED' || err.code === 'ERR_CANCELED') {
      message = 'token service timeout'; // axios timeout/cancellation codes
    } else {
      message = err.message; // validation, parsing, or network errors
    }
    console.error({ message: 'Auth failed', error: message, url: req.url });
    res.writeHead(401, { 'Content-Type': 'text/plain' });
    res.end('Unauthorized: ' + message);
  }
})()

View File

@@ -9,6 +9,7 @@ import { v4 as uuidv4 } from "uuid";
import jwt from "jsonwebtoken";
import { create as xmlBuilder } from "xmlbuilder2";
import { logger } from "./logger.js";
import { createClient } from "redis";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
@@ -122,6 +123,11 @@ function validateConfig(config) {
*/
async function startServer() {
try {
// Connect Redis client before loading global variables
const redisClient = createClient();
await redisClient.connect();
globalVMContext.redis = redisClient;
// Load configuration into global.config
global.config = loadConfig();
@@ -143,7 +149,7 @@ async function startServer() {
validateConfig(global.config);
logger.info("Configuration validated successfully");
const proxyPath = join(__dirname, "proxyScripts", "proxy.js");
const proxyPath = join(__dirname, "proxyScripts", "kmeContentSourceAdapter.js");
const proxyCode = readFileSync(proxyPath, "utf-8");
const script = new vm.Script(proxyCode, { filename: "proxy.js" });

View File

@@ -0,0 +1,137 @@
import { test, describe } from 'node:test';
import assert from 'node:assert/strict';
import vm from 'node:vm';
import http from 'node:http';
import { readFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
import axios from 'axios';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const proxyPath = join(__dirname, '../../src/proxyScripts/kmeContentSourceAdapter.js');
const proxyCode = readFileSync(proxyPath, 'utf-8');
const proxyScript = new vm.Script(proxyCode, { filename: 'kmeContentSourceAdapter.js' });
/**
 * Start a minimal HTTP server that answers every request (any method) with a
 * fixed status code and JSON body.
 * @param {number} statusCode HTTP status to return
 * @param {object} responseBody object serialized as the JSON response body
 * @returns {Promise<{ server: http.Server, url: string, close: () => Promise<void> }>}
 */
function startMockTokenServer(statusCode, responseBody) {
  return new Promise((resolve, reject) => {
    const server = http.createServer((_req, reply) => {
      reply.writeHead(statusCode, { 'Content-Type': 'application/json' });
      reply.end(JSON.stringify(responseBody));
    });
    // Surface listen failures (e.g. EADDRINUSE) to the caller
    server.once('error', reject);
    // Port 0 → OS assigns a free ephemeral port
    server.listen(0, '127.0.0.1', () => {
      const { port } = server.address();
      resolve({
        server,
        url: `http://127.0.0.1:${port}`,
        close() {
          return new Promise((done, fail) => {
            server.close((err) => (err ? fail(err) : done()));
          });
        },
      });
    });
  });
}
/** Build an in-memory Redis fake supporting the hSet/hGet subset the proxy uses. */
function makeRedisFake() {
  // Flattened "key:field" → value map standing in for Redis hashes
  const entries = new Map();
  return {
    async hSet(key, field, value) {
      entries.set(`${key}:${field}`, value);
      return 1; // node-redis returns the number of fields added
    },
    async hGet(key, field) {
      const hit = entries.get(`${key}:${field}`);
      // Mirror real Redis: missing fields read back as null
      return hit === undefined ? null : hit;
    },
  };
}
/** Build a capturable stand-in for http.ServerResponse recording status, body, and headers. */
function makeRes() {
  const state = { statusCode: null, body: '', headers: {} };
  return {
    writeHead(code, hdrs = {}) {
      state.statusCode = code;
      Object.assign(state.headers, hdrs);
    },
    end(chunk = '') {
      // Accumulate, mirroring multiple end/write-style calls
      state.body += String(chunk);
    },
    get statusCode() { return state.statusCode; },
    get body() { return state.body; },
    get headers() { return state.headers; },
  };
}
// ---------------------------------------------------------------------------
// Contract: 200 OK — successful OIDC token fetch
// ---------------------------------------------------------------------------
describe('proxy HTTP contract: 200 OK', () => {
  test('fresh token fetch → 200 Authorized with Content-Type text/plain', async () => {
    // Real axios hits a local mock token endpoint; expires_in is a far-future
    // absolute Unix epoch, so the fetched token is treated as valid.
    const mock = await startMockTokenServer(200, {
      id_token: 'contract-token',
      expires_in: 9_999_999_999,
    });
    try {
      const res = makeRes();
      // Minimal vm context matching the host's injection contract
      const ctx = vm.createContext({
        URLSearchParams,
        console,
        axios,
        redis: makeRedisFake(),
        kme_CSA_settings: {
          tokenUrl: mock.url,
          username: 'user',
          password: 'pass',
          clientId: 'client',
          scope: 'openid',
        },
        req: { url: '/', method: 'GET', headers: {} },
        res,
      });
      // The script evaluates to the async IIFE's Promise — await completion
      await proxyScript.runInContext(ctx);
      assert.strictEqual(res.statusCode, 200);
      assert.strictEqual(res.body, 'Authorized');
      assert.strictEqual(res.headers['Content-Type'], 'text/plain');
    } finally {
      await mock.close();
    }
  });
});
// ---------------------------------------------------------------------------
// Contract: 401 Unauthorized — token service returns 4xx
// ---------------------------------------------------------------------------
describe('proxy HTTP contract: 401 Unauthorized', () => {
  test('token service 401 → proxy 401 with Unauthorized: prefix', async () => {
    // Mock token endpoint rejects every request with 401 and an empty body
    const mock = await startMockTokenServer(401, {});
    try {
      const res = makeRes();
      const ctx = vm.createContext({
        URLSearchParams,
        console,
        axios,
        redis: makeRedisFake(),
        kme_CSA_settings: {
          tokenUrl: mock.url,
          username: 'bad-user',
          password: 'bad-pass',
          clientId: 'client',
          scope: 'openid',
        },
        req: { url: '/', method: 'GET', headers: {} },
        res,
      });
      await proxyScript.runInContext(ctx);
      // Contract: auth failures never crash; they map to 401 text/plain
      assert.strictEqual(res.statusCode, 401);
      assert.match(res.body, /^Unauthorized: .+/);
      assert.strictEqual(res.headers['Content-Type'], 'text/plain');
    } finally {
      await mock.close();
    }
  });
});

311
tests/unit/proxy.test.js Normal file
View File

@@ -0,0 +1,311 @@
import { test, describe } from 'node:test';
import assert from 'node:assert/strict';
import vm from 'node:vm';
import { readFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname, join } from 'node:path';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const proxyPath = join(__dirname, '../../src/proxyScripts/kmeContentSourceAdapter.js');
const proxyCode = readFileSync(proxyPath, 'utf-8');
const proxyScript = new vm.Script(proxyCode, { filename: 'kmeContentSourceAdapter.js' });
/**
 * Build a minimal VM context satisfying the vm-context contract: Redis fake,
 * capturable res, default kme_CSA_settings, and a stub axios that returns a
 * far-future token. Assertion handles are exposed as _redis/_res/_store/_axios.
 * @param {import('node:test').TestContext} t supplies t.mock.fn for call tracking
 * @param {object} [overrides] shallow-merged over the default context globals
 */
function makeContext(t, overrides = {}) {
  // Flattened "key:field" → value backing store for the Redis fake
  const backing = {};
  const redisFake = {
    hSet: t.mock.fn(async (key, field, value) => {
      backing[`${key}:${field}`] = value;
      return 1;
    }),
    hGet: t.mock.fn(async (key, field) => backing[`${key}:${field}`] ?? null),
  };
  // Capturable response double
  const captured = { status: null, body: '', headers: {} };
  const resFake = {
    writeHead: t.mock.fn((code, hdrs = {}) => {
      captured.status = code;
      Object.assign(captured.headers, hdrs);
    }),
    end: t.mock.fn((chunk = '') => { captured.body += String(chunk); }),
    get statusCode() { return captured.status; },
    get body() { return captured.body; },
    get headers() { return captured.headers; },
  };
  const settings = {
    tokenUrl: 'https://auth.example.com/token',
    username: 'testuser',
    password: 'testpass',
    clientId: 'test-client',
    scope: 'openid',
  };
  const axiosFake = {
    post: t.mock.fn(async () => ({
      data: { id_token: 'mock-token', expires_in: 9_999_999_999 },
    })),
  };
  const ctx = vm.createContext({
    URLSearchParams,
    console,
    axios: axiosFake,
    redis: redisFake,
    kme_CSA_settings: settings,
    req: { url: '/', method: 'GET', headers: {} },
    res: resFake,
    ...overrides,
  });
  // Expose helpers for assertion
  ctx._redis = redisFake;
  ctx._res = resFake;
  ctx._store = backing;
  ctx._axios = axiosFake;
  return ctx;
}
/** Run the proxy script in a context and resolve once its async IIFE settles. */
async function runScript(ctx) {
  // runInContext evaluates to the IIFE's Promise; Promise.resolve handles
  // thenable and non-thenable completion values uniformly.
  await Promise.resolve(proxyScript.runInContext(ctx));
}
// ---------------------------------------------------------------------------
// User Story 1 — Successful Authenticated Request
// ---------------------------------------------------------------------------
describe('US1: successful authenticated request', () => {
  test('cache miss → fresh fetch → 200 OK', async (t) => {
    // Empty Redis fake → the proxy must call the token endpoint exactly once
    const ctx = makeContext(t);
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 200);
    assert.strictEqual(ctx._res.body, 'Authorized');
    assert.strictEqual(ctx._axios.post.mock.calls.length, 1);
  });
  test('cache hit → no fetch → 200 OK', async (t) => {
    const ctx = makeContext(t, {});
    // Pre-seed Redis store via the fake (expiry is a far-future Unix epoch)
    ctx._store['authorization:token'] = 'cached-tok';
    ctx._store['authorization:expiry'] = '9999999999';
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 200);
    assert.strictEqual(ctx._res.body, 'Authorized');
    // A valid cached token must short-circuit the OIDC fetch entirely
    assert.strictEqual(ctx._axios.post.mock.calls.length, 0);
  });
});
// ---------------------------------------------------------------------------
// User Story 2 — Token Expiry and Refresh
// ---------------------------------------------------------------------------
describe('US2: token expiry and refresh', () => {
  test('expired token → re-fetch → 200 OK', async (t) => {
    const ctx = makeContext(t);
    // Seed an already-expired token: expiry is an absolute Unix epoch (seconds)
    ctx._store['authorization:token'] = 'old-tok';
    ctx._store['authorization:expiry'] = '1'; // epoch far in the past
    await runScript(ctx);
    assert.strictEqual(ctx._axios.post.mock.calls.length, 1, 'should re-fetch');
    assert.strictEqual(ctx._res.statusCode, 200);
    assert.strictEqual(ctx._res.body, 'Authorized');
    // New token should have been written to Redis
    const hSetCalls = ctx._redis.hSet.mock.calls;
    assert.ok(hSetCalls.length >= 2, 'hSet should be called for token and expiry');
  });
  test('future expiry → no re-fetch → 200 OK', async (t) => {
    const ctx = makeContext(t);
    // Still-valid token: far-future epoch keeps the cache hit path active
    ctx._store['authorization:token'] = 'fresh-tok';
    ctx._store['authorization:expiry'] = '9999999999';
    await runScript(ctx);
    assert.strictEqual(ctx._axios.post.mock.calls.length, 0, 'should not re-fetch');
    assert.strictEqual(ctx._res.statusCode, 200);
    assert.strictEqual(ctx._res.body, 'Authorized');
  });
});
// ---------------------------------------------------------------------------
// User Story 3 — Authentication Failure Handling
// Each failure mode must surface as 401 "Unauthorized: <reason>", never a crash.
// ---------------------------------------------------------------------------
describe('US3: authentication failure handling', () => {
  test('HTTP 401 from token service → 401 Unauthorized: HTTP 401', async (t) => {
    // Axios-shaped error: the proxy reads err.response.status for the reason
    const axiosError = Object.assign(new Error('Request failed with status code 401'), {
      response: { status: 401 },
    });
    const ctx = makeContext(t, {
      axios: { post: t.mock.fn(async () => { throw axiosError; }) },
    });
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: HTTP 401');
  });
  test('timeout (ECONNABORTED) → 401 Unauthorized: token service timeout', async (t) => {
    const axiosError = Object.assign(new Error('timeout'), { code: 'ECONNABORTED' });
    const ctx = makeContext(t, {
      axios: { post: t.mock.fn(async () => { throw axiosError; }) },
    });
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: token service timeout');
  });
  test('timeout (ERR_CANCELED) → 401 Unauthorized: token service timeout', async (t) => {
    // Both axios timeout codes map to the same stable reason string
    const axiosError = Object.assign(new Error('canceled'), { code: 'ERR_CANCELED' });
    const ctx = makeContext(t, {
      axios: { post: t.mock.fn(async () => { throw axiosError; }) },
    });
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: token service timeout');
  });
  test('missing id_token in response → 401 Unauthorized: id_token missing from response', async (t) => {
    // Well-formed HTTP response, malformed OIDC payload
    const ctx = makeContext(t, {
      axios: {
        post: t.mock.fn(async () => ({ data: { expires_in: 9999 } })),
      },
    });
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: id_token missing from response');
  });
  test('missing expires_in in response → 401 Unauthorized: expires_in missing from response', async (t) => {
    const ctx = makeContext(t, {
      axios: {
        post: t.mock.fn(async () => ({ data: { id_token: 'a-token' } })),
      },
    });
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: expires_in missing from response');
  });
  test('missing tokenUrl in kme_CSA_settings → 401 missing required field: tokenUrl', async (t) => {
    const ctx = makeContext(t);
    // Empty string is falsy, so settings validation must reject it
    ctx.kme_CSA_settings.tokenUrl = '';
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: missing required field: tokenUrl');
  });
  test('missing username in kme_CSA_settings → 401 missing required field: username', async (t) => {
    const ctx = makeContext(t);
    ctx.kme_CSA_settings.username = undefined;
    await runScript(ctx);
    assert.strictEqual(ctx._res.statusCode, 401);
    assert.strictEqual(ctx._res.body, 'Unauthorized: missing required field: username');
  });
});
// ---------------------------------------------------------------------------
// Phase 6 — Stampede Guard (FR-013)
// Two vm contexts share the SAME kme_CSA_settings object, so the
// _pendingFetch Promise set by the first request is visible to the second.
// ---------------------------------------------------------------------------
describe('stampede guard', () => {
  test('concurrent requests → exactly one fetch, both get 200', async (t) => {
    // Shared kme_CSA_settings across both contexts (same reference)
    const kme_CSA_settings = {
      tokenUrl: 'https://auth.example.com/token',
      username: 'testuser',
      password: 'testpass',
      clientId: 'test-client',
      scope: 'openid',
    };
    // Shared Redis store
    const _store = {};
    const redis = {
      hSet: t.mock.fn(async (key, field, value) => {
        _store[`${key}:${field}`] = value;
        return 1;
      }),
      hGet: t.mock.fn(async (key, field) => _store[`${key}:${field}`] ?? null),
    };
    // Slow axios mock — 50ms delay keeps the first fetch in flight long
    // enough for the second request to hit the guard
    const mockAxiosPost = t.mock.fn(async () => {
      await new Promise(resolve => setTimeout(resolve, 50));
      return { data: { id_token: 'stampede-token', expires_in: 9_999_999_999 } };
    });
    const sharedAxios = { post: mockAxiosPost };
    // Build two contexts sharing kme_CSA_settings, redis, and axios references
    function makeRes(tctx) {
      let statusCode = null;
      let body = '';
      return {
        writeHead: tctx.mock.fn((code) => { statusCode = code; }),
        end: tctx.mock.fn((b = '') => { body += String(b); }),
        get statusCode() { return statusCode; },
        get body() { return body; },
      };
    }
    const res1 = makeRes(t);
    const res2 = makeRes(t);
    const ctx1 = vm.createContext({
      URLSearchParams, console, axios: sharedAxios,
      redis, kme_CSA_settings,
      req: { url: '/', method: 'GET', headers: {} },
      res: res1,
    });
    const ctx2 = vm.createContext({
      URLSearchParams, console, axios: sharedAxios,
      redis, kme_CSA_settings,
      req: { url: '/', method: 'GET', headers: {} },
      res: res2,
    });
    // Fire both concurrently (no await between the two runInContext calls)
    const p1 = proxyScript.runInContext(ctx1);
    const p2 = proxyScript.runInContext(ctx2);
    await Promise.all([p1, p2]);
    assert.strictEqual(mockAxiosPost.mock.calls.length, 1, 'stampede guard: only one fetch');
    assert.strictEqual(res1.statusCode, 200);
    assert.strictEqual(res2.statusCode, 200);
    assert.strictEqual(res1.body, 'Authorized');
    assert.strictEqual(res2.body, 'Authorized');
  });
});