/**
 * Tests for CI validator scripts
 *
 * Tests both success paths (against the real project) and error paths
 * (against temporary fixture directories via wrapper scripts).
 *
 * Run with: node tests/ci/validators.test.js
 */

const assert = require('assert');
const path = require('path');
const fs = require('fs');
const os = require('os');
const { execFileSync } = require('child_process');

const validatorsDir = path.join(__dirname, '..', '..', 'scripts', 'ci');

// Test helpers
function test(name, fn) {
  try {
    fn();
    console.log(`  \u2713 ${name}`);
    return true;
  } catch (err) {
    console.log(`  \u2717 ${name}`);
    console.log(`    Error: ${err.message}`);
    return false;
  }
}

function createTestDir() {
  return fs.mkdtempSync(path.join(os.tmpdir(), 'ci-validator-test-'));
}

function cleanupTestDir(testDir) {
  fs.rmSync(testDir, { recursive: true, force: true });
}

/**
 * Run a validator script via a wrapper that overrides its directory constant.
 * This allows testing error cases without modifying real project files.
 *
 * @param {string} validatorName - e.g., 'validate-agents'
 * @param {string} dirConstant - the constant name to override (e.g., 'AGENTS_DIR')
 * @param {string} overridePath - the temp directory to use
 * @returns {{code: number, stdout: string, stderr: string}}
 */
function runValidatorWithDir(validatorName, dirConstant, overridePath) {
  const validatorPath = path.join(validatorsDir, `${validatorName}.js`);

  // Read the validator source, replace the directory constant, and run as a wrapper
  let source = fs.readFileSync(validatorPath, 'utf8');

  // Remove the shebang line
  source = source.replace(/^#!.*\n/, '');

  // Replace the directory constant with our override path
  const dirRegex = new RegExp(`const ${dirConstant} = .*?;`);
  source = source.replace(dirRegex, `const ${dirConstant} = ${JSON.stringify(overridePath)};`);

  try {
    const stdout = execFileSync('node', ['-e', source], {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      timeout: 10000,
    });
    return { code: 0, stdout, stderr: '' };
  } catch (err) {
    return {
      code: err.status || 1,
      stdout: err.stdout || '',
      stderr: err.stderr || '',
    };
  }
}
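// Illustrative example of the rewrite above (the validator's source shape is assumed,
// not quoted): a line such as
//   const AGENTS_DIR = path.join(__dirname, '..', '..', 'agents');
// becomes
//   const AGENTS_DIR = "/tmp/ci-validator-test-abc123";
// and the rewritten source is executed via `node -e`, so the validator scans the fixture dir.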
/**
 * Run a validator script with multiple directory overrides.
 * @param {string} validatorName
 * @param {Record<string, string>} overrides - map of constant name to path
 */
function runValidatorWithDirs(validatorName, overrides) {
  const validatorPath = path.join(validatorsDir, `${validatorName}.js`);
  let source = fs.readFileSync(validatorPath, 'utf8');
  source = source.replace(/^#!.*\n/, '');
  for (const [constant, overridePath] of Object.entries(overrides)) {
    const dirRegex = new RegExp(`const ${constant} = .*?;`);
    source = source.replace(dirRegex, `const ${constant} = ${JSON.stringify(overridePath)};`);
  }
  try {
    const stdout = execFileSync('node', ['-e', source], {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      timeout: 10000,
    });
    return { code: 0, stdout, stderr: '' };
  } catch (err) {
    return {
      code: err.status || 1,
      stdout: err.stdout || '',
      stderr: err.stderr || '',
    };
  }
}
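// Illustrative usage (paths are hypothetical), as used by the validate-commands tests
// below to isolate every lookup directory at once:
//   runValidatorWithDirs('validate-commands', {
//     COMMANDS_DIR: '/tmp/cmds', AGENTS_DIR: '/tmp/agents', SKILLS_DIR: '/tmp/skills',
//   });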
/**
 * Run a validator script directly (tests real project)
 */
function runValidator(validatorName) {
  const validatorPath = path.join(validatorsDir, `${validatorName}.js`);
  try {
    const stdout = execFileSync('node', [validatorPath], {
      encoding: 'utf8',
      stdio: ['pipe', 'pipe', 'pipe'],
      timeout: 15000,
    });
    return { code: 0, stdout, stderr: '' };
  } catch (err) {
    return {
      code: err.status || 1,
      stdout: err.stdout || '',
      stderr: err.stderr || '',
    };
  }
}

function runTests() {
  console.log('\n=== Testing CI Validators ===\n');

  let passed = 0;
  let failed = 0;

  // ==========================================
  // validate-agents.js
  // ==========================================
  console.log('validate-agents.js:');

  if (test('passes on real project agents', () => {
    const result = runValidator('validate-agents');
    assert.strictEqual(result.code, 0, `Should pass, got stderr: ${result.stderr}`);
    assert.ok(result.stdout.includes('Validated'), 'Should output validation count');
  })) passed++; else failed++;

  if (test('fails on agent without frontmatter', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'bad-agent.md'), '# No frontmatter here\nJust content.');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should exit 1 for missing frontmatter');
    assert.ok(result.stderr.includes('Missing frontmatter'), 'Should report missing frontmatter');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on agent missing required model field', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'no-model.md'), '---\ntools: Read, Write\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should exit 1 for missing model');
    assert.ok(result.stderr.includes('model'), 'Should report missing model field');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on agent missing required tools field', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'no-tools.md'), '---\nmodel: sonnet\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should exit 1 for missing tools');
    assert.ok(result.stderr.includes('tools'), 'Should report missing tools field');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('passes on valid agent with all required fields', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'good-agent.md'), '---\nmodel: sonnet\ntools: Read, Write\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should pass for valid agent');
    assert.ok(result.stdout.includes('Validated 1'), 'Should report 1 validated');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('handles frontmatter with BOM and CRLF', () => {
    const testDir = createTestDir();
    const content = '\uFEFF---\r\nmodel: sonnet\r\ntools: Read, Write\r\n---\r\n# Agent';
    fs.writeFileSync(path.join(testDir, 'bom-agent.md'), content);

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should handle BOM and CRLF');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('handles frontmatter with colons in values', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'colon-agent.md'), '---\nmodel: sonnet\ntools: Read, Write, Bash\ndescription: Run this: always check: everything\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should handle colons in values');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('skips non-md files', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'readme.txt'), 'Not an agent');
    fs.writeFileSync(path.join(testDir, 'valid.md'), '---\nmodel: sonnet\ntools: Read\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should only validate .md files');
    assert.ok(result.stdout.includes('Validated 1'), 'Should count only .md files');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('exits 0 when directory does not exist', () => {
    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', '/nonexistent/dir');
    assert.strictEqual(result.code, 0, 'Should skip when no agents dir');
    assert.ok(result.stdout.includes('skipping'), 'Should say skipping');
  })) passed++; else failed++;

  if (test('rejects agent with empty model value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'empty.md'), '---\nmodel:\ntools: Read, Write\n---\n# Empty model');
    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject empty model');
    assert.ok(result.stderr.includes('model'), 'Should mention model field');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects agent with empty tools value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'empty.md'), '---\nmodel: claude-sonnet-4-5-20250929\ntools:\n---\n# Empty tools');
    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject empty tools');
    assert.ok(result.stderr.includes('tools'), 'Should mention tools field');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ==========================================
  // validate-hooks.js
  // ==========================================
  console.log('\nvalidate-hooks.js:');

  if (test('passes on real project hooks.json', () => {
    const result = runValidator('validate-hooks');
    assert.strictEqual(result.code, 0, `Should pass, got stderr: ${result.stderr}`);
    assert.ok(result.stdout.includes('Validated'), 'Should output validation count');
  })) passed++; else failed++;
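  // Shape of hooks.json exercised by the fixtures below (inferred from these tests;
  // not an authoritative schema):
  //   { "hooks": { "<EventType>": [ { "matcher": "...",
  //       "hooks": [ { "type": "command", "command": "..." or ["node", ...],
  //                    "timeout": <number >= 0>, "async": <boolean> } ] } ] } }
  // A bare top-level array of matcher objects (the legacy format) is also accepted.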
  if (test('exits 0 when hooks.json does not exist', () => {
    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', '/nonexistent/hooks.json');
    assert.strictEqual(result.code, 0, 'Should skip when no hooks.json');
  })) passed++; else failed++;

  if (test('fails on invalid JSON', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, '{ not valid json }}}');

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on invalid JSON');
    assert.ok(result.stderr.includes('Invalid JSON'), 'Should report invalid JSON');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on invalid event type', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        InvalidEventType: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo hi' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on invalid event type');
    assert.ok(result.stderr.includes('Invalid event type'), 'Should report invalid event type');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on hook entry missing type field', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ command: 'echo hi' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on missing type');
    assert.ok(result.stderr.includes('type'), 'Should report missing type');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on hook entry missing command field', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on missing command');
    assert.ok(result.stderr.includes('command'), 'Should report missing command');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on invalid async field type', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo', async: 'yes' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on non-boolean async');
    assert.ok(result.stderr.includes('async'), 'Should report async type error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on negative timeout', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo', timeout: -5 }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on negative timeout');
    assert.ok(result.stderr.includes('timeout'), 'Should report timeout error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on invalid inline JS syntax', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'node -e "function {"' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on invalid inline JS');
    assert.ok(result.stderr.includes('invalid inline JS'), 'Should report JS syntax error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('passes valid inline JS commands', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'node -e "console.log(1+2)"' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should pass valid inline JS');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('validates array command format', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: ['node', '-e', 'console.log(1)'] }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should accept array command format');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('validates legacy array format', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify([
      { matcher: 'test', hooks: [{ type: 'command', command: 'echo ok' }] }
    ]));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should accept legacy array format');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on matcher missing hooks array', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test' }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on missing hooks array');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ==========================================
  // validate-skills.js
  // ==========================================
  console.log('\nvalidate-skills.js:');

  if (test('passes on real project skills', () => {
    const result = runValidator('validate-skills');
    assert.strictEqual(result.code, 0, `Should pass, got stderr: ${result.stderr}`);
    assert.ok(result.stdout.includes('Validated'), 'Should output validation count');
  })) passed++; else failed++;

  if (test('exits 0 when directory does not exist', () => {
    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', '/nonexistent/dir');
    assert.strictEqual(result.code, 0, 'Should skip when no skills dir');
  })) passed++; else failed++;

  if (test('fails on skill directory without SKILL.md', () => {
    const testDir = createTestDir();
    fs.mkdirSync(path.join(testDir, 'broken-skill'));
    // No SKILL.md inside

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail on missing SKILL.md');
    assert.ok(result.stderr.includes('Missing SKILL.md'), 'Should report missing SKILL.md');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on empty SKILL.md', () => {
    const testDir = createTestDir();
    const skillDir = path.join(testDir, 'empty-skill');
    fs.mkdirSync(skillDir);
    fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '');

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail on empty SKILL.md');
    assert.ok(result.stderr.includes('Empty'), 'Should report empty file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('passes on valid skill directory', () => {
    const testDir = createTestDir();
    const skillDir = path.join(testDir, 'good-skill');
    fs.mkdirSync(skillDir);
    fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# My Skill\nDescription here.');

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should pass for valid skill');
    assert.ok(result.stdout.includes('Validated 1'), 'Should report 1 validated');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('ignores non-directory entries', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'not-a-skill.md'), '# README');
    const skillDir = path.join(testDir, 'real-skill');
    fs.mkdirSync(skillDir);
    fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# Skill');

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should ignore non-directory entries');
    assert.ok(result.stdout.includes('Validated 1'), 'Should count only directories');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on whitespace-only SKILL.md', () => {
    const testDir = createTestDir();
    const skillDir = path.join(testDir, 'blank-skill');
    fs.mkdirSync(skillDir);
    fs.writeFileSync(path.join(skillDir, 'SKILL.md'), ' \n\t\n ');

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject whitespace-only SKILL.md');
    assert.ok(result.stderr.includes('Empty file'), 'Should report empty file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ==========================================
  // validate-commands.js
  // ==========================================
  console.log('\nvalidate-commands.js:');

  if (test('passes on real project commands', () => {
    const result = runValidator('validate-commands');
    assert.strictEqual(result.code, 0, `Should pass, got stderr: ${result.stderr}`);
    assert.ok(result.stdout.includes('Validated'), 'Should output validation count');
  })) passed++; else failed++;

  if (test('exits 0 when directory does not exist', () => {
    const result = runValidatorWithDir('validate-commands', 'COMMANDS_DIR', '/nonexistent/dir');
    assert.strictEqual(result.code, 0, 'Should skip when no commands dir');
  })) passed++; else failed++;

  if (test('fails on empty command file', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'empty.md'), '');

    const result = runValidatorWithDir('validate-commands', 'COMMANDS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail on empty file');
    assert.ok(result.stderr.includes('Empty'), 'Should report empty file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('passes on valid command files', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'deploy.md'), '# Deploy\nDeploy the application.');
    fs.writeFileSync(path.join(testDir, 'test.md'), '# Test\nRun all tests.');

    const result = runValidatorWithDir('validate-commands', 'COMMANDS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should pass for valid commands');
    assert.ok(result.stdout.includes('Validated 2'), 'Should report 2 validated');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('ignores non-md files', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'script.js'), 'console.log(1)');
    fs.writeFileSync(path.join(testDir, 'valid.md'), '# Command');

    const result = runValidatorWithDir('validate-commands', 'COMMANDS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should ignore non-md files');
    assert.ok(result.stdout.includes('Validated 1'), 'Should count only .md files');
    cleanupTestDir(testDir);
  })) passed++; else failed++;
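  // The cross-reference tests below exercise several reference styles that
  // validate-commands appears to resolve (inferred from the assertions below, not
  // from the validator's source): backticked `/command` names against COMMANDS_DIR,
  // agents/<name>.md paths against AGENTS_DIR, skills/<name>/ paths against
  // SKILLS_DIR (warning only), and "a -> b" workflow chains against agent names.
  // "Creates:" lines and closed fenced code blocks are skipped.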
  if (test('detects broken command cross-reference', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'my-cmd.md'), '# Command\nUse `/nonexistent-cmd` to do things.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on broken command ref');
    assert.ok(result.stderr.includes('nonexistent-cmd'), 'Should report broken command');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('detects broken agent path reference', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'cmd.md'), '# Command\nAgent: `agents/fake-agent.md`');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on broken agent ref');
    assert.ok(result.stderr.includes('fake-agent'), 'Should report broken agent');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('skips references inside fenced code blocks', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'cmd.md'),
      '# Command\n\n```\nagents/example-agent.md\n`/example-cmd`\n```\n');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should skip refs inside code blocks');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('detects broken workflow agent reference', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'planner.md'), '---\nmodel: sonnet\ntools: Read\n---\n# A');
    fs.writeFileSync(path.join(testDir, 'cmd.md'), '# Command\nWorkflow:\nplanner -> ghost-agent');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on broken workflow agent');
    assert.ok(result.stderr.includes('ghost-agent'), 'Should report broken workflow agent');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('skips command references on creates: lines', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // "Creates: `/new-table`" should NOT flag /new-table as a broken ref
    fs.writeFileSync(path.join(testDir, 'gen.md'),
      '# Generator\n\n→ Creates: `/new-table`\nWould create: `/new-endpoint`');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should skip creates: lines');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('accepts valid cross-reference between commands', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'build.md'), '# Build\nSee also `/deploy` for deployment.');
    fs.writeFileSync(path.join(testDir, 'deploy.md'), '# Deploy\nRun `/build` first.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should accept valid cross-refs');
    assert.ok(result.stdout.includes('Validated 2'), 'Should validate both');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('checks references in unclosed code blocks', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Unclosed code block: the ``` regex won't strip it, so refs inside are checked
    fs.writeFileSync(path.join(testDir, 'bad.md'),
      '# Command\n\n```\n`/phantom-cmd`\nno closing block');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    // Unclosed code blocks are NOT stripped, so refs inside are validated
    assert.strictEqual(result.code, 1, 'Should check refs in unclosed code blocks');
    assert.ok(result.stderr.includes('phantom-cmd'), 'Should report broken ref from unclosed block');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('captures ALL command references on a single line (multi-ref)', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Line with two command references — both should be detected
    fs.writeFileSync(path.join(testDir, 'multi.md'),
      '# Multi\nUse `/ghost-a` and `/ghost-b` together.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on broken refs');
    // BOTH ghost-a AND ghost-b must be reported (this was the greedy regex bug)
    assert.ok(result.stderr.includes('ghost-a'), 'Should report first ref /ghost-a');
    assert.ok(result.stderr.includes('ghost-b'), 'Should report second ref /ghost-b');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;
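  // Hypothetical illustration of the multi-ref capture (not the validator's actual
  // regex): a global pattern like /`\/([a-z0-9-]+)`/g yields one match per reference
  // ("ghost-a", then "ghost-b"), whereas a greedy /`\/(.*)`/ would span from the first
  // backtick to the last and surface only a single mangled reference.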
  if (test('captures three command refs on one line', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'triple.md'),
      '# Triple\nChain `/alpha`, `/beta`, and `/gamma` in order.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on all three broken refs');
    assert.ok(result.stderr.includes('alpha'), 'Should report /alpha');
    assert.ok(result.stderr.includes('beta'), 'Should report /beta');
    assert.ok(result.stderr.includes('gamma'), 'Should report /gamma');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('multi-ref line with one valid and one invalid ref', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // "real-cmd" exists, "fake-cmd" does not
    fs.writeFileSync(path.join(testDir, 'real-cmd.md'), '# Real\nA real command.');
    fs.writeFileSync(path.join(testDir, 'mixed.md'),
      '# Mixed\nRun `/real-cmd` then `/fake-cmd`.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail for the fake ref');
    assert.ok(result.stderr.includes('fake-cmd'), 'Should report /fake-cmd');
    // real-cmd should NOT appear in errors
    assert.ok(!result.stderr.includes('real-cmd'), 'Should not report valid /real-cmd');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('creates: line with multiple refs skips entire line', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Both refs on a "Creates:" line should be skipped entirely
    fs.writeFileSync(path.join(testDir, 'gen.md'),
      '# Generator\nCreates: `/new-a` and `/new-b`');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should skip all refs on creates: line');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('validates valid workflow diagram with known agents', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'planner.md'), '---\nmodel: sonnet\ntools: Read\n---\n# P');
    fs.writeFileSync(path.join(agentsDir, 'reviewer.md'), '---\nmodel: sonnet\ntools: Read\n---\n# R');
    fs.writeFileSync(path.join(testDir, 'flow.md'), '# Workflow\n\nplanner -> reviewer');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should pass on valid workflow');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  // ==========================================
  // validate-rules.js
  // ==========================================
  console.log('\nvalidate-rules.js:');

  if (test('passes on real project rules', () => {
    const result = runValidator('validate-rules');
    assert.strictEqual(result.code, 0, `Should pass, got stderr: ${result.stderr}`);
    assert.ok(result.stdout.includes('Validated'), 'Should output validation count');
  })) passed++; else failed++;

  if (test('exits 0 when directory does not exist', () => {
    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', '/nonexistent/dir');
    assert.strictEqual(result.code, 0, 'Should skip when no rules dir');
  })) passed++; else failed++;

  if (test('fails on empty rule file', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'empty.md'), '');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail on empty rule file');
    assert.ok(result.stderr.includes('Empty'), 'Should report empty file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('passes on valid rule files', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'coding.md'), '# Coding Rules\nUse immutability.');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should pass for valid rules');
    assert.ok(result.stdout.includes('Validated 1'), 'Should report 1 validated');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('fails on whitespace-only rule file', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'blank.md'), ' \n\t\n ');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject whitespace-only rule file');
    assert.ok(result.stderr.includes('Empty'), 'Should report empty file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('validates rules in subdirectories recursively', () => {
    const testDir = createTestDir();
    const subDir = path.join(testDir, 'sub');
    fs.mkdirSync(subDir);
    fs.writeFileSync(path.join(testDir, 'top.md'), '# Top Level Rule');
    fs.writeFileSync(path.join(subDir, 'nested.md'), '# Nested Rule');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should validate nested rules');
    assert.ok(result.stdout.includes('Validated 2'), 'Should find both rules');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ==========================================
  // Round 19: Whitespace and edge-case tests
  // ==========================================

  // --- validate-hooks.js whitespace/null edge cases ---
  console.log('\nvalidate-hooks.js (whitespace edge cases):');

  if (test('rejects whitespace-only command string', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: ' \t ' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject whitespace-only command');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects null command value', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: null }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject null command');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects numeric command value', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 42 }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject numeric command');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // --- validate-agents.js whitespace edge cases ---
  console.log('\nvalidate-agents.js (whitespace edge cases):');

  if (test('rejects agent with whitespace-only model value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'ws-model.md'), '---\nmodel: \t \ntools: Read, Write\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject whitespace-only model');
    assert.ok(result.stderr.includes('model'), 'Should report model field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects agent with whitespace-only tools value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'ws-tools.md'), '---\nmodel: sonnet\ntools: \t \n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject whitespace-only tools');
    assert.ok(result.stderr.includes('tools'), 'Should report tools field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('accepts agent with extra unknown frontmatter fields', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'extra.md'), '---\nmodel: sonnet\ntools: Read, Write\ncustom_field: some value\nauthor: test\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should accept extra unknown fields');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects agent with invalid model value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'bad-model.md'), '---\nmodel: gpt-4\ntools: Read\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject invalid model');
    assert.ok(result.stderr.includes('Invalid model'), 'Should report invalid model');
    assert.ok(result.stderr.includes('gpt-4'), 'Should show the invalid value');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // --- validate-commands.js additional edge cases ---
  console.log('\nvalidate-commands.js (additional edge cases):');

  if (test('reports all invalid agents in mixed agent references', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'real-agent.md'), '---\nmodel: sonnet\ntools: Read\n---\n# A');
    fs.writeFileSync(path.join(testDir, 'cmd.md'),
      '# Cmd\nSee agents/real-agent.md and agents/fake-one.md and agents/fake-two.md');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail on invalid agent refs');
    assert.ok(result.stderr.includes('fake-one'), 'Should report first invalid agent');
    assert.ok(result.stderr.includes('fake-two'), 'Should report second invalid agent');
    assert.ok(!result.stderr.includes('real-agent'), 'Should NOT report valid agent');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('validates workflow with hyphenated agent names', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'tdd-guide.md'), '---\nmodel: sonnet\ntools: Read\n---\n# T');
    fs.writeFileSync(path.join(agentsDir, 'code-reviewer.md'), '---\nmodel: sonnet\ntools: Read\n---\n# C');
    fs.writeFileSync(path.join(testDir, 'flow.md'), '# Workflow\n\ntdd-guide -> code-reviewer');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should pass on hyphenated agent names in workflow');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('detects skill directory reference warning', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Reference a non-existent skill directory
    fs.writeFileSync(path.join(testDir, 'cmd.md'),
      '# Command\nSee skills/nonexistent-skill/ for details.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    // Should pass (warnings don't cause exit 1) but stderr should have warning
    assert.strictEqual(result.code, 0, 'Skill warnings should not cause failure');
    assert.ok(result.stdout.includes('warning'), 'Should report warning count');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  // ==========================================
  // Round 22: Hook schema edge cases & empty directory paths
  // ==========================================

  // --- validate-hooks.js: schema edge cases ---
  console.log('\nvalidate-hooks.js (schema edge cases):');

  if (test('rejects event type value that is not an array', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: { PreToolUse: 'not-an-array' }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on non-array event type value');
    assert.ok(result.stderr.includes('must be an array'), 'Should report must be an array');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects matcher entry that is null', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: { PreToolUse: [null] }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on null matcher entry');
    assert.ok(result.stderr.includes('is not an object'), 'Should report not an object');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects matcher entry that is a string', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: { PreToolUse: ['just-a-string'] }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on string matcher entry');
    assert.ok(result.stderr.includes('is not an object'), 'Should report not an object');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects top-level data that is a string', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, '"just a string"');

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on string data');
    assert.ok(result.stderr.includes('must be an object or array'), 'Should report must be object or array');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects top-level data that is a number', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, '42');

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on numeric data');
    assert.ok(result.stderr.includes('must be an object or array'), 'Should report must be object or array');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects empty string command', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: '' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject empty string command');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects empty array command', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: [] }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject empty array command');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects array command with non-string elements', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: ['node', 123, null] }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject non-string array elements');
    assert.ok(result.stderr.includes('command'), 'Should report command field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects non-string type field', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 42, command: 'echo hi' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject non-string type');
    assert.ok(result.stderr.includes('type'), 'Should report type field error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects non-number timeout type', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo', timeout: 'fast' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should reject string timeout');
    assert.ok(result.stderr.includes('timeout'), 'Should report timeout type error');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('accepts timeout of exactly 0', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo', timeout: 0 }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should accept timeout of 0');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('validates object format without wrapping hooks key', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    // data.hooks is undefined, so fallback to data itself
    fs.writeFileSync(hooksFile, JSON.stringify({
      PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo ok' }] }]
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should accept object format without hooks wrapper');
    cleanupTestDir(testDir);
  })) passed++; else failed++;
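  // Taken together, the format tests above suggest three accepted top-level shapes
  // (inferred from the assertions, not from validate-hooks internals):
  //   1. { hooks: { EventName: [matcher, ...] } }  -- wrapped object format
  //   2. { EventName: [matcher, ...] }             -- bare object without the hooks key
  //   3. [matcher, ...]                            -- legacy array format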
|
|
|
|
// --- validate-hooks.js: legacy format error paths ---
|
|
console.log('\nvalidate-hooks.js (legacy format errors):');
|
|
|
|
if (test('legacy format: rejects matcher missing matcher field', () => {
|
|
const testDir = createTestDir();
|
|
const hooksFile = path.join(testDir, 'hooks.json');
|
|
fs.writeFileSync(hooksFile, JSON.stringify([
|
|
{ hooks: [{ type: 'command', command: 'echo ok' }] }
|
|
]));
|
|
|
|
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
|
assert.strictEqual(result.code, 1, 'Should fail on missing matcher in legacy format');
|
|
assert.ok(result.stderr.includes('matcher'), 'Should report missing matcher');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
if (test('legacy format: rejects matcher missing hooks array', () => {
|
|
const testDir = createTestDir();
|
|
const hooksFile = path.join(testDir, 'hooks.json');
|
|
fs.writeFileSync(hooksFile, JSON.stringify([
|
|
{ matcher: 'test' }
|
|
]));
|
|
|
|
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
|
assert.strictEqual(result.code, 1, 'Should fail on missing hooks array in legacy format');
|
|
assert.ok(result.stderr.includes('hooks'), 'Should report missing hooks');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
// --- validate-agents.js: empty directory ---
|
|
console.log('\nvalidate-agents.js (empty directory):');
|
|
|
|
if (test('passes on empty agents directory', () => {
|
|
const testDir = createTestDir();
|
|
// No .md files, just an empty dir
|
|
|
|
const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
|
|
assert.strictEqual(result.code, 0, 'Should pass on empty directory');
|
|
assert.ok(result.stdout.includes('Validated 0'), 'Should report 0 validated');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
// --- validate-commands.js: whitespace-only file ---
|
|
console.log('\nvalidate-commands.js (whitespace edge cases):');
|
|
|
|
if (test('fails on whitespace-only command file', () => {
|
|
const testDir = createTestDir();
|
|
fs.writeFileSync(path.join(testDir, 'blank.md'), ' \n\t\n ');
|
|
|
|
const result = runValidatorWithDir('validate-commands', 'COMMANDS_DIR', testDir);
|
|
assert.strictEqual(result.code, 1, 'Should reject whitespace-only command file');
|
|
assert.ok(result.stderr.includes('Empty'), 'Should report empty file');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
if (test('accepts valid skill directory reference', () => {
|
|
const testDir = createTestDir();
|
|
const agentsDir = createTestDir();
|
|
const skillsDir = createTestDir();
|
|
// Create a matching skill directory
|
|
fs.mkdirSync(path.join(skillsDir, 'my-skill'));
|
|
fs.writeFileSync(path.join(testDir, 'cmd.md'),
|
|
'# Command\nSee skills/my-skill/ for details.');
|
|
|
|
const result = runValidatorWithDirs('validate-commands', {
|
|
COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
|
|
});
|
|
assert.strictEqual(result.code, 0, 'Should pass on valid skill reference');
|
|
assert.ok(!result.stdout.includes('warning'), 'Should have no warnings');
|
|
cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
|
|
})) passed++; else failed++;
|
|
|
|
// --- validate-rules.js: mixed valid/invalid ---
|
|
console.log('\nvalidate-rules.js (mixed files):');
|
|
|
|
if (test('fails on mix of valid and empty rule files', () => {
|
|
const testDir = createTestDir();
|
|
fs.writeFileSync(path.join(testDir, 'good.md'), '# Good Rule\nContent here.');
|
|
fs.writeFileSync(path.join(testDir, 'bad.md'), '');
|
|
|
|
const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
|
|
assert.strictEqual(result.code, 1, 'Should fail when any rule is empty');
|
|
assert.ok(result.stderr.includes('bad.md'), 'Should report the bad file');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
// ── Round 27: hook validation edge cases ──
|
|
console.log('\nvalidate-hooks.js (Round 27 edge cases):');
|
|
|
|
if (test('rejects array command with empty string element', () => {
|
|
const testDir = createTestDir();
|
|
const hooksFile = path.join(testDir, 'hooks.json');
|
|
fs.writeFileSync(hooksFile, JSON.stringify({
|
|
hooks: {
|
|
PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: ['node', '', 'script.js'] }] }]
|
|
}
|
|
}));
|
|
|
|
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
|
assert.strictEqual(result.code, 1, 'Should reject array with empty string element');
|
|
assert.ok(result.stderr.includes('command'), 'Should report command field error');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
if (test('rejects negative timeout', () => {
|
|
const testDir = createTestDir();
|
|
const hooksFile = path.join(testDir, 'hooks.json');
|
|
fs.writeFileSync(hooksFile, JSON.stringify({
|
|
hooks: {
|
|
PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo hi', timeout: -5 }] }]
|
|
}
|
|
}));
|
|
|
|
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
|
assert.strictEqual(result.code, 1, 'Should reject negative timeout');
|
|
assert.ok(result.stderr.includes('timeout'), 'Should report timeout error');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
if (test('rejects non-boolean async field', () => {
|
|
const testDir = createTestDir();
|
|
const hooksFile = path.join(testDir, 'hooks.json');
|
|
fs.writeFileSync(hooksFile, JSON.stringify({
|
|
hooks: {
|
|
PostToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo ok', async: 'yes' }] }]
|
|
}
|
|
}));
|
|
|
|
const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
|
|
assert.strictEqual(result.code, 1, 'Should reject non-boolean async');
|
|
assert.ok(result.stderr.includes('async'), 'Should report async type error');
|
|
cleanupTestDir(testDir);
|
|
})) passed++; else failed++;
|
|
|
|
  if (test('reports correct index for error in deeply nested hook', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    const manyHooks = [];
    for (let i = 0; i < 5; i++) {
      manyHooks.push({ type: 'command', command: 'echo ok' });
    }
    // Add an invalid hook at index 5
    manyHooks.push({ type: 'command', command: '' });
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: manyHooks }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 1, 'Should fail on invalid hook at high index');
    assert.ok(result.stderr.includes('hooks[5]'), 'Should report correct hook index 5');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('validates node -e with escaped quotes in inline JS', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'node -e "const x = 1 + 2; process.exit(0)"' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should pass valid multi-statement inline JS');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('accepts multiple valid event types in single hooks file', () => {
    const testDir = createTestDir();
    const hooksFile = path.join(testDir, 'hooks.json');
    fs.writeFileSync(hooksFile, JSON.stringify({
      hooks: {
        PreToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo pre' }] }],
        PostToolUse: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo post' }] }],
        Stop: [{ matcher: 'test', hooks: [{ type: 'command', command: 'echo stop' }] }]
      }
    }));

    const result = runValidatorWithDir('validate-hooks', 'HOOKS_FILE', hooksFile);
    assert.strictEqual(result.code, 0, 'Should accept multiple valid event types');
    assert.ok(result.stdout.includes('3'), 'Should report 3 matchers validated');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ── Round 27: command validation edge cases ──
  console.log('\nvalidate-commands.js (Round 27 edge cases):');
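  // Assumption behind these fixtures: validate-commands scans each command's markdown
  // for backtick-quoted `/name` references outside fenced code blocks and resolves them
  // against COMMANDS_DIR. This is inferred from the fixtures below, not from the
  // validator's source.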
  if (test('validates multiple command refs on same non-creates line', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Create two valid commands
    fs.writeFileSync(path.join(testDir, 'cmd-a.md'), '# Command A\nBasic command.');
    fs.writeFileSync(path.join(testDir, 'cmd-b.md'), '# Command B\nBasic command.');
    // Create a third command that references both on one line
    fs.writeFileSync(path.join(testDir, 'cmd-c.md'),
      '# Command C\nUse `/cmd-a` and `/cmd-b` together.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should pass when multiple refs on same line are all valid');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('fails when one of multiple refs on same line is invalid', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Only cmd-a exists
    fs.writeFileSync(path.join(testDir, 'cmd-a.md'), '# Command A\nBasic command.');
    // cmd-c references cmd-a (valid) and cmd-z (invalid) on same line
    fs.writeFileSync(path.join(testDir, 'cmd-c.md'),
      '# Command C\nUse `/cmd-a` and `/cmd-z` together.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should fail when any ref is invalid');
    assert.ok(result.stderr.includes('cmd-z'), 'Should report the invalid reference');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('code blocks are stripped before checking references', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Reference inside a code block should not be validated
    fs.writeFileSync(path.join(testDir, 'cmd-x.md'),
      '# Command X\n```\n`/nonexistent-cmd` in code block\n```\nEnd.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should ignore command refs inside code blocks');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  // --- validate-skills.js: mixed valid/invalid ---
  console.log('\nvalidate-skills.js (mixed dirs):');
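  // Assumption behind this fixture: a skill is a subdirectory of SKILLS_DIR containing
  // a non-empty SKILL.md; a missing or empty SKILL.md should be reported as an error.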
  if (test('fails on mix of valid and invalid skill directories', () => {
    const testDir = createTestDir();
    // Valid skill
    const goodSkill = path.join(testDir, 'good-skill');
    fs.mkdirSync(goodSkill);
    fs.writeFileSync(path.join(goodSkill, 'SKILL.md'), '# Good Skill');
    // Missing SKILL.md
    const badSkill = path.join(testDir, 'bad-skill');
    fs.mkdirSync(badSkill);
    // Empty SKILL.md
    const emptySkill = path.join(testDir, 'empty-skill');
    fs.mkdirSync(emptySkill);
    fs.writeFileSync(path.join(emptySkill, 'SKILL.md'), '');

    const result = runValidatorWithDir('validate-skills', 'SKILLS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail when any skill is invalid');
    assert.ok(result.stderr.includes('bad-skill'), 'Should report missing SKILL.md');
    assert.ok(result.stderr.includes('empty-skill'), 'Should report empty SKILL.md');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ── Round 30: validate-commands skill warnings and workflow edge cases ──
  console.log('\nRound 30: validate-commands (skill warnings):');
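  // These tests rely on a severity distinction assumed from the fixtures: missing
  // backtick `/command` targets are hard errors, while unresolved `skills/<name>/`
  // path references only produce warnings, so the exit code stays 0.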
  if (test('warns (not errors) when skill directory reference is not found', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    // Create a command that references a skill via path (skills/name/) format
    // but the skill doesn't exist — should warn, not error
    fs.writeFileSync(path.join(testDir, 'cmd-a.md'),
      '# Command A\nSee skills/nonexistent-skill/ for details.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    // Skill directory references produce warnings, not errors — exit 0
    assert.strictEqual(result.code, 0, 'Skill path references should warn, not error');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('passes when command has no slash references at all', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'cmd-simple.md'),
      '# Simple Command\nThis command has no references to other commands.');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should pass with no references');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  console.log('\nRound 30: validate-agents (model validation):');
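  // Assumption from the fixtures below: haiku, sonnet, and opus are the accepted
  // `model` values; anything else (e.g. gpt-4) should be rejected.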
  if (test('rejects agent with unrecognized model value', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'bad-model.md'),
      '---\nmodel: gpt-4\ntools: Read, Write\n---\n# Bad Model Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject unrecognized model');
    assert.ok(result.stderr.includes('gpt-4'), 'Should mention the invalid model');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('accepts all valid model values (haiku, sonnet, opus)', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'haiku.md'),
      '---\nmodel: haiku\ntools: Read\n---\n# Haiku Agent');
    fs.writeFileSync(path.join(testDir, 'sonnet.md'),
      '---\nmodel: sonnet\ntools: Read, Write\n---\n# Sonnet Agent');
    fs.writeFileSync(path.join(testDir, 'opus.md'),
      '---\nmodel: opus\ntools: Read, Write, Bash\n---\n# Opus Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 0, 'All valid models should pass');
    assert.ok(result.stdout.includes('3'), 'Should validate 3 agent files');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  // ── Round 32: empty frontmatter & edge cases ──
  console.log('\nRound 32: validate-agents (empty frontmatter):');
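  // Sketch of the minimal valid agent file these fixtures assume:
  //   ---
  //   model: sonnet
  //   tools: Read, Write
  //   ---
  //   # Agent body
  // Both `model` and `tools` must appear inside the frontmatter block.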
  if (test('rejects agent with empty frontmatter block (no key-value pairs)', () => {
    const testDir = createTestDir();
    // Blank line between --- markers creates a valid but empty frontmatter block
    fs.writeFileSync(path.join(testDir, 'empty-fm.md'), '---\n\n---\n# Agent with empty frontmatter');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject empty frontmatter');
    assert.ok(result.stderr.includes('model'), 'Should report missing model');
    assert.ok(result.stderr.includes('tools'), 'Should report missing tools');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects agent with no content between --- markers (Missing frontmatter)', () => {
    const testDir = createTestDir();
    // ---\n--- with no blank line → regex doesn't match → "Missing frontmatter"
    fs.writeFileSync(path.join(testDir, 'no-fm.md'), '---\n---\n# Agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject missing frontmatter');
    assert.ok(result.stderr.includes('Missing frontmatter'), 'Should report missing frontmatter');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('rejects agent with partial frontmatter (only model, no tools)', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'partial.md'), '---\nmodel: haiku\n---\n# Partial agent');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should reject partial frontmatter');
    assert.ok(result.stderr.includes('tools'), 'Should report missing tools');
    assert.ok(!result.stderr.includes('model'), 'Should NOT report model (it is present)');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('handles multiple agents where only one is invalid', () => {
    const testDir = createTestDir();
    fs.writeFileSync(path.join(testDir, 'good.md'), '---\nmodel: sonnet\ntools: Read\n---\n# Good');
    fs.writeFileSync(path.join(testDir, 'bad.md'), '---\nmodel: invalid-model\ntools: Read\n---\n# Bad');

    const result = runValidatorWithDir('validate-agents', 'AGENTS_DIR', testDir);
    assert.strictEqual(result.code, 1, 'Should fail when any agent is invalid');
    assert.ok(result.stderr.includes('bad.md'), 'Should identify the bad file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  console.log('\nRound 32: validate-rules (non-file entries):');
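  // Assumption from the fixtures below: a rule is any regular .md file under RULES_DIR
  // (including nested subdirectories); directory entries are skipped via stat.isFile()
  // even when their name ends in .md.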
  if (test('skips directory entries even if named with .md extension', () => {
    const testDir = createTestDir();
    // Create a directory named "tricky.md" — stat.isFile() should skip it
    fs.mkdirSync(path.join(testDir, 'tricky.md'));
    fs.writeFileSync(path.join(testDir, 'real.md'), '# A real rule');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should skip directory entries');
    assert.ok(result.stdout.includes('Validated 1'), 'Should count only the real file');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  if (test('handles deeply nested rule in subdirectory', () => {
    const testDir = createTestDir();
    const deepDir = path.join(testDir, 'cat1', 'sub1');
    fs.mkdirSync(deepDir, { recursive: true });
    fs.writeFileSync(path.join(deepDir, 'deep-rule.md'), '# Deep nested rule');

    const result = runValidatorWithDir('validate-rules', 'RULES_DIR', testDir);
    assert.strictEqual(result.code, 0, 'Should validate deeply nested rules');
    assert.ok(result.stdout.includes('Validated 1'), 'Should find the nested rule');
    cleanupTestDir(testDir);
  })) passed++; else failed++;

  console.log('\nRound 32: validate-commands (agent reference with valid workflow):');
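  // These fixtures use the `name -> name -> name` workflow notation inside a command
  // file; each name in the chain is assumed to resolve to an agent file in AGENTS_DIR,
  // and a missing link should be reported as an error.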
  if (test('passes workflow with three chained agents', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'planner.md'), '---\nmodel: sonnet\ntools: Read\n---\n# P');
    fs.writeFileSync(path.join(agentsDir, 'tdd-guide.md'), '---\nmodel: sonnet\ntools: Read\n---\n# T');
    fs.writeFileSync(path.join(agentsDir, 'code-reviewer.md'), '---\nmodel: sonnet\ntools: Read\n---\n# C');
    fs.writeFileSync(path.join(testDir, 'flow.md'), '# Flow\n\nplanner -> tdd-guide -> code-reviewer');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 0, 'Should pass on valid 3-agent workflow');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  if (test('detects broken agent in middle of workflow chain', () => {
    const testDir = createTestDir();
    const agentsDir = createTestDir();
    const skillsDir = createTestDir();
    fs.writeFileSync(path.join(agentsDir, 'planner.md'), '---\nmodel: sonnet\ntools: Read\n---\n# P');
    fs.writeFileSync(path.join(agentsDir, 'code-reviewer.md'), '---\nmodel: sonnet\ntools: Read\n---\n# C');
    // missing-agent is NOT created
    fs.writeFileSync(path.join(testDir, 'flow.md'), '# Flow\n\nplanner -> missing-agent -> code-reviewer');

    const result = runValidatorWithDirs('validate-commands', {
      COMMANDS_DIR: testDir, AGENTS_DIR: agentsDir, SKILLS_DIR: skillsDir
    });
    assert.strictEqual(result.code, 1, 'Should detect broken agent in workflow chain');
    assert.ok(result.stderr.includes('missing-agent'), 'Should report the missing agent');
    cleanupTestDir(testDir); cleanupTestDir(agentsDir); cleanupTestDir(skillsDir);
  })) passed++; else failed++;

  // Summary
  console.log(`\nResults: Passed: ${passed}, Failed: ${failed}`);
  process.exit(failed > 0 ? 1 : 0);
}

runTests();