docs: add VitePress documentation site

- Add docs directory with VitePress configuration
- Add GitHub Actions workflow for docs build and deploy
- Support bilingual (English/Chinese) documentation
- Include search, custom theme, and responsive design
This commit is contained in:
catlog22
2026-02-28 16:14:09 +08:00
parent ab65caec45
commit c3ddf7e322
136 changed files with 34486 additions and 0 deletions

View File

@@ -0,0 +1,185 @@
import fs from 'node:fs/promises'
import path from 'node:path'
import FlexSearch from 'flexsearch'
import {
createFlexSearchIndex,
FLEXSEARCH_INDEX_VERSION
} from '../.vitepress/search/flexsearch.mjs'
// Docs root — assumes the script is run from the docs directory (TODO confirm
// against the npm script that invokes it).
const ROOT_DIR = process.cwd()
// Output directory for the generated index JSON; VitePress serves `public/`
// contents as static assets.
const PUBLIC_DIR = path.join(ROOT_DIR, 'public')
// Directory names skipped at every depth of the markdown scan — none of these
// contain indexable documentation pages.
const EXCLUDED_DIRS = new Set([
  '.github',
  '.vitepress',
  '.workflow',
  'node_modules',
  'public',
  'scripts'
])
/** Normalize OS-specific path separators to forward slashes. */
function toPosixPath(filePath) {
  return filePath.split(path.sep).join('/')
}
/** Pages under `zh/` belong to the Chinese locale; everything else is 'root'. */
function getLocaleKey(relativePosixPath) {
  if (relativePosixPath.startsWith('zh/')) {
    return 'zh'
  }
  return 'root'
}
/**
 * Map a markdown file path to its clean VitePress URL:
 * "index.md" -> "/", "guide/index.md" -> "/guide/", "guide/intro.md" -> "/guide/intro".
 */
function toPageUrl(relativePosixPath) {
  const withoutExt = relativePosixPath.replace(/\.md$/i, '')
  if (withoutExt === 'index') {
    return '/'
  }
  const indexSuffix = '/index'
  if (withoutExt.endsWith(indexSuffix)) {
    return '/' + withoutExt.slice(0, -indexSuffix.length) + '/'
  }
  return '/' + withoutExt
}
/**
 * Resolve a page title with fallbacks: frontmatter `title:` → first `# H1`
 * → title-cased filename. Always returns a non-empty string.
 *
 * Fix: an empty frontmatter title (e.g. `title: ""`) previously returned
 * `undefined`, which later leaked into the index as the literal string
 * "undefined"; it now falls through to the H1/filename fallbacks.
 */
function extractTitle(markdown, relativePosixPath) {
  const normalized = markdown.replace(/\r\n/g, '\n')
  const frontmatterMatch = normalized.match(/^---\n([\s\S]*?)\n---\n/)
  if (frontmatterMatch) {
    const titleLine = frontmatterMatch[1]
      .split('\n')
      .map((l) => l.trim())
      .find((l) => l.toLowerCase().startsWith('title:'))
    if (titleLine) {
      const raw = titleLine.slice('title:'.length).trim()
      // Strip surrounding quotes; keep the value only if something remains.
      const unquoted = raw.replace(/^['"]|['"]$/g, '')
      if (unquoted) return unquoted
    }
  }
  const firstH1 = normalized.match(/^#\s+(.+)\s*$/m)
  if (firstH1?.[1]) return firstH1[1].trim()
  // Derive "My Page Name" from "my-page_name.md".
  return path
    .basename(relativePosixPath, '.md')
    .replace(/-/g, ' ')
    .replace(/_/g, ' ')
    .replace(/\b\w/g, (c) => c.toUpperCase())
}
/** Remove a leading YAML frontmatter block (`--- ... ---`) if one is present. */
function stripFrontmatter(markdown) {
  const normalized = markdown.replace(/\r\n/g, '\n')
  const fm = normalized.match(/^---\n[\s\S]*?\n---\n/)
  return fm ? normalized.slice(fm[0].length) : normalized
}
/**
 * Reduce markdown to plain searchable text.
 * Ordered regex pipeline — order matters: code fences must be removed before
 * inline code, and markdown constructs before the generic HTML-tag strip.
 */
function stripMarkdown(markdown) {
  const steps = [
    { re: /<(script|style)[^>]*>[\s\S]*?<\/\1>/gi, sub: ' ' }, // SFC blocks
    { re: /```[\s\S]*?```/g, sub: ' ' },                       // code fences
    { re: /~~~[\s\S]*?~~~/g, sub: ' ' },
    { re: /`[^`]*`/g, sub: ' ' },                              // inline code
    { re: /!\[([^\]]*)\]\([^)]+\)/g, sub: '$1' },              // images -> alt text
    { re: /\[([^\]]+)\]\([^)]+\)/g, sub: '$1' },               // links -> label
    { re: /^#{1,6}\s+/gm, sub: '' },                           // heading markers
    { re: /^>\s?/gm, sub: '' },                                // blockquote markers
    { re: /^\s*[-*+]\s+/gm, sub: '' },                         // bullet lists
    { re: /^\s*\d+\.\s+/gm, sub: '' },                         // ordered lists
    { re: /[*_~]+/g, sub: ' ' },                               // emphasis
    { re: /<[^>]+>/g, sub: ' ' },                              // leftover HTML tags
    { re: /\s+/g, sub: ' ' }                                   // collapse whitespace
  ]
  let text = markdown
  for (const step of steps) {
    text = text.replace(step.re, step.sub)
  }
  return text.trim()
}
/**
 * Recursively collect absolute paths of every `*.md` file under `dir`,
 * skipping EXCLUDED_DIRS by name at every depth.
 */
async function collectMarkdownFiles(dir) {
  const found = []
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      if (!EXCLUDED_DIRS.has(entry.name)) {
        const nested = await collectMarkdownFiles(fullPath)
        found.push(...nested)
      }
    } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
      found.push(fullPath)
    }
  }
  return found
}
/**
 * Build the serializable search payload for one locale: a FlexSearch index
 * export plus a parallel docs list (id/title/url/excerpt) for result display.
 */
async function buildIndexForLocale(localeKey, relativePosixPaths) {
  const index = createFlexSearchIndex(FlexSearch)
  const docs = []
  let docId = 0
  for (const relPath of relativePosixPaths) {
    const markdown = await fs.readFile(path.join(ROOT_DIR, relPath), 'utf-8')
    const title = extractTitle(markdown, relPath)
    const content = stripMarkdown(stripFrontmatter(markdown))
    const searchable = `${title}\n${content}`.trim()
    // Skip pages with nothing indexable.
    if (!searchable) continue
    docId += 1
    index.add(docId, searchable)
    docs.push({
      id: docId,
      title,
      url: toPageUrl(relPath),
      excerpt: content.slice(0, 180)
    })
  }
  // FlexSearch exports its internal state through a key/value callback.
  const exported = {}
  await index.export((key, data) => {
    exported[key] = data
  })
  return {
    version: FLEXSEARCH_INDEX_VERSION,
    locale: localeKey,
    index: exported,
    docs
  }
}
/**
 * Scan all markdown under ROOT_DIR, bucket pages by locale, and write one
 * `search-index.<locale>.json` per locale into PUBLIC_DIR.
 */
async function main() {
  await fs.mkdir(PUBLIC_DIR, { recursive: true })
  const absolutePaths = await collectMarkdownFiles(ROOT_DIR)
  // Sort for a deterministic index (stable ids across rebuilds of the same tree).
  const relativePaths = absolutePaths
    .map((abs) => toPosixPath(path.relative(ROOT_DIR, abs)))
    .sort((a, b) => a.localeCompare(b))
  const byLocale = new Map([
    ['root', []],
    ['zh', []]
  ])
  for (const rel of relativePaths) {
    byLocale.get(getLocaleKey(rel))?.push(rel)
  }
  for (const [localeKey, files] of byLocale) {
    const payload = await buildIndexForLocale(localeKey, files)
    const outPath = path.join(PUBLIC_DIR, `search-index.${localeKey}.json`)
    await fs.writeFile(outPath, JSON.stringify(payload), 'utf-8')
  }
}
// Entry point: exit non-zero on any failure so CI surfaces index-build errors.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})

View File

@@ -0,0 +1,91 @@
#!/usr/bin/env node
/**
* Search Index Size Checker
* Alerts when search index exceeds recommended size for FlexSearch
*/
import fs from 'node:fs'
import path from 'node:path'
import { pathToFileURL } from 'node:url'
// Built index files to audit — produced into the VitePress dist output.
const INDEX_PATHS = [
  path.join(process.cwd(), '.vitepress/dist/search-index.root.json'),
  path.join(process.cwd(), '.vitepress/dist/search-index.zh.json')
]
// Thresholds beyond which client-side FlexSearch performance tends to degrade.
const MAX_SIZE = 1024 * 1024 // 1MB
const MAX_DOCS = 2000
/**
 * Format a byte count as fixed-point KB and MB strings.
 * Extracted so the per-file and total reports share one implementation
 * (the computation was previously duplicated).
 */
function formatSize(bytes) {
  return {
    kb: (bytes / 1024).toFixed(2),
    mb: (bytes / (1024 * 1024)).toFixed(2)
  }
}

/**
 * Audit the built search index files: print a size/doc-count report and
 * return a process exit code — 0 when within limits, 1 when files are
 * missing or MAX_SIZE/MAX_DOCS is exceeded.
 */
function checkIndexSize() {
  const missing = INDEX_PATHS.filter((p) => !fs.existsSync(p))
  if (missing.length > 0) {
    console.log('⚠️ Search index not found. Run build first.')
    for (const p of missing) console.log(` Missing: ${p}`)
    return 1
  }
  let totalBytes = 0
  let totalDocs = 0
  console.log(`\n📊 Search Index Analysis`)
  console.log(`━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━`)
  for (const indexPath of INDEX_PATHS) {
    const stats = fs.statSync(indexPath)
    totalBytes += stats.size
    const size = formatSize(stats.size)
    console.log(`File: ${path.relative(process.cwd(), indexPath)}`)
    console.log(`Size: ${size.kb} KB (${size.mb} MB)`)
    // Doc count is best-effort: the file may be unreadable or in an
    // unexpected shape; report what we can without failing the audit.
    try {
      const parsed = JSON.parse(fs.readFileSync(indexPath, 'utf-8'))
      if (Array.isArray(parsed.docs)) {
        totalDocs += parsed.docs.length
        console.log(`Docs: ${parsed.docs.length}`)
      } else {
        console.log(`Docs: (unknown format)`)
      }
    } catch {
      console.log(`Docs: (unavailable)`)
    }
    console.log('')
  }
  const total = formatSize(totalBytes)
  console.log(`Total: ${total.kb} KB (${total.mb} MB)`)
  console.log(`Total docs: ~${totalDocs}`)
  // Size threshold check.
  if (totalBytes > MAX_SIZE) {
    console.log(`\n⚠️ WARNING: Index size exceeds ${MAX_SIZE / 1024 / 1024} MB`)
    console.log(` Current: ${total.mb} MB`)
    console.log(` Impact: Slower search performance`)
    console.log(` Recommendation: Consider Algolia DocSearch\n`)
    console.log(`Migration Options:`)
    console.log(` 1. Apply for Algolia DocSearch (free for open source)`)
    console.log(` 2. Reduce indexed content`)
    console.log(` 3. Split documentation into multiple sites\n`)
    return 1
  }
  // Document-count threshold check.
  if (totalDocs > MAX_DOCS) {
    console.log(`\n⚠️ WARNING: Indexed docs exceeds ${MAX_DOCS}`)
    console.log(` Current: ${totalDocs} docs`)
    console.log(` Recommendation: Consider Algolia DocSearch\n`)
    return 1
  }
  console.log(`\n✅ Search index is within recommended limits\n`)
  return 0
}
// Run if called directly
if (import.meta.url === pathToFileURL(process.argv[1]).href) {
process.exit(checkIndexSize())
}
export { checkIndexSize }

View File

@@ -0,0 +1,130 @@
/**
* CLI Documentation Generator
* Parses ccw/src/tools/command-registry.ts and generates Markdown docs
*/
import fs from 'fs'
import path from 'path'
import { pathToFileURL } from 'node:url'
/** A CLI command as rendered in the generated reference document. */
interface Command {
  name: string
  description: string
  options: CommandOption[]
  examples: string[]
}
/** A single flag or subcommand accepted by a {@link Command}. */
interface CommandOption {
  name: string
  description: string
  type: string
  required: boolean
  // Rendered as '-' in the options table when absent.
  default?: string
}
/**
 * Return the commands to document.
 * TODO: parse the real registry (ccw/src/tools/command-registry.ts); for now
 * this is hand-maintained mock data.
 */
function parseCommandRegistry(): Command[] {
  const cliCommand: Command = {
    name: 'cli',
    description: 'Execute AI-powered CLI operations',
    options: [
      {
        name: '-p, --prompt',
        description: 'Prompt text for the AI',
        type: 'string',
        required: true
      },
      {
        name: '--tool',
        description: 'AI tool to use (gemini, codex, qwen, claude)',
        type: 'string',
        required: false,
        default: 'first enabled'
      },
      {
        name: '--mode',
        description: 'Execution mode (analysis, write, review)',
        type: 'string',
        required: true
      }
    ],
    examples: [
      'ccw cli -p "Analyze codebase" --mode analysis',
      'ccw cli -p "Add auth" --mode write --tool codex'
    ]
  }
  const skillCommand: Command = {
    name: 'skill',
    description: 'Manage and execute skills',
    options: [
      {
        name: 'list',
        description: 'List all available skills',
        type: 'boolean',
        required: false
      },
      {
        name: 'run',
        description: 'Run a specific skill',
        type: 'string',
        required: false
      }
    ],
    examples: ['ccw skill list', 'ccw skill run commit']
  }
  return [cliCommand, skillCommand]
}
/** Render one command as a Markdown section: heading, options table, examples. */
function generateCommandMarkdown(command: Command): string {
  const parts: string[] = [`## ${command.name}\n\n`, `${command.description}\n\n`]
  if (command.options.length > 0) {
    parts.push(`### Options\n\n`)
    parts.push(`| Option | Type | Required | Default | Description |\n`)
    parts.push(`|--------|------|----------|---------|-------------|\n`)
    for (const opt of command.options) {
      const requiredLabel = opt.required ? 'Yes' : 'No'
      const defaultLabel = opt.default ?? '-'
      parts.push(
        `| \`${opt.name}\` | ${opt.type} | ${requiredLabel} | ${defaultLabel} | ${opt.description} |\n`
      )
    }
    parts.push(`\n`)
  }
  if (command.examples.length > 0) {
    parts.push(`### Examples\n\n`)
    for (const example of command.examples) {
      parts.push(`\`\`\`bash\n${example}\n\`\`\`\n\n`)
    }
  }
  return parts.join('')
}
/**
 * Render all registry commands into `cli/commands.generated.md` under cwd.
 * Fix: ensure the output directory exists first — `writeFileSync` does not
 * create parent directories and throws ENOENT on a checkout without `cli/`.
 */
function generateDocs(): void {
  const commands = parseCommandRegistry()
  const outputPath = path.join(process.cwd(), 'cli/commands.generated.md')
  let markdown = `# CLI Commands Reference\n\n`
  markdown += `Complete reference for all CCW CLI commands.\n\n`
  for (const command of commands) {
    markdown += generateCommandMarkdown(command)
    markdown += `---\n\n`
  }
  fs.mkdirSync(path.dirname(outputPath), { recursive: true })
  fs.writeFileSync(outputPath, markdown, 'utf-8')
  console.log(`✅ Generated CLI documentation: ${outputPath}`)
}
// Run only when executed directly. pathToFileURL handles Windows drive letters
// and percent-encoding correctly, unlike naive `file://${argv[1]}` string
// building (which never matches import.meta.url on Windows); this also matches
// the guard style used by the index size checker. argv[1] is guarded because
// it is undefined under REPL/eval.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  generateDocs()
}
export { generateDocs, parseCommandRegistry, generateCommandMarkdown }