mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-08 02:14:08 +08:00
refactor: Update workflow plan system and template organization
- Remove --analyze|--deep parameters from plan.md, use default analysis
- Change .analysis to .process directory structure for better organization
- Create ANALYSIS_RESULTS.md template focused on verified results
- Add .process folder to workflow-architecture.md file structure
- Template emphasizes verification of files, methods, and commands
- Prevent execution errors from non-existent references

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
16
.claude/python_script/utils/__init__.py
Normal file
@@ -0,0 +1,16 @@
"""
Shared utility functions and helpers.
Provides common functionality for colors, caching, and I/O operations.
"""

from .colors import Colors
from .cache import CacheManager
from .io_helpers import IOHelpers, ensure_directory, safe_read_file

__all__ = [
    'Colors',
    'CacheManager',
    'IOHelpers',
    'ensure_directory',
    'safe_read_file'
]
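For orientation, here is a minimal sketch of how a downstream script might consume these exports. It assumes `.claude/python_script` is the import root and borrows the `.process` layout from the commit message; neither is shown in this diff, so treat both as assumptions.

# Hypothetical usage; assumes .claude/python_script is on sys.path.
from utils import Colors, CacheManager, ensure_directory, safe_read_file

ensure_directory(".process")                  # directory layout per this commit
cache = CacheManager(".process/cache")
cache.set("last_run", {"ok": True}, ttl=300)
print(Colors.success(f"cached: {cache.get('last_run')}"))
print(safe_read_file(".process/ANALYSIS_RESULTS.md"))  # None if the file is missing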
BIN
.claude/python_script/utils/__pycache__/__init__.cpython-313.pyc
Normal file
Binary file not shown.
BIN
.claude/python_script/utils/__pycache__/cache.cpython-313.pyc
Normal file
Binary file not shown.
BIN
.claude/python_script/utils/__pycache__/colors.cpython-313.pyc
Normal file
Binary file not shown.
350
.claude/python_script/utils/cache.py
Normal file
@@ -0,0 +1,350 @@
#!/usr/bin/env python3
"""
Cache Management Utility
Provides unified caching functionality for the analyzer system.
"""

import os
import json
import time
import hashlib
import pickle
import logging
from pathlib import Path
from typing import Any, Optional, Dict, Union
from dataclasses import dataclass, asdict


@dataclass
class CacheEntry:
    """Cache entry with metadata."""
    value: Any
    timestamp: float
    ttl: Optional[float] = None
    key_hash: Optional[str] = None

    def is_expired(self) -> bool:
        """Check if cache entry is expired."""
        if self.ttl is None:
            return False
        return time.time() - self.timestamp > self.ttl

    def to_dict(self) -> Dict:
        """Convert to dictionary for JSON serialization."""
        return {
            'value': self.value,
            'timestamp': self.timestamp,
            'ttl': self.ttl,
            'key_hash': self.key_hash
        }

    @classmethod
    def from_dict(cls, data: Dict) -> 'CacheEntry':
        """Create from dictionary."""
        return cls(**data)


class CacheManager:
    """Unified cache manager with multiple storage backends."""

    def __init__(self, cache_dir: str = "cache", default_ttl: int = 3600):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.default_ttl = default_ttl
        self.logger = logging.getLogger(__name__)

        # In-memory cache for fast access
        self._memory_cache: Dict[str, CacheEntry] = {}

        # Cache subdirectories
        self.json_cache_dir = self.cache_dir / "json"
        self.pickle_cache_dir = self.cache_dir / "pickle"
        self.temp_cache_dir = self.cache_dir / "temp"

        for cache_subdir in [self.json_cache_dir, self.pickle_cache_dir, self.temp_cache_dir]:
            cache_subdir.mkdir(exist_ok=True)

    def _generate_key_hash(self, key: str) -> str:
        """Generate a hash for the cache key."""
        return hashlib.md5(key.encode('utf-8')).hexdigest()

    def _get_cache_path(self, key: str, cache_type: str = "json") -> Path:
        """Get cache file path for a key."""
        key_hash = self._generate_key_hash(key)

        if cache_type == "json":
            return self.json_cache_dir / f"{key_hash}.json"
        elif cache_type == "pickle":
            return self.pickle_cache_dir / f"{key_hash}.pkl"
        elif cache_type == "temp":
            return self.temp_cache_dir / f"{key_hash}.tmp"
        else:
            raise ValueError(f"Unsupported cache type: {cache_type}")

    def set(self, key: str, value: Any, ttl: Optional[int] = None,
            storage: str = "memory") -> bool:
        """Set a cache value."""
        if ttl is None:
            ttl = self.default_ttl

        entry = CacheEntry(
            value=value,
            timestamp=time.time(),
            ttl=ttl,
            key_hash=self._generate_key_hash(key)
        )

        try:
            if storage == "memory":
                self._memory_cache[key] = entry
                return True

            elif storage == "json":
                cache_path = self._get_cache_path(key, "json")
                with open(cache_path, 'w', encoding='utf-8') as f:
                    json.dump(entry.to_dict(), f, indent=2, default=str)
                return True

            elif storage == "pickle":
                cache_path = self._get_cache_path(key, "pickle")
                with open(cache_path, 'wb') as f:
                    pickle.dump(entry, f)
                return True

            else:
                self.logger.warning(f"Unsupported storage type: {storage}")
                return False

        except Exception as e:
            self.logger.error(f"Failed to set cache for key '{key}': {e}")
            return False

    def get(self, key: str, storage: str = "memory",
            default: Any = None) -> Any:
        """Get a cache value."""
        try:
            entry = None

            if storage == "memory":
                entry = self._memory_cache.get(key)

            elif storage == "json":
                cache_path = self._get_cache_path(key, "json")
                if cache_path.exists():
                    with open(cache_path, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                    entry = CacheEntry.from_dict(data)

            elif storage == "pickle":
                cache_path = self._get_cache_path(key, "pickle")
                if cache_path.exists():
                    with open(cache_path, 'rb') as f:
                        entry = pickle.load(f)

            else:
                self.logger.warning(f"Unsupported storage type: {storage}")
                return default

            if entry is None:
                return default

            # Check if entry is expired
            if entry.is_expired():
                self.delete(key, storage)
                return default

            return entry.value

        except Exception as e:
            self.logger.error(f"Failed to get cache for key '{key}': {e}")
            return default

    def delete(self, key: str, storage: str = "memory") -> bool:
        """Delete a cache entry."""
        try:
            if storage == "memory":
                if key in self._memory_cache:
                    del self._memory_cache[key]
                return True

            elif storage in ["json", "pickle", "temp"]:
                cache_path = self._get_cache_path(key, storage)
                if cache_path.exists():
                    cache_path.unlink()
                return True

            else:
                self.logger.warning(f"Unsupported storage type: {storage}")
                return False

        except Exception as e:
            self.logger.error(f"Failed to delete cache for key '{key}': {e}")
            return False

    def exists(self, key: str, storage: str = "memory") -> bool:
        """Check if a cache entry exists and is not expired."""
        return self.get(key, storage) is not None

    def clear(self, storage: Optional[str] = None) -> bool:
        """Clear cache entries."""
        try:
            if storage is None or storage == "memory":
                self._memory_cache.clear()

            if storage is None or storage == "json":
                for cache_file in self.json_cache_dir.glob("*.json"):
                    cache_file.unlink()

            if storage is None or storage == "pickle":
                for cache_file in self.pickle_cache_dir.glob("*.pkl"):
                    cache_file.unlink()

            if storage is None or storage == "temp":
                for cache_file in self.temp_cache_dir.glob("*.tmp"):
                    cache_file.unlink()

            return True

        except Exception as e:
            self.logger.error(f"Failed to clear cache: {e}")
            return False

    def cleanup_expired(self) -> int:
        """Clean up expired cache entries."""
        cleaned_count = 0

        try:
            # Clean memory cache
            expired_keys = []
            for key, entry in self._memory_cache.items():
                if entry.is_expired():
                    expired_keys.append(key)

            for key in expired_keys:
                del self._memory_cache[key]
                cleaned_count += 1

            # Clean file caches
            for cache_type in ["json", "pickle"]:
                cache_dir = self.json_cache_dir if cache_type == "json" else self.pickle_cache_dir
                extension = f".{cache_type}" if cache_type == "json" else ".pkl"

                for cache_file in cache_dir.glob(f"*{extension}"):
                    try:
                        if cache_type == "json":
                            with open(cache_file, 'r', encoding='utf-8') as f:
                                data = json.load(f)
                            entry = CacheEntry.from_dict(data)
                        else:
                            with open(cache_file, 'rb') as f:
                                entry = pickle.load(f)

                        if entry.is_expired():
                            cache_file.unlink()
                            cleaned_count += 1

                    except Exception:
                        # If we can't read the cache file, delete it
                        cache_file.unlink()
                        cleaned_count += 1

            self.logger.info(f"Cleaned up {cleaned_count} expired cache entries")
            return cleaned_count

        except Exception as e:
            self.logger.error(f"Failed to cleanup expired cache entries: {e}")
            return 0

    def get_stats(self) -> Dict[str, Any]:
        """Get cache statistics."""
        stats = {
            'memory_entries': len(self._memory_cache),
            'json_files': len(list(self.json_cache_dir.glob("*.json"))),
            'pickle_files': len(list(self.pickle_cache_dir.glob("*.pkl"))),
            'temp_files': len(list(self.temp_cache_dir.glob("*.tmp"))),
            'cache_dir_size': 0
        }

        # Calculate total cache directory size
        try:
            for cache_file in self.cache_dir.rglob("*"):
                if cache_file.is_file():
                    stats['cache_dir_size'] += cache_file.stat().st_size
        except Exception:
            pass

        return stats

    def set_file_cache(self, key: str, file_path: Union[str, Path],
                       ttl: Optional[int] = None) -> bool:
        """Cache a file by copying it to the cache directory."""
        try:
            source_path = Path(file_path)
            if not source_path.exists():
                return False

            cache_path = self.temp_cache_dir / f"{self._generate_key_hash(key)}.cached"

            # Copy file to cache
            import shutil
            shutil.copy2(source_path, cache_path)

            # Store metadata
            metadata = {
                'original_path': str(source_path),
                'cached_path': str(cache_path),
                'size': source_path.stat().st_size,
                'timestamp': time.time(),
                'ttl': ttl or self.default_ttl
            }

            return self.set(f"{key}_metadata", metadata, ttl, "json")

        except Exception as e:
            self.logger.error(f"Failed to cache file '{file_path}': {e}")
            return False

    def get_file_cache(self, key: str) -> Optional[Path]:
        """Get cached file path."""
        metadata = self.get(f"{key}_metadata", "json")
        if metadata is None:
            return None

        cached_path = Path(metadata['cached_path'])
        if not cached_path.exists():
            # Cache file missing, clean up metadata
            self.delete(f"{key}_metadata", "json")
            return None

        return cached_path


# Global cache manager instance
_global_cache = None


def get_cache_manager(cache_dir: str = "cache", default_ttl: int = 3600) -> CacheManager:
    """Get global cache manager instance."""
    global _global_cache
    if _global_cache is None:
        _global_cache = CacheManager(cache_dir, default_ttl)
    return _global_cache


if __name__ == "__main__":
    # Test cache functionality
    cache = CacheManager("test_cache")

    # Test memory cache
    cache.set("test_key", {"data": "test_value"}, ttl=60)
    print(f"Memory cache: {cache.get('test_key')}")

    # Test JSON cache
    cache.set("json_key", {"complex": {"data": [1, 2, 3]}}, ttl=60, storage="json")
    print(f"JSON cache: {cache.get('json_key', storage='json')}")

    # Test stats
    print(f"Cache stats: {cache.get_stats()}")

    # Clean up
    cache.clear()
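The less obvious part of this API is the `set_file_cache`/`get_file_cache` pair: it stores a copy of a file under `temp/` plus a JSON metadata record keyed as `{key}_metadata`, and expiry is enforced through that metadata entry's TTL. A minimal round-trip sketch, with illustrative paths and the same import-root assumption as above:

# Illustrative round trip for the file-cache API; paths are examples only.
from pathlib import Path
from utils.cache import CacheManager

cache = CacheManager("demo_cache")
Path("report.txt").write_text("analysis output")

if cache.set_file_cache("report", "report.txt", ttl=600):
    cached = cache.get_file_cache("report")  # Path to the cached copy, or None
    print(cached, cached.read_text() if cached else "miss")

cache.clear()  # clears memory and *.json/*.pkl/*.tmp files; the .cached copy persists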
248
.claude/python_script/utils/colors.py
Normal file
@@ -0,0 +1,248 @@
#!/usr/bin/env python3
"""
Terminal Colors Utility
Provides ANSI color codes for terminal output formatting.
"""

import os
import sys
from typing import Optional


class Colors:
    """ANSI color codes for terminal output."""

    # Basic colors
    RED = '\033[0;31m'
    GREEN = '\033[0;32m'
    YELLOW = '\033[1;33m'
    BLUE = '\033[0;34m'
    PURPLE = '\033[0;35m'
    CYAN = '\033[0;36m'
    WHITE = '\033[0;37m'
    BLACK = '\033[0;30m'

    # Bright colors
    BRIGHT_RED = '\033[1;31m'
    BRIGHT_GREEN = '\033[1;32m'
    BRIGHT_YELLOW = '\033[1;33m'
    BRIGHT_BLUE = '\033[1;34m'
    BRIGHT_PURPLE = '\033[1;35m'
    BRIGHT_CYAN = '\033[1;36m'
    BRIGHT_WHITE = '\033[1;37m'

    # Background colors
    BG_RED = '\033[41m'
    BG_GREEN = '\033[42m'
    BG_YELLOW = '\033[43m'
    BG_BLUE = '\033[44m'
    BG_PURPLE = '\033[45m'
    BG_CYAN = '\033[46m'
    BG_WHITE = '\033[47m'

    # Text formatting
    BOLD = '\033[1m'
    DIM = '\033[2m'
    UNDERLINE = '\033[4m'
    BLINK = '\033[5m'
    REVERSE = '\033[7m'
    STRIKETHROUGH = '\033[9m'

    # Reset
    NC = '\033[0m'  # No Color / Reset
    RESET = '\033[0m'

    @classmethod
    def is_tty(cls) -> bool:
        """Check if output is a TTY (supports colors)."""
        return hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()

    @classmethod
    def supports_color(cls) -> bool:
        """Check if the terminal supports color output."""
        # Check environment variables
        if os.getenv('NO_COLOR'):
            return False

        if os.getenv('FORCE_COLOR'):
            return True

        # Check if output is a TTY
        if not cls.is_tty():
            return False

        # Check TERM environment variable
        term = os.getenv('TERM', '').lower()
        if 'color' in term or term in ('xterm', 'xterm-256color', 'screen', 'tmux'):
            return True

        # Windows Terminal detection
        if os.name == 'nt':
            # Windows 10 version 1511 and later support ANSI colors
            try:
                import subprocess
                result = subprocess.run(['ver'], capture_output=True, text=True, shell=True)
                if result.returncode == 0:
                    version_info = result.stdout
                    # Extract Windows version (simplified check)
                    if 'Windows' in version_info:
                        return True
            except Exception:
                pass

        return False

    @classmethod
    def colorize(cls, text: str, color: str, bold: bool = False) -> str:
        """Apply color to text if colors are supported."""
        if not cls.supports_color():
            return text

        prefix = color
        if bold:
            prefix = cls.BOLD + prefix

        return f"{prefix}{text}{cls.RESET}"

    @classmethod
    def red(cls, text: str, bold: bool = False) -> str:
        """Color text red."""
        return cls.colorize(text, cls.RED, bold)

    @classmethod
    def green(cls, text: str, bold: bool = False) -> str:
        """Color text green."""
        return cls.colorize(text, cls.GREEN, bold)

    @classmethod
    def yellow(cls, text: str, bold: bool = False) -> str:
        """Color text yellow."""
        return cls.colorize(text, cls.YELLOW, bold)

    @classmethod
    def blue(cls, text: str, bold: bool = False) -> str:
        """Color text blue."""
        return cls.colorize(text, cls.BLUE, bold)

    @classmethod
    def purple(cls, text: str, bold: bool = False) -> str:
        """Color text purple."""
        return cls.colorize(text, cls.PURPLE, bold)

    @classmethod
    def cyan(cls, text: str, bold: bool = False) -> str:
        """Color text cyan."""
        return cls.colorize(text, cls.CYAN, bold)

    @classmethod
    def bold(cls, text: str) -> str:
        """Make text bold."""
        return cls.colorize(text, '', True)

    @classmethod
    def dim(cls, text: str) -> str:
        """Make text dim."""
        return cls.colorize(text, cls.DIM)

    @classmethod
    def underline(cls, text: str) -> str:
        """Underline text."""
        return cls.colorize(text, cls.UNDERLINE)

    @classmethod
    def success(cls, text: str) -> str:
        """Format success message (green)."""
        return cls.green(f"[SUCCESS] {text}", bold=True)

    @classmethod
    def error(cls, text: str) -> str:
        """Format error message (red)."""
        return cls.red(f"[ERROR] {text}", bold=True)

    @classmethod
    def warning(cls, text: str) -> str:
        """Format warning message (yellow)."""
        return cls.yellow(f"[WARNING] {text}", bold=True)

    @classmethod
    def info(cls, text: str) -> str:
        """Format info message (blue)."""
        return cls.blue(f"[INFO] {text}")

    @classmethod
    def highlight(cls, text: str) -> str:
        """Highlight text (cyan background)."""
        if not cls.supports_color():
            return f"[{text}]"
        return f"{cls.BG_CYAN}{cls.BLACK}{text}{cls.RESET}"

    @classmethod
    def strip_colors(cls, text: str) -> str:
        """Remove ANSI color codes from text."""
        import re
        ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
        return ansi_escape.sub('', text)


# Convenience functions for common usage
def colorize(text: str, color: str) -> str:
    """Convenience function to colorize text."""
    return Colors.colorize(text, color)


def red(text: str) -> str:
    """Red text."""
    return Colors.red(text)


def green(text: str) -> str:
    """Green text."""
    return Colors.green(text)


def yellow(text: str) -> str:
    """Yellow text."""
    return Colors.yellow(text)


def blue(text: str) -> str:
    """Blue text."""
    return Colors.blue(text)


def success(text: str) -> str:
    """Success message."""
    return Colors.success(text)


def error(text: str) -> str:
    """Error message."""
    return Colors.error(text)


def warning(text: str) -> str:
    """Warning message."""
    return Colors.warning(text)


def info(text: str) -> str:
    """Info message."""
    return Colors.info(text)


if __name__ == "__main__":
    # Test color output
    print(Colors.red("Red text"))
    print(Colors.green("Green text"))
    print(Colors.yellow("Yellow text"))
    print(Colors.blue("Blue text"))
    print(Colors.purple("Purple text"))
    print(Colors.cyan("Cyan text"))
    print(Colors.bold("Bold text"))
    print(Colors.success("Success message"))
    print(Colors.error("Error message"))
    print(Colors.warning("Warning message"))
    print(Colors.info("Info message"))
    print(Colors.highlight("Highlighted text"))
    print(f"Color support: {Colors.supports_color()}")
    print(f"TTY: {Colors.is_tty()}")
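Because every helper routes through `colorize`, which consults `supports_color()`, output degrades to plain text automatically; `NO_COLOR` wins over `FORCE_COLOR` simply because it is checked first. A quick sketch of that gating, under the same import-root assumption:

# Sketch: the same call yields ANSI codes on a color terminal, plain text otherwise.
import os
from utils.colors import Colors, success

os.environ['NO_COLOR'] = '1'               # checked before FORCE_COLOR, so this wins
assert Colors.red("plain") == "plain"      # no escape codes emitted

del os.environ['NO_COLOR']
os.environ['FORCE_COLOR'] = '1'
print(success("colors forced on"))                 # bold green "[SUCCESS] ..." line
print(Colors.strip_colors(Colors.red("x")))        # "x", ANSI codes stripped back out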
378
.claude/python_script/utils/io_helpers.py
Normal file
@@ -0,0 +1,378 @@
#!/usr/bin/env python3
"""
I/O Helper Functions
Provides common file and directory operations with error handling.
"""

import os
import json
import yaml
import logging
from pathlib import Path
from typing import Any, Optional, Union, List, Dict
import shutil
import tempfile


class IOHelpers:
    """Collection of I/O helper methods."""

    @staticmethod
    def ensure_directory(path: Union[str, Path], mode: int = 0o755) -> bool:
        """Ensure directory exists, create if necessary."""
        try:
            dir_path = Path(path)
            dir_path.mkdir(parents=True, exist_ok=True, mode=mode)
            return True
        except (PermissionError, OSError) as e:
            logging.error(f"Failed to create directory '{path}': {e}")
            return False

    @staticmethod
    def safe_read_file(file_path: Union[str, Path], encoding: str = 'utf-8',
                       fallback_encoding: str = 'latin-1') -> Optional[str]:
        """Safely read file content with encoding fallback."""
        path = Path(file_path)
        if not path.exists():
            return None

        encodings = [encoding, fallback_encoding] if encoding != fallback_encoding else [encoding]

        for enc in encodings:
            try:
                with open(path, 'r', encoding=enc) as f:
                    return f.read()
            except UnicodeDecodeError:
                continue
            except (IOError, OSError) as e:
                logging.error(f"Failed to read file '{file_path}': {e}")
                return None

        logging.warning(f"Failed to decode file '{file_path}' with any encoding")
        return None

    @staticmethod
    def safe_write_file(file_path: Union[str, Path], content: str,
                        encoding: str = 'utf-8', backup: bool = False) -> bool:
        """Safely write content to file with optional backup."""
        path = Path(file_path)

        try:
            # Create backup if requested and file exists
            if backup and path.exists():
                backup_path = path.with_suffix(path.suffix + '.bak')
                shutil.copy2(path, backup_path)

            # Ensure parent directory exists
            if not IOHelpers.ensure_directory(path.parent):
                return False

            # Write to temporary file first, then move to final location
            with tempfile.NamedTemporaryFile(mode='w', encoding=encoding,
                                             dir=path.parent, delete=False) as tmp_file:
                tmp_file.write(content)
                tmp_path = Path(tmp_file.name)

            # Atomic move
            shutil.move(str(tmp_path), str(path))
            return True

        except (IOError, OSError) as e:
            logging.error(f"Failed to write file '{file_path}': {e}")
            return False

    @staticmethod
    def read_json(file_path: Union[str, Path], default: Any = None) -> Any:
        """Read JSON file with error handling."""
        content = IOHelpers.safe_read_file(file_path)
        if content is None:
            return default

        try:
            return json.loads(content)
        except json.JSONDecodeError as e:
            logging.error(f"Failed to parse JSON from '{file_path}': {e}")
            return default

    @staticmethod
    def write_json(file_path: Union[str, Path], data: Any,
                   indent: int = 2, backup: bool = False) -> bool:
        """Write data to JSON file."""
        try:
            content = json.dumps(data, indent=indent, ensure_ascii=False, default=str)
            return IOHelpers.safe_write_file(file_path, content, backup=backup)
        except (TypeError, ValueError) as e:
            logging.error(f"Failed to serialize data to JSON for '{file_path}': {e}")
            return False

    @staticmethod
    def read_yaml(file_path: Union[str, Path], default: Any = None) -> Any:
        """Read YAML file with error handling."""
        content = IOHelpers.safe_read_file(file_path)
        if content is None:
            return default

        try:
            return yaml.safe_load(content)
        except yaml.YAMLError as e:
            logging.error(f"Failed to parse YAML from '{file_path}': {e}")
            return default

    @staticmethod
    def write_yaml(file_path: Union[str, Path], data: Any, backup: bool = False) -> bool:
        """Write data to YAML file."""
        try:
            content = yaml.dump(data, default_flow_style=False, allow_unicode=True)
            return IOHelpers.safe_write_file(file_path, content, backup=backup)
        except yaml.YAMLError as e:
            logging.error(f"Failed to serialize data to YAML for '{file_path}': {e}")
            return False

    @staticmethod
    def find_files(directory: Union[str, Path], pattern: str = "*",
                   recursive: bool = True, max_depth: Optional[int] = None) -> List[Path]:
        """Find files matching pattern in directory."""
        dir_path = Path(directory)
        if not dir_path.exists() or not dir_path.is_dir():
            return []

        files = []
        try:
            if recursive:
                if max_depth is not None:
                    # Implement depth-limited search
                    def search_with_depth(path: Path, current_depth: int = 0):
                        if current_depth > max_depth:
                            return

                        for item in path.iterdir():
                            if item.is_file() and item.match(pattern):
                                files.append(item)
                            elif item.is_dir() and current_depth < max_depth:
                                search_with_depth(item, current_depth + 1)

                    search_with_depth(dir_path)
                else:
                    files = list(dir_path.rglob(pattern))
            else:
                files = list(dir_path.glob(pattern))

            return sorted(files)

        except (PermissionError, OSError) as e:
            logging.error(f"Failed to search files in '{directory}': {e}")
            return []

    @staticmethod
    def get_file_stats(file_path: Union[str, Path]) -> Optional[Dict[str, Any]]:
        """Get file statistics."""
        path = Path(file_path)
        if not path.exists():
            return None

        try:
            stat = path.stat()
            return {
                'size': stat.st_size,
                'modified_time': stat.st_mtime,
                'created_time': stat.st_ctime,
                'is_file': path.is_file(),
                'is_dir': path.is_dir(),
                'permissions': oct(stat.st_mode)[-3:],
                'extension': path.suffix.lower(),
                'name': path.name,
                'parent': str(path.parent)
            }
        except (OSError, PermissionError) as e:
            logging.error(f"Failed to get stats for '{file_path}': {e}")
            return None

    @staticmethod
    def copy_with_backup(source: Union[str, Path], dest: Union[str, Path]) -> bool:
        """Copy file with automatic backup if destination exists."""
        source_path = Path(source)
        dest_path = Path(dest)

        if not source_path.exists():
            logging.error(f"Source file '{source}' does not exist")
            return False

        try:
            # Create backup if destination exists
            if dest_path.exists():
                backup_path = dest_path.with_suffix(dest_path.suffix + '.bak')
                shutil.copy2(dest_path, backup_path)
                logging.info(f"Created backup: {backup_path}")

            # Ensure destination directory exists
            if not IOHelpers.ensure_directory(dest_path.parent):
                return False

            # Copy file
            shutil.copy2(source_path, dest_path)
            return True

        except (IOError, OSError) as e:
            logging.error(f"Failed to copy '{source}' to '{dest}': {e}")
            return False

    @staticmethod
    def move_with_backup(source: Union[str, Path], dest: Union[str, Path]) -> bool:
        """Move file with automatic backup if destination exists."""
        source_path = Path(source)
        dest_path = Path(dest)

        if not source_path.exists():
            logging.error(f"Source file '{source}' does not exist")
            return False

        try:
            # Create backup if destination exists
            if dest_path.exists():
                backup_path = dest_path.with_suffix(dest_path.suffix + '.bak')
                shutil.move(str(dest_path), str(backup_path))
                logging.info(f"Created backup: {backup_path}")

            # Ensure destination directory exists
            if not IOHelpers.ensure_directory(dest_path.parent):
                return False

            # Move file
            shutil.move(str(source_path), str(dest_path))
            return True

        except (IOError, OSError) as e:
            logging.error(f"Failed to move '{source}' to '{dest}': {e}")
            return False

    @staticmethod
    def clean_temp_files(directory: Union[str, Path], extensions: List[str] = None,
                         max_age_hours: int = 24) -> int:
        """Clean temporary files older than specified age."""
        if extensions is None:
            extensions = ['.tmp', '.temp', '.bak', '.swp', '.~']

        dir_path = Path(directory)
        if not dir_path.exists():
            return 0

        import time
        cutoff_time = time.time() - (max_age_hours * 3600)
        cleaned_count = 0

        try:
            for file_path in dir_path.rglob('*'):
                if file_path.is_file():
                    # Check extension
                    if file_path.suffix.lower() in extensions:
                        # Check age
                        if file_path.stat().st_mtime < cutoff_time:
                            try:
                                file_path.unlink()
                                cleaned_count += 1
                            except OSError:
                                continue

            logging.info(f"Cleaned {cleaned_count} temporary files from '{directory}'")
            return cleaned_count

        except (PermissionError, OSError) as e:
            logging.error(f"Failed to clean temp files in '{directory}': {e}")
            return 0

    @staticmethod
    def get_directory_size(directory: Union[str, Path]) -> int:
        """Get total size of directory in bytes."""
        dir_path = Path(directory)
        if not dir_path.exists() or not dir_path.is_dir():
            return 0

        total_size = 0
        try:
            for file_path in dir_path.rglob('*'):
                if file_path.is_file():
                    total_size += file_path.stat().st_size
        except (PermissionError, OSError):
            pass

        return total_size

    @staticmethod
    def make_executable(file_path: Union[str, Path]) -> bool:
        """Make file executable (Unix/Linux/Mac)."""
        if os.name == 'nt':  # Windows
            return True  # Windows doesn't use Unix permissions

        try:
            path = Path(file_path)
            current_mode = path.stat().st_mode
            path.chmod(current_mode | 0o111)  # Add execute permission
            return True
        except (OSError, PermissionError) as e:
            logging.error(f"Failed to make '{file_path}' executable: {e}")
            return False


# Convenience functions
def ensure_directory(path: Union[str, Path]) -> bool:
    """Ensure directory exists."""
    return IOHelpers.ensure_directory(path)


def safe_read_file(file_path: Union[str, Path]) -> Optional[str]:
    """Safely read file content."""
    return IOHelpers.safe_read_file(file_path)


def safe_write_file(file_path: Union[str, Path], content: str) -> bool:
    """Safely write content to file."""
    return IOHelpers.safe_write_file(file_path, content)


def read_json(file_path: Union[str, Path], default: Any = None) -> Any:
    """Read JSON file."""
    return IOHelpers.read_json(file_path, default)


def write_json(file_path: Union[str, Path], data: Any) -> bool:
    """Write data to JSON file."""
    return IOHelpers.write_json(file_path, data)


def read_yaml(file_path: Union[str, Path], default: Any = None) -> Any:
    """Read YAML file."""
    return IOHelpers.read_yaml(file_path, default)


def write_yaml(file_path: Union[str, Path], data: Any) -> bool:
    """Write data to YAML file."""
    return IOHelpers.write_yaml(file_path, data)


if __name__ == "__main__":
    # Test I/O operations
    test_dir = Path("test_io")

    # Test directory creation
    print(f"Create directory: {ensure_directory(test_dir)}")

    # Test file operations
    test_file = test_dir / "test.txt"
    content = "Hello, World!\nThis is a test file."

    print(f"Write file: {safe_write_file(test_file, content)}")
    print(f"Read file: {safe_read_file(test_file)}")

    # Test JSON operations
    json_file = test_dir / "test.json"
    json_data = {"name": "test", "numbers": [1, 2, 3], "nested": {"key": "value"}}

    print(f"Write JSON: {write_json(json_file, json_data)}")
    print(f"Read JSON: {read_json(json_file)}")

    # Test file stats
    stats = IOHelpers.get_file_stats(test_file)
    print(f"File stats: {stats}")

    # Cleanup
    shutil.rmtree(test_dir, ignore_errors=True)
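Worth noting: `safe_write_file` gets its crash safety from the temp-file-then-move pattern. The temporary file is created in the destination's own directory, so the final `shutil.move` is a same-filesystem rename and readers never observe a half-written file. A minimal sketch of that behavior, with illustrative paths and the same import-root assumption as the earlier sketches:

# Sketch of the atomic-write path described above; file names are examples only.
from utils.io_helpers import IOHelpers, read_json, write_json

ok = IOHelpers.safe_write_file("out/config.txt", "key=value\n", backup=True)
print(f"atomic write: {ok}")   # parent dir auto-created; .bak kept if file existed

write_json("out/state.json", {"step": 1})          # serializes, then atomic write
print(read_json("out/state.json", default={}))     # {'step': 1}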