#!/usr/bin/env python3
"""Nomenclature Migration Script (ADR-100).

Migrates CODITECT components from current naming to Track-Integrated
nomenclature.

Format: [DOMAIN].[CONTENT].[###]-[DESCRIPTIVE-NAME]

Usage:
    python3 nomenclature-migration.py --plan      # Show migration plan
    python3 nomenclature-migration.py --dry-run   # Preview changes
    python3 nomenclature-migration.py --execute   # Execute migration
    python3 nomenclature-migration.py --validate  # Validate after migration
    python3 nomenclature-migration.py --rollback  # Rollback using manifest

Author: CODITECT Team
Version: 1.0.0
Created: 2026-01-21
ADR: ADR-100
"""

import argparse
import json
import os
import re
import shutil
import sys
from dataclasses import dataclass, field
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple
# =============================================================================
# CONFIGURATION
# =============================================================================

# Content track definitions
CONTENT_TRACKS = {
    'P': 'Product',    # Ships to customers
    'U': 'User',       # Customer support, training
    'V': 'Developer',  # Contributor documentation
    'O': 'Operations', # Deployment, infrastructure
    'X': 'External',   # Marketing, GTM
    'Z': 'Generated',  # Auto-built, read-only
}

# Domain track definitions (existing)
DOMAIN_TRACKS = {
    'A': 'Backend',
    'B': 'Frontend',
    'C': 'DevOps',
    'D': 'Security',
    'E': 'Testing',
    'F': 'Documentation',
    'G': 'DMS',
    'H': 'Innovation',
}

# Directory mapping: current -> (domain, content, sequence, name)
# NOTE(review): many keys here already carry full nomenclature codes AND embed
# the code in the `name` field (e.g. 'H.P.001-AGENTS'), which makes
# format_nomenclature() produce doubled prefixes like 'H.P.001-H.P.001-AGENTS'.
# This looks like the mapping was rewritten by a previous run of this script
# against itself — confirm against the pre-migration directory tree.
DIRECTORY_MAPPING = {
    # Innovation/Product (H.P)
    'H.P.001-AGENTS': ('H', 'P', '001', 'H.P.001-AGENTS'),
    'H.P.002-COMMANDS': ('H', 'P', '002', 'H.P.002-COMMANDS'),
    'H.P.003-SKILLS': ('H', 'P', '003', 'H.P.003-SKILLS'),
    'H.P.004-SCRIPTS': ('H', 'P', '004', 'H.P.004-SCRIPTS'),
    'H.P.005-HOOKS': ('H', 'P', '005', 'H.P.005-HOOKS'),
    'H.P.006-WORKFLOWS': ('H', 'P', '006', 'H.P.006-WORKFLOWS'),
    'H.P.007-PROMPTS': ('H', 'P', '007', 'H.P.007-PROMPTS'),
    'H.P.008-TEMPLATES': ('H', 'P', '008', 'H.P.008-TEMPLATES'),
    # Documentation/User (F.U)
    'F.U.001-GETTING-STARTED': ('F', 'U', '001', 'getting-started'),
    'F.U.002-GUIDES': ('F', 'U', '002', 'guides'),
    'F.U.003-TUTORIALS': ('F', 'U', '003', 'tutorials'),
    'F.U.004-BEST-PRACTICES': ('F', 'U', '004', 'best-practices'),
    'F.U.005-TROUBLESHOOTING': ('F', 'U', '005', 'troubleshooting'),
    'F.U.006-REFERENCE': ('F', 'U', '006', 'reference'),
    'docs-customer/training': ('F', 'U', '007', 'training'),
    # Documentation/Developer (F.V)
    'F.V.001-ADRS': ('F', 'V', '001', 'adrs'),
    'F.V.002-ARCHITECTURE': ('F', 'V', '002', 'architecture'),
    'F.V.003-STANDARDS': ('F', 'V', '003', 'standards'),
    'F.V.004-RESEARCH': ('F', 'V', '004', 'research'),
    'F.V.005-DEVELOPMENT': ('F', 'V', '005', 'development'),
    'F.V.006-OPERATIONS': ('F', 'V', '006', 'operations'),
    # DevOps/Operations (C.O)
    'infrastructure': ('C', 'O', '001', 'infrastructure'),
    'deployment': ('C', 'O', '002', 'deployment'),
    # Standards (stays at root but gets coded)
    'F.V.010-STANDARDS-CORE': ('F', 'V', '010', 'standards-core'),
    # Config
    'H.P.009-CONFIG': ('H', 'P', '009', 'H.P.009-CONFIG'),
    # Internal project (special handling)
    'H.V.001-PROJECT-MANAGEMENT': ('H', 'V', '001', 'project-management'),
}

# Files that should NOT be migrated (stay at root).
# Entries starting with '*' are suffix wildcards, matched via endswith()
# in should_exclude(); everything else is an exact-name match.
EXCLUDE_PATTERNS = [
    'CLAUDE.md',
    'README.md',
    'LICENSE',
    '.git',
    '.gitignore',
    '.gitmodules',
    'requirements.txt',
    'pyproject.toml',
    'setup.py',
    'package.json',
    '.mcp.json',
    'node_modules',
    '__pycache__',         # was garbled to 'pycache'; exact dir name restored
    '.venv',
    '.env',
    '*.pyc',               # suffix wildcard (restored '*' prefix)
    '.DS_Store',
    'target',              # Rust build artifacts
    'dist',                # Build output
    'build',               # Build output
    '.fingerprint',        # Cargo fingerprints
    'playwright-report',   # Test artifacts
    'test-results',        # Test artifacts
    '*.log',               # Log files (suffix wildcard, restored '*' prefix)
]

# Validation regex for new nomenclature.
# Dots are escaped: the original unescaped '.' matched ANY character, so
# strings like 'FXU0001-BAD' would incorrectly validate.
NOMENCLATURE_PATTERN = re.compile(r'^([A-H]\.)?[POUVXZ]\.\d{3}-[A-Z][A-Z0-9-]+$')
# =============================================================================
# DATA CLASSES
# =============================================================================
@dataclass
class MigrationItem:
    """A single migration move from ``source`` to ``destination``."""
    source: Path
    destination: Path
    item_type: str        # 'directory' or 'file'
    status: str = 'pending'  # pending, planned, migrated, skipped, error
    error_message: str = ''  # populated only when status == 'error'
@dataclass
class MigrationManifest:
    """Migration manifest for tracking and rollback.

    Serialized to JSON via :meth:`to_dict` and restored via
    :meth:`from_dict`, so a failed or completed run can be rolled back.
    """
    version: str = '1.0.0'
    adr: str = 'ADR-100'
    created: str = ''    # ISO timestamp when the plan was created
    completed: str = ''  # ISO timestamp when execution finished
    status: str = 'pending'
    items: List[MigrationItem] = field(default_factory=list)
    reference_updates: List[Dict] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict:
        """Return a JSON-serializable dict (Path objects become strings)."""
        return {
            'version': self.version,
            'adr': self.adr,
            'created': self.created,
            'completed': self.completed,
            'status': self.status,
            'items': [
                {
                    'source': str(i.source),
                    'destination': str(i.destination),
                    'item_type': i.item_type,
                    'status': i.status,
                    'error_message': i.error_message,
                }
                for i in self.items
            ],
            'reference_updates': self.reference_updates,
            'errors': self.errors,
        }

    @classmethod
    def from_dict(cls, data: Dict) -> 'MigrationManifest':
        """Rebuild a manifest from a dict produced by :meth:`to_dict`.

        Missing keys fall back to the dataclass defaults, so older
        manifests remain loadable.
        """
        manifest = cls(
            version=data.get('version', '1.0.0'),
            adr=data.get('adr', 'ADR-100'),
            created=data.get('created', ''),
            completed=data.get('completed', ''),
            status=data.get('status', 'pending'),
            reference_updates=data.get('reference_updates', []),
            errors=data.get('errors', []),
        )
        for item_data in data.get('items', []):
            manifest.items.append(MigrationItem(
                source=Path(item_data['source']),
                destination=Path(item_data['destination']),
                item_type=item_data['item_type'],
                status=item_data.get('status', 'pending'),
                error_message=item_data.get('error_message', ''),
            ))
        return manifest
# =============================================================================
# HELPER FUNCTIONS
# =============================================================================
def format_nomenclature(domain: str, content: str, seq: str, name: str) -> str:
    """Build a nomenclature code such as ``F.U.001-GETTING-STARTED``.

    The descriptive name is upper-cased with underscores and spaces turned
    into hyphens; the domain prefix is omitted when *domain* is falsy.
    """
    slug = name.upper().translate(str.maketrans({'_': '-', ' ': '-'}))
    parts = [domain, content, seq] if domain else [content, seq]
    return f"{'.'.join(parts)}-{slug}"
def get_new_path(current_path: Path, base_dir: Path) -> Optional[Tuple[Path, str]]:
    """Map *current_path* to its new nomenclature-coded location.

    Returns ``(new_path, nomenclature_code)`` or ``None`` when no entry in
    DIRECTORY_MAPPING applies.

    Raises ValueError if *current_path* is not under *base_dir*
    (propagated from ``Path.relative_to``).
    """
    rel_path = current_path.relative_to(base_dir)
    # NOTE(review): str(rel_path) uses the OS separator; the '/'-based
    # matching below assumes POSIX paths — confirm if Windows support matters.
    rel_str = str(rel_path)

    # Check direct mapping
    for pattern, (domain, content, seq, name) in DIRECTORY_MAPPING.items():
        if rel_str == pattern or rel_str.startswith(pattern + '/'):
            nomenclature = format_nomenclature(domain, content, seq, name)
            if rel_str == pattern:
                # Direct match - rename the directory itself
                new_path = base_dir / nomenclature
            else:
                # Subdirectory/file within mapped directory: keep the tail
                remainder = rel_str[len(pattern):].lstrip('/')
                new_path = base_dir / nomenclature / remainder
            return new_path, nomenclature
    return None
# Directories whose entire subtree is always excluded (build/dependency dirs).
_ALWAYS_EXCLUDED_DIRS = frozenset({
    'target', 'node_modules', '__pycache__', '.venv', 'dist', 'build',
    '.fingerprint', 'playwright-report', 'test-results',
})


def should_exclude(path: Path) -> bool:
    """Return True when *path* must be left out of the migration.

    A path is excluded when any component is a well-known build/dependency
    directory, or when its final name matches an EXCLUDE_PATTERNS entry
    (entries starting with '*' are suffix wildcards).
    """
    # Check if any parent directory should be excluded
    if any(part in _ALWAYS_EXCLUDED_DIRS for part in path.parts):
        return True

    name = path.name
    for pattern in EXCLUDE_PATTERNS:
        if pattern.startswith('*'):
            # Wildcard pattern, e.g. '*.pyc' matches any name ending '.pyc'
            if name.endswith(pattern[1:]):
                return True
        elif name == pattern:
            return True
    return False
def find_references(base_dir: Path, old_path: str) -> List[Tuple[Path, int, str]]:
    """Find all references to *old_path* in markdown and code files.

    Returns a list of ``(file, 1-based line number, stripped line)`` tuples.
    Unreadable files (bad encoding, no permission) are silently skipped.
    """
    references = []
    # File extensions to search
    extensions = {'.md', '.py', '.sh', '.yaml', '.yml', '.json', '.ts', '.js'}

    for file_path in base_dir.rglob('*'):
        if not (file_path.is_file() and file_path.suffix in extensions):
            continue
        if should_exclude(file_path):
            continue
        try:
            content = file_path.read_text(encoding='utf-8')
        except (UnicodeDecodeError, PermissionError):
            continue
        for i, line in enumerate(content.split('\n'), 1):
            if old_path in line:
                references.append((file_path, i, line.strip()))
    return references
def is_valid_path_context(line: str, old_path: str) -> bool:
    """Check whether *old_path* appears in a path-like context in *line*.

    Returns False for identifier-embedded matches such as:
      - has_hooks = True        ('hooks' inside a variable name)
      - configure_hooks()       ('hooks' inside a function name)

    Returns True for genuine paths such as:
      - ~/.coditect/hooks/      (actual path)
      - "hooks/dispatcher.sh"   (string path)

    Only the first occurrence in the line is inspected.
    """
    start = line.find(old_path)
    if start < 0:
        return False

    end = start + len(old_path)
    # Surrounding characters must look like path delimiters/quoting, or the
    # match must touch the line boundary.
    before_ok = start == 0 or line[start - 1] in '/\\~"\'\t ({[:'
    after_ok = end >= len(line) or line[end] in '/\\"\'\t )}]:.'
    return before_ok and after_ok
def context_aware_replace(content: str, old_path: str, new_path: str) -> Tuple[str, int]:
    """Replace *old_path* with *new_path* only in valid path contexts.

    Returns the updated content and the number of replacements made.

    Lines are processed independently; when the first occurrence in a line
    is in a valid path context, ALL occurrences on that line are replaced
    (the per-occurrence granularity is line-level, matching the original
    behavior). A dead local ('count', computed but never used) was removed.
    """
    out_lines = []
    total_count = 0
    for line in content.split('\n'):
        if old_path in line and is_valid_path_context(line, old_path):
            # Valid context: replace every occurrence on this line.
            total_count += line.count(old_path)
            out_lines.append(line.replace(old_path, new_path))
        else:
            out_lines.append(line)
    return '\n'.join(out_lines), total_count
def update_references(
    base_dir: Path,
    old_path: str,
    new_path: str,
    dry_run: bool = True,
) -> List[Dict]:
    """Update all references from *old_path* to *new_path*.

    Uses context-aware replacement so identifiers that merely contain the
    path text are left alone. Files are only rewritten when ``dry_run`` is
    False; the returned list describes every file that would change, plus
    one entry per unreadable file (with an 'error' key).
    """
    updates = []
    extensions = {'.md', '.py', '.sh', '.yaml', '.yml', '.json', '.ts', '.js'}

    for file_path in base_dir.rglob('*'):
        if not (file_path.is_file() and file_path.suffix in extensions):
            continue
        if should_exclude(file_path):
            continue
        try:
            content = file_path.read_text(encoding='utf-8')
            if old_path in content:
                # Context-aware replacement avoids touching variable names.
                new_content, count = context_aware_replace(content, old_path, new_path)
                if content != new_content:
                    updates.append({
                        'file': str(file_path),
                        'old': old_path,
                        'new': new_path,
                        'count': count,
                    })
                    if not dry_run:
                        file_path.write_text(new_content, encoding='utf-8')
        except (UnicodeDecodeError, PermissionError) as e:
            updates.append({
                'file': str(file_path),
                'error': str(e),
            })
    return updates
# =============================================================================
# MIGRATION FUNCTIONS
# =============================================================================
def create_migration_plan(base_dir: Path) -> MigrationManifest:
    """Create a migration plan based on the current directory structure.

    Only DIRECTORY_MAPPING entries that actually exist under *base_dir*
    produce plan items; each item records source, destination, and type.
    """
    manifest = MigrationManifest(
        created=datetime.now().isoformat(),
    )

    # Find all directories (or files) that need migration
    for current_pattern, mapping in DIRECTORY_MAPPING.items():
        current_path = base_dir / current_pattern
        if current_path.exists():
            domain, content, seq, name = mapping
            new_name = format_nomenclature(domain, content, seq, name)
            new_path = base_dir / new_name
            manifest.items.append(MigrationItem(
                source=current_path,
                destination=new_path,
                item_type='directory' if current_path.is_dir() else 'file',
            ))
    return manifest
def execute_migration(
    manifest: MigrationManifest,
    base_dir: Path,
    dry_run: bool = True,
) -> MigrationManifest:
    """Execute (or, with ``dry_run``, preview) the migration plan.

    Moves each planned item, then rewrites cross-references in text/code
    files. Per-item failures are recorded on the item and in
    ``manifest.errors`` rather than aborting the run.
    """
    print(f"\n{'DRY RUN: ' if dry_run else ''}Executing migration...")
    print(f"Items to migrate: {len(manifest.items)}")

    for item in manifest.items:
        try:
            print(f"\n {item.source.name} -> {item.destination.name}")
            if item.destination.exists():
                # Never clobber an existing destination.
                print(f" SKIP: Destination already exists")
                item.status = 'skipped'
                continue
            if not dry_run:
                # Create parent directories
                item.destination.parent.mkdir(parents=True, exist_ok=True)
                # Move the item
                shutil.move(str(item.source), str(item.destination))
                item.status = 'migrated'
                print(f" MIGRATED")
            else:
                item.status = 'planned'
                print(f" PLANNED (dry-run)")
        except Exception as e:
            item.status = 'error'
            item.error_message = str(e)
            manifest.errors.append(f"{item.source}: {e}")
            print(f" ERROR: {e}")

    # Update references (no files are written when dry_run is True)
    print("\n Updating references...")
    for item in manifest.items:
        if item.status in ('migrated', 'planned'):
            old_rel = str(item.source.relative_to(base_dir))
            new_rel = str(item.destination.relative_to(base_dir))
            updates = update_references(base_dir, old_rel, new_rel, dry_run)
            manifest.reference_updates.extend(updates)
            if updates:
                print(f" {old_rel} -> {new_rel}: {len(updates)} files")

    manifest.completed = datetime.now().isoformat()
    manifest.status = 'completed' if not dry_run else 'planned'
    return manifest
def validate_migration(base_dir: Path) -> Dict:
    """Validate that the migration was successful.

    Checks (a) that top-level directory names comply with
    NOMENCLATURE_PATTERN and (b) that no files still reference old paths.
    Returns a results dict with 'valid', compliance lists, broken
    references, and summary 'stats'.
    """
    results = {
        'valid': True,
        'nomenclature_compliant': [],
        'nomenclature_violations': [],
        'broken_references': [],
        'stats': {},
    }

    # Check all top-level directories
    for item in base_dir.iterdir():
        if item.is_dir() and not should_exclude(item):
            name = item.name
            if NOMENCLATURE_PATTERN.match(name):
                results['nomenclature_compliant'].append(name)
            elif name not in ['internal', '.git', '.venv', '__pycache__', 'node_modules']:
                # Known tool directories are exempt from the naming rule.
                if name not in ['.coditect', '.claude']:
                    results['nomenclature_violations'].append(name)
                    results['valid'] = False

    # Check for broken references.
    # NOTE(review): these patterns are the NEW directory names, which a
    # post-migration tree legitimately contains — they look like the old
    # names ('agents/', 'commands/', ...) rewritten by the migration itself.
    # Confirm against the intended pre-migration names.
    extensions = {'.md', '.py', '.sh', '.yaml', '.yml', '.json'}
    broken_patterns = ['H.P.001-AGENTS/', 'H.P.002-COMMANDS/', 'H.P.003-SKILLS/',
                       'H.P.004-SCRIPTS/', 'H.P.005-HOOKS/',
                       'docs-customer/', 'docs-contributor/']

    for file_path in base_dir.rglob('*'):
        if not (file_path.is_file() and file_path.suffix in extensions):
            continue
        if should_exclude(file_path):
            continue
        try:
            content = file_path.read_text(encoding='utf-8')
        except (UnicodeDecodeError, PermissionError):
            continue
        for pattern in broken_patterns:
            if pattern in content:
                # Only count link-like usages, not prose mentions.
                if f'](../{pattern}' in content or f'"]/{pattern}' in content:
                    results['broken_references'].append({
                        'file': str(file_path),
                        'pattern': pattern,
                    })

    results['stats'] = {
        'compliant_dirs': len(results['nomenclature_compliant']),
        'violations': len(results['nomenclature_violations']),
        'broken_refs': len(results['broken_references']),
    }
    return results
def rollback_migration(manifest_path: Path, base_dir: Path) -> bool:
    """Rollback a migration using its saved manifest.

    Moves migrated items back (in reverse order) and reverses recorded
    reference updates. Returns False only when the manifest file is
    missing; individual move failures are printed and skipped.
    """
    if not manifest_path.exists():
        print(f"ERROR: Manifest not found: {manifest_path}")
        return False

    manifest = MigrationManifest.from_dict(
        json.loads(manifest_path.read_text())
    )

    print(f"\nRolling back migration from {manifest.created}...")

    # Reverse the migrations (newest first)
    for item in reversed(manifest.items):
        if item.status == 'migrated':
            try:
                print(f" {item.destination.name} -> {item.source.name}")
                if item.source.exists():
                    print(f" SKIP: Original already exists")
                    continue
                shutil.move(str(item.destination), str(item.source))
                print(f" RESTORED")
            except Exception as e:
                print(f" ERROR: {e}")

    # Reverse reference updates.
    # NOTE(review): this is a blind new->old string replace per file; it
    # assumes the 'new' text was not otherwise legitimately present.
    for update in reversed(manifest.reference_updates):
        if 'error' not in update:
            try:
                file_path = Path(update['file'])
                if file_path.exists():
                    content = file_path.read_text()
                    content = content.replace(update['new'], update['old'])
                    file_path.write_text(content)
            except Exception as e:
                print(f" Reference rollback error: {e}")

    print("\nRollback complete.")
    return True
# =============================================================================
# MAIN
# =============================================================================
def main():
    """CLI entry point. Returns a process exit code (0 on success)."""
    parser = argparse.ArgumentParser(
        description='Nomenclature Migration Script (ADR-100)'
    )
    parser.add_argument(
        '--plan', action='store_true',
        help='Show migration plan'
    )
    parser.add_argument(
        '--dry-run', action='store_true',
        help='Preview changes without executing'
    )
    parser.add_argument(
        '--execute', action='store_true',
        help='Execute the migration'
    )
    parser.add_argument(
        '--validate', action='store_true',
        help='Validate migration results'
    )
    parser.add_argument(
        '--rollback', type=Path,
        help='Rollback using manifest file'
    )
    parser.add_argument(
        '--base-dir', type=Path, default=Path.cwd(),
        help='Base directory (default: current)'
    )
    parser.add_argument(
        '--output', type=Path,
        help='Output path for manifest'
    )
    parser.add_argument(
        '--yes', '-y', action='store_true',
        # Fixed: help text had been mangled by the migration script itself
        # ('prompts' -> 'H.P.007-PROMPTS').
        help='Skip confirmation prompts (for automated execution)'
    )

    args = parser.parse_args()
    base_dir = args.base_dir.resolve()

    print("=" * 60)
    print("NOMENCLATURE MIGRATION (ADR-100)")
    print("=" * 60)
    print(f"Base directory: {base_dir}")
    print(f"Format: [DOMAIN].[CONTENT].[###]-[DESCRIPTIVE-NAME]")

    if args.plan or args.dry_run:
        manifest = create_migration_plan(base_dir)
        print(f"\nMigration Plan ({len(manifest.items)} items):")
        print("-" * 60)
        for item in manifest.items:
            status = "EXISTS" if item.destination.exists() else "NEW"
            print(f" [{status}] {item.source.name}")
            print(f" -> {item.destination.name}")
        if args.dry_run:
            manifest = execute_migration(manifest, base_dir, dry_run=True)
        if args.output:
            args.output.write_text(json.dumps(manifest.to_dict(), indent=2))
            print(f"\nManifest written to: {args.output}")
        return 0

    if args.execute:
        print("\n⚠️ WARNING: This will rename directories!")
        if not args.yes:
            confirm = input("Type 'migrate' to confirm: ")
            if confirm != 'migrate':
                print("Aborted.")
                return 1
        else:
            print("Auto-confirmed with --yes flag")
        manifest = create_migration_plan(base_dir)
        manifest = execute_migration(manifest, base_dir, dry_run=False)
        # Save manifest for rollback
        manifest_path = args.output or base_dir / 'migration-manifest.json'
        manifest_path.write_text(json.dumps(manifest.to_dict(), indent=2))
        print(f"\nManifest saved to: {manifest_path}")
        # Summary
        migrated = sum(1 for i in manifest.items if i.status == 'migrated')
        skipped = sum(1 for i in manifest.items if i.status == 'skipped')
        errors = sum(1 for i in manifest.items if i.status == 'error')
        print(f"\nSummary:")
        print(f" Migrated: {migrated}")
        print(f" Skipped: {skipped}")
        print(f" Errors: {errors}")
        print(f" References updated: {len(manifest.reference_updates)}")
        return 0 if errors == 0 else 1

    if args.validate:
        results = validate_migration(base_dir)
        print(f"\nValidation Results:")
        print("-" * 60)
        print(f" Compliant directories: {results['stats']['compliant_dirs']}")
        print(f" Violations: {results['stats']['violations']}")
        print(f" Broken references: {results['stats']['broken_refs']}")
        if results['nomenclature_violations']:
            print(f"\nViolations:")
            for v in results['nomenclature_violations']:
                print(f" - {v}")
        if results['broken_references']:
            print(f"\nBroken references (first 10):")
            for ref in results['broken_references'][:10]:
                print(f" - {ref['file']}: {ref['pattern']}")
        status = "PASSED" if results['valid'] else "FAILED"
        print(f"\nValidation: {status}")
        return 0 if results['valid'] else 1

    if args.rollback:
        return 0 if rollback_migration(args.rollback, base_dir) else 1

    parser.print_help()
    return 1
# Fixed: the guard had been garbled to `if name == 'main':`, which would
# raise NameError on import; the canonical dunder form is required.
if __name__ == '__main__':
    sys.exit(main())