
#!/usr/bin/env python3
"""
Component Migration Script — Relocate components between CODITECT repositories.

Moves component files from coditect-core to a target repository within the
rollout-master monorepo tree, creating redirect stubs in the source and
updating all framework registries.

Part of the Component Migration Workflow System (H.22).

Usage:
    # Single component migration
    python3 scripts/component-migration.py \
        --type skill --name pdf-to-markdown \
        --to coditect-research-continuum \
        --target-path src/pipeline/ \
        --reason "Product code moved to dedicated product repository"

    # Batch migration
    python3 scripts/component-migration.py \
        --batch skill:pdf-to-markdown,command:pdf-to-markdown,command:udom-navigator \
        --to coditect-research-continuum \
        --reason "Research pipeline relocated"

    # Dry run
    python3 scripts/component-migration.py \
        --type skill --name pdf-to-markdown \
        --to coditect-research-continuum --dry-run

    # Deprecation (stub only, no target repo)
    python3 scripts/component-migration.py \
        --type agent --name old-agent \
        --deprecate --reason "Superseded by new-agent"

    # Rollback a migration
    python3 scripts/component-migration.py \
        --rollback --type skill --name pdf-to-markdown

Created: 2026-02-11
Author: Claude (Opus 4.6)
Task: H.22.1
ADRs: ADR-174 (Research Continuum), ADR-118 (Database Architecture)
"""

import argparse
import json
import re
import shutil
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple

# === Path Resolution ===

SCRIPT_DIR = Path(__file__).parent
FRAMEWORK_ROOT = SCRIPT_DIR.parent  # coditect-core/

# rollout-master is 3 levels up: coditect-core -> core -> submodules -> rollout-master
ROLLOUT_ROOT = FRAMEWORK_ROOT.parent.parent.parent

# Registry paths
REGISTRY_PATH = FRAMEWORK_ROOT / "config" / "framework-registry.json"
ACTIVATION_PATH = FRAMEWORK_ROOT / "config" / "component-activation-status.json"
COUNTS_PATH = FRAMEWORK_ROOT / "config" / "component-counts.json"

# === Component Type Configuration ===
#
# For each component type:
#   source_pattern — the component's root path (relative to FRAMEWORK_ROOT)
#   source_files   — glob covering every file belonging to the component
#   main_file      — the primary file (the one a stub replaces)
#   stub_file      — where the redirect stub is written (None = no stub)
COMPONENT_TYPES = {
    "skill": {
        "source_pattern": "skills/{name}/",
        "source_files": "skills/{name}/**/*",
        "main_file": "skills/{name}/SKILL.md",
        "stub_file": "skills/{name}/SKILL.md",
    },
    "command": {
        "source_pattern": "commands/{name}.md",
        "source_files": "commands/{name}.md",
        "main_file": "commands/{name}.md",
        "stub_file": "commands/{name}.md",
    },
    "agent": {
        "source_pattern": "agents/{name}.md",
        "source_files": "agents/{name}.md",
        "main_file": "agents/{name}.md",
        "stub_file": "agents/{name}.md",
    },
    "hook": {
        "source_pattern": "hooks/{name}.py",
        "source_files": "hooks/{name}.py",
        "main_file": "hooks/{name}.py",
        "stub_file": "hooks/{name}.py",
    },
    "script": {
        "source_pattern": "scripts/{name}.py",
        "source_files": "scripts/{name}.py",
        "main_file": "scripts/{name}.py",
        "stub_file": "scripts/{name}.py",
    },
    "workflow": {
        "source_pattern": "workflows/{name}.yaml",
        "source_files": "workflows/{name}.yaml",
        "main_file": "workflows/{name}.yaml",
        "stub_file": "workflows/{name}.yaml",
    },
    "schema": {
        "source_pattern": "config/schemas/{name}.schema.json",
        "source_files": "config/schemas/{name}.schema.json",
        "main_file": "config/schemas/{name}.schema.json",
        "stub_file": None,  # Schemas get copied, not stubbed
    },
}

# === Discovery ===

def discover_component_files(comp_type: str, name: str) -> List[Path]:
    """Discover all files belonging to a component.

    H.22.1.2: File discovery by component type + name.

    Exits the process for an unknown component type; prints a warning and
    returns an empty list when the component does not exist on disk.
    """
    config = COMPONENT_TYPES.get(comp_type)
    if not config:
        print(f"ERROR: Unknown component type '{comp_type}'")
        sys.exit(1)

    files: List[Path] = []

    if comp_type == "skill":
        # Skills are directories — collect all files recursively.
        skill_dir = FRAMEWORK_ROOT / "skills" / name
        if skill_dir.is_dir():
            for f in skill_dir.rglob("*"):
                if f.is_file() and not _should_skip(f):
                    files.append(f)
        else:
            print(f"WARNING: Skill directory not found: {skill_dir}")
    else:
        # Single-file components.
        source_path = FRAMEWORK_ROOT / config["source_pattern"].format(name=name)
        if source_path.is_file():
            files.append(source_path)
        else:
            print(f"WARNING: Component file not found: {source_path}")

    return files

def _should_skip(path: Path) -> bool:
    """Return True if *path* should be excluded from migration.

    Skips virtualenvs, VCS metadata, caches, large batch-output artifacts,
    compiled bytecode and macOS Finder droppings.
    """
    skip_dirs = {
        ".venv", "venv", "__pycache__", ".git", "node_modules",
        "analyze-new-artifacts",  # Large batch output artifacts
    }

    # Skip files in excluded directories.
    if any(part in skip_dirs for part in path.parts):
        return True

    # Skip by extension / name.
    if path.suffix in {".pyc", ".pyo"}:
        return True
    return path.name == ".DS_Store"

# === Target Resolution ===

def resolve_target_repo(repo_name: str) -> Optional[Path]:
    """Resolve a repository name to its path in the rollout-master tree.

    Searches submodules/products/, submodules/core/, submodules/cloud/,
    submodules/r-and-d/, then submodules/ directly.  Returns None when
    the repository cannot be found.
    """
    search_dirs = [
        ROLLOUT_ROOT / "submodules" / "products",
        ROLLOUT_ROOT / "submodules" / "core",
        ROLLOUT_ROOT / "submodules" / "cloud",
        ROLLOUT_ROOT / "submodules" / "r-and-d",
        ROLLOUT_ROOT / "submodules",
    ]

    for search_dir in search_dirs:
        candidate = search_dir / repo_name
        if candidate.is_dir():
            return candidate

    return None

# === File Copy ===

def copy_files_to_target(
    files: List[Path],
    comp_type: str,
    name: str,
    target_repo: Path,
    target_path: str,
    dry_run: bool = False,
) -> List[Tuple[Path, Path]]:
    """Copy component files to target repository.

    H.22.1.3: File copy to target repo.
    Returns list of (source, destination) tuples.  In dry-run mode nothing
    is written; the planned pairs are printed and returned.
    """
    copied: List[Tuple[Path, Path]] = []
    target_base = target_repo / target_path.rstrip("/")

    for source_file in files:
        # Skills keep their internal layout under a directory named after
        # the skill; single-file components land directly in target_base.
        if comp_type == "skill":
            skill_dir = FRAMEWORK_ROOT / "skills" / name
            rel_path = source_file.relative_to(skill_dir)
            dest = target_base / name / rel_path
        else:
            dest = target_base / source_file.name

        if dry_run:
            print(f"  COPY: {source_file.relative_to(FRAMEWORK_ROOT)}")
            print(f"     -> {dest.relative_to(ROLLOUT_ROOT)}")
            copied.append((source_file, dest))
            continue

        dest.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(source_file, dest)  # copy2 preserves timestamps/permissions
        copied.append((source_file, dest))
        print(f"  COPIED: {source_file.relative_to(FRAMEWORK_ROOT)}")
        print(f"       -> {dest.relative_to(ROLLOUT_ROOT)}")

    return copied

# === Stub Creation ===

def create_stub(
    comp_type: str,
    name: str,
    target_repo_name: str,
    target_path: str,
    reason: str,
    deprecated: bool = False,
    dry_run: bool = False,
) -> Optional[Path]:
    """Create a redirect stub for a relocated component.

    H.22.1.4: Stub creation with standardized frontmatter.
    Returns the stub path, or None when the type takes no stub (schemas).
    """
    config = COMPONENT_TYPES.get(comp_type)
    if not config or not config.get("stub_file"):
        return None

    stub_path = FRAMEWORK_ROOT / config["stub_file"].format(name=name)
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    title = _kebab_to_title(name)
    status = "deprecated" if deprecated else "relocated"
    original_path = config["source_pattern"].format(name=name)

    # Read existing frontmatter to preserve original metadata.
    original_frontmatter = _read_frontmatter(stub_path)
    original_created = original_frontmatter.get("created", now)
    original_keywords = original_frontmatter.get("keywords", [])

    # Build stub content based on file type.
    if comp_type in ("hook", "script"):
        # Python stubs
        stub_content = _create_python_stub(
            comp_type, name, title, status, target_repo_name,
            target_path, original_path, now, reason
        )
    elif comp_type == "workflow":
        # YAML stubs
        stub_content = _create_yaml_stub(
            name, title, status, target_repo_name,
            target_path, original_path, now, reason
        )
    else:
        # Markdown stubs (skill, command, agent)
        stub_content = _create_markdown_stub(
            comp_type, name, title, status, target_repo_name,
            target_path, original_path, now, reason,
            original_created, original_keywords, deprecated
        )

    if dry_run:
        print(f"  STUB: {stub_path.relative_to(FRAMEWORK_ROOT)}")
        print(f"    Status: {status}")
        print(f"    Target: {target_repo_name}/{target_path}")
        return stub_path

    # For skills, ensure the directory exists.
    stub_path.parent.mkdir(parents=True, exist_ok=True)
    stub_path.write_text(stub_content, encoding="utf-8")
    print(f"  STUB CREATED: {stub_path.relative_to(FRAMEWORK_ROOT)}")
    return stub_path

def _create_markdown_stub(
    comp_type, name, title, status, target_repo_name, target_path,
    original_path, date, reason, original_created, original_keywords, deprecated
):
    """Create a markdown redirect stub for a skill/command/agent.

    Emits YAML frontmatter (status, relocation metadata, preserved created
    date and keywords) followed by a short human-readable redirect body.
    """
    # Sorted for deterministic output (raw set iteration order is unstable).
    keywords_list = sorted(set(["relocated", "stub"] + (original_keywords or [])))
    keywords_yaml = "\n".join(f"  - {kw}" for kw in keywords_list)

    relocated_fields = ""
    if not deprecated:
        relocated_fields = (
            f"relocated_to: {target_repo_name}\n"
            f"relocated_path: {target_path}\n"
        )

    # Precompute the deprecated/relocated variants to keep the template flat.
    label = "Deprecated" if deprecated else "Relocated"
    marker = "DEPRECATED" if deprecated else "RELOCATED"
    summary_target = reason if deprecated else target_repo_name
    body_action = "deprecated" if deprecated else f"relocated to {target_repo_name}"
    location_line = "" if deprecated else f"> **New location:** `{target_repo_name}/{target_path}`"
    usage = (
        "This component is deprecated and should no longer be used."
        if deprecated
        else f"See the {target_repo_name} README for current usage instructions."
    )

    return f"""---
title: "{title} ({label})"
component_type: {comp_type}
version: 1.0.0
status: {status}
{relocated_fields}relocated_date: "{date}"
original_path: "{original_path}"
reason: "{reason}"
summary: "{marker} — see {summary_target}"
keywords:
{keywords_yaml}
created: "{original_created}"
updated: "{date}"
---

# {title}

This component has been {body_action}.

{location_line}

**{label}:** {date}
**Reason:** {reason}

This stub exists so that framework discovery (skill catalog, command index,
component search) continues to find this component and directs users to its
new home.

## Usage

{usage}
"""

def _create_python_stub(
    comp_type, name, title, status, target_repo_name,
    target_path, original_path, date, reason
):
    """Create a Python redirect stub for a hook/script component.

    The generated file is a runnable script whose main() prints the new
    location and exits 0.
    """
    return f'''#!/usr/bin/env python3
"""
{title} — RELOCATED

This component has been relocated to {target_repo_name}.
New location: {target_repo_name}/{target_path}
Relocated: {date}
Reason: {reason}

Status: {status}
Original path: {original_path}

This stub exists so that framework discovery continues to find this
component and directs users to its new home.
"""

import sys


def main():
    print("[RELOCATED] {title} has moved to {target_repo_name}/{target_path}")
    print("See: https://github.com/coditect-ai/{target_repo_name}")
    sys.exit(0)


if __name__ == "__main__":
    main()
'''

def _create_yaml_stub(
    name, title, status, target_repo_name, target_path, original_path, date, reason
):
    """Create a YAML redirect stub for a workflow component.

    A comment header explains the relocation; the YAML body carries the
    relocation metadata and an empty steps list so the file stays parseable.
    """
    return f"""# {title} — RELOCATED
#
# This workflow has been relocated to {target_repo_name}.
# New location: {target_repo_name}/{target_path}
# Relocated: {date}
# Reason: {reason}

name: "{name}"
status: "{status}"
relocated_to: "{target_repo_name}"
relocated_path: "{target_path}"
relocated_date: "{date}"
original_path: "{original_path}"
reason: "{reason}"

steps: []
"""

# === Registry Updates ===

def update_registries(
    comp_type: str,
    name: str,
    target_repo_name: str,
    target_path: str,
    deprecated: bool = False,
    dry_run: bool = False,
) -> Dict[str, bool]:
    """Update all framework registries for a relocated component.

    H.22.1.5: Registry updates.
    Updates framework-registry.json, component-activation-status.json and
    component-counts.json.  Returns a per-registry success map.
    """
    results = {
        "framework_registry": False,
        "activation_status": False,
        "component_counts": False,
    }

    status = "deprecated" if deprecated else "relocated"

    # 1. framework-registry.json
    results["framework_registry"] = _update_framework_registry(
        comp_type, name, target_repo_name, target_path, status, dry_run
    )

    # 2. component-activation-status.json
    results["activation_status"] = _update_activation_status(
        comp_type, name, status, dry_run
    )

    # 3. component-counts.json — regenerated by re-running the counter script.
    if dry_run:
        print("  REGISTRY: Would update component-counts.json")
        results["component_counts"] = True
        return results

    import subprocess

    counter_script = SCRIPT_DIR / "update-component-counts.py"
    if counter_script.exists():
        result = subprocess.run(
            [sys.executable, str(counter_script)],
            capture_output=True, text=True
        )
        results["component_counts"] = result.returncode == 0
        if result.returncode == 0:
            print("  REGISTRY: component-counts.json updated")
        else:
            print(f"  WARNING: component-counts.py failed: {result.stderr[:200]}")
    else:
        print(f"  WARNING: Counter script not found: {counter_script}")

    return results

def _update_framework_registry(
    comp_type, name, target_repo_name, target_path, status, dry_run
) -> bool:
    """Update framework-registry.json to mark a component as relocated.

    Returns True on success — including when the component simply is not
    listed in the registry (not all components are).
    """
    if not REGISTRY_PATH.exists():
        print(f"  WARNING: Registry not found: {REGISTRY_PATH}")
        return False

    try:
        registry = json.loads(REGISTRY_PATH.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError) as e:
        print(f"  WARNING: Cannot read registry: {e}")
        return False

    registry_key = {
        "skill": "skills",
        "command": "commands",
        "agent": "agents",
        "hook": "hooks",
        "script": "scripts",
        "workflow": "workflows",
    }.get(comp_type)
    if not registry_key:
        # Schemas (and any future unlisted type) have no registry section.
        return False

    section = registry.get("components", {}).get(registry_key, {})

    # Search through categories for the entry whose id matches.
    found = False
    if isinstance(section, dict):
        for cat_list in section.get("categories", {}).values():
            if not isinstance(cat_list, list):
                continue
            for item in cat_list:
                if isinstance(item, dict) and item.get("id") == name:
                    if dry_run:
                        print(f"  REGISTRY: Would mark {comp_type}/{name} as {status} in framework-registry.json")
                    else:
                        item["status"] = status
                        item["relocated_to"] = target_repo_name
                        item["relocated_path"] = target_path
                        item["relocated_date"] = datetime.now(timezone.utc).isoformat()
                    found = True
                    break
            if found:
                break

    if not found:
        # Component not in registry — that's OK, not all are.
        if dry_run:
            print(f"  REGISTRY: {comp_type}/{name} not found in framework-registry.json (OK)")
        return True

    if not dry_run:
        registry["last_updated"] = datetime.now(timezone.utc).isoformat()
        REGISTRY_PATH.write_text(
            json.dumps(registry, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )
        print(f"  REGISTRY: framework-registry.json updated ({comp_type}/{name} -> {status})")

    return True

def _update_activation_status(comp_type, name, status, dry_run) -> bool:
    """Update component-activation-status.json.

    The file normally lives at ACTIVATION_PATH; falls back to the legacy
    .coditect/ location.  Returns True on success or when the component is
    not tracked in the file.
    """
    # Check both possible paths.
    activation_path = ACTIVATION_PATH
    if not activation_path.exists():
        activation_path = FRAMEWORK_ROOT / ".coditect" / "component-activation-status.json"
    if not activation_path.exists():
        print("  WARNING: Activation status file not found")
        return False

    try:
        activation = json.loads(activation_path.read_text(encoding="utf-8"))
    except (json.JSONDecodeError, OSError) as e:
        print(f"  WARNING: Cannot read activation status: {e}")
        return False

    found = False
    for comp in activation.get("components", []):
        if comp.get("type") == comp_type and comp.get("name") == name:
            if dry_run:
                print(f"  REGISTRY: Would mark {comp_type}/{name} as {status} in activation-status")
            else:
                comp["status"] = status
                comp["activated"] = False
                comp["reason"] = f"Component {status}"
                comp["relocated_at"] = datetime.now(timezone.utc).isoformat()
            found = True
            break

    if not found:
        if dry_run:
            print(f"  REGISTRY: {comp_type}/{name} not found in activation-status (OK)")
        return True

    if not dry_run:
        # setdefault guards against files missing the summary section.
        activation.setdefault("activation_summary", {})["last_updated"] = \
            datetime.now(timezone.utc).isoformat()
        activation_path.write_text(
            json.dumps(activation, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )
        print(f"  REGISTRY: activation-status updated ({comp_type}/{name} -> {status})")

    return True

# === Rollback ===

def rollback_migration(comp_type: str, name: str, dry_run: bool = False) -> bool:
    """Rollback a migration by restoring files from git.

    H.22.1.8: Rollback capability.
    Uses git checkout to restore original files from the last commit.
    Returns True on success.
    """
    import subprocess

    config = COMPONENT_TYPES.get(comp_type)
    if not config:
        print(f"ERROR: Unknown component type '{comp_type}'")
        return False

    if comp_type == "skill":
        # Restore the entire skill directory.
        restore_path = f"skills/{name}/"
    else:
        restore_path = config["source_pattern"].format(name=name)

    if dry_run:
        print(f"  ROLLBACK: Would restore {restore_path} from git HEAD")
        return True

    result = subprocess.run(
        ["git", "checkout", "HEAD", "--", restore_path],
        cwd=str(FRAMEWORK_ROOT),
        capture_output=True,
        text=True,
    )

    if result.returncode == 0:
        print(f"  ROLLBACK: Restored {restore_path} from git HEAD")
        return True

    print(f"  ROLLBACK FAILED: {result.stderr.strip()}")
    return False

# === Preflight ===

def preflight_check(
    comp_type: str,
    name: str,
    target_repo_name: Optional[str],
    target_path: str,
    deprecated: bool = False,
) -> List[str]:
    """Run preflight checks before migration.

    Returns list of error messages. Empty list means all checks pass.
    """
    errors: List[str] = []

    # Source must exist.
    if not discover_component_files(comp_type, name):
        errors.append(f"Source not found: {comp_type}/{name}")

    # Target repo must resolve (unless deprecating, which takes no target).
    if not deprecated:
        if not target_repo_name:
            errors.append("Target repository name required (use --to)")
        elif not resolve_target_repo(target_repo_name):
            errors.append(f"Target repo not found in rollout-master tree: {target_repo_name}")

    # Refuse to migrate twice: an existing relocated/deprecated stub means
    # this component was already processed.
    config = COMPONENT_TYPES.get(comp_type, {})
    if config.get("stub_file"):
        stub_path = FRAMEWORK_ROOT / config["stub_file"].format(name=name)
        if stub_path.exists():
            content = stub_path.read_text(encoding="utf-8")
            if "status: relocated" in content or "status: deprecated" in content:
                errors.append(f"Component already has a stub: {stub_path.relative_to(FRAMEWORK_ROOT)}")

    return errors

# === Batch Processing ===

def parse_batch(batch_str: str) -> List[Tuple[str, str]]:
    """Parse batch string into list of (type, name) tuples.

    H.22.1.6: Batch mode.
    Format: type:name,type:name,...
    Invalid entries are skipped with a warning rather than aborting.
    """
    items: List[Tuple[str, str]] = []
    for item in batch_str.split(","):
        item = item.strip()
        if ":" not in item:
            print(f"WARNING: Invalid batch item '{item}' (expected type:name)")
            continue
        comp_type, name = item.split(":", 1)
        # Strip BEFORE the membership check so entries like "skill : x" are
        # accepted (previously the unstripped type failed the lookup even
        # though the appended tuple was stripped).
        comp_type, name = comp_type.strip(), name.strip()
        if comp_type not in COMPONENT_TYPES:
            print(f"WARNING: Unknown type '{comp_type}' in batch item '{item}'")
            continue
        items.append((comp_type, name))
    return items

# === Utility ===

def _kebab_to_title(name: str) -> str:
    """Convert kebab-case to Title Case (e.g. 'pdf-to-markdown' -> 'Pdf To Markdown')."""
    return " ".join(word.capitalize() for word in name.split("-"))

def _read_frontmatter(path: Path) -> dict:
    """Best-effort parse of YAML frontmatter from a markdown file.

    Handles the simple `key: value` lines this framework writes — NOT a
    full YAML parser.  Block-style `keywords:` lists are extracted by
    regex; all other values come back as stripped strings.  Returns an
    empty dict when the file is missing or has no frontmatter.
    """
    if not path.exists():
        return {}

    content = path.read_text(encoding="utf-8")
    if not content.startswith("---"):
        return {}

    # Find closing --- delimiter.
    end = content.find("---", 3)
    if end == -1:
        return {}

    frontmatter_text = content[3:end].strip()
    result: dict = {}

    for line in frontmatter_text.split("\n"):
        # Only top-level "key: value" lines; skip indented/list lines.
        if ":" in line and not line.startswith((" ", "-")):
            key, _, value = line.partition(":")
            key = key.strip()
            if key == "keywords":
                # Block-style keyword lists are handled by the regex below.
                continue
            result[key] = value.strip().strip("'\"")

    # Simple keyword parsing for "keywords:\n  - a\n  - b" blocks.
    if "keywords" not in result:
        kw_match = re.findall(r"keywords:\s*\n((?:\s+-\s+.+\n?)+)", content[:end])
        if kw_match:
            result["keywords"] = [
                ln.strip().lstrip("- ").strip("'\"")
                for ln in kw_match[0].strip().split("\n")
                if ln.strip().startswith("-")
            ]

    return result

# === Main Execution ===

def migrate_single(
    comp_type: str,
    name: str,
    target_repo_name: str,
    target_path: str,
    reason: str,
    deprecated: bool = False,
    dry_run: bool = False,
    keep_source: bool = False,
) -> bool:
    """Execute a single component migration.

    Phases: preflight -> copy -> stub -> registry -> verify.
    Returns True on success.

    NOTE(review): keep_source is accepted for CLI compatibility but is not
    currently consulted — sources are never deleted; only the main file is
    replaced by a stub.
    """
    status_label = "DEPRECATE" if deprecated else "MIGRATE"
    print(f"\n{'=' * 60}")
    print(f"  {status_label}: {comp_type}/{name}")
    if not deprecated:
        print(f"  Target: {target_repo_name}/{target_path}")
    print(f"{'=' * 60}")

    # Phase 6a: Preflight
    print("\n--- Phase 6a: PREFLIGHT ---")
    errors = preflight_check(comp_type, name, target_repo_name, target_path, deprecated)
    if errors:
        print("  PREFLIGHT FAILED:")
        for err in errors:
            print(f"  - {err}")
        return False
    print("  PREFLIGHT: All checks passed")

    # Phase 6b: Copy (skipped for deprecation — nothing moves anywhere)
    files = discover_component_files(comp_type, name)
    print(f"\n--- Phase 6b: COPY ({len(files)} files) ---")
    if deprecated:
        print("  SKIP: Deprecation mode — no files to copy")
    elif not files:
        print(f"  WARNING: No files found for {comp_type}/{name}")
    else:
        target_repo = resolve_target_repo(target_repo_name)
        if not target_repo:
            print(f"  ERROR: Cannot resolve target repo: {target_repo_name}")
            return False
        copy_files_to_target(files, comp_type, name, target_repo, target_path, dry_run)

    # Phase 6c: Stub
    print("\n--- Phase 6c: STUB ---")
    stub_path = create_stub(
        comp_type, name,
        target_repo_name or "",
        target_path,
        reason,
        deprecated=deprecated,
        dry_run=dry_run,
    )
    if stub_path:
        print(f"  STUB: {'Would create' if dry_run else 'Created'} at {stub_path.relative_to(FRAMEWORK_ROOT)}")
    else:
        print(f"  STUB: No stub needed for type '{comp_type}'")

    # Phase 6d: Registry
    print("\n--- Phase 6d: REGISTRY ---")
    registry_results = update_registries(
        comp_type, name,
        target_repo_name or "",
        target_path,
        deprecated=deprecated,
        dry_run=dry_run,
    )
    for reg_name, success in registry_results.items():
        print(f"  {reg_name}: {'OK' if success else 'FAIL'}")

    # Phase 6e: Verify — re-read the stub and confirm the expected markers.
    print("\n--- Phase 6e: VERIFY ---")
    if not dry_run and stub_path and stub_path.exists():
        content = stub_path.read_text(encoding="utf-8")
        status_type = "deprecated" if deprecated else "relocated"
        checks = {
            f"status: {status_type}": f"status: {status_type}" in content,
            "stub file exists": stub_path.exists(),
        }
        if not deprecated:
            checks["relocated_to present"] = "relocated_to:" in content
            checks["relocated_date present"] = "relocated_date:" in content

        all_pass = all(checks.values())
        for check, passed in checks.items():
            print(f"  {'PASS' if passed else 'FAIL'}: {check}")
        if not all_pass:
            print("  VERIFICATION: Some checks failed")
            return False
    elif dry_run:
        print("  VERIFY: Skipped (dry run)")
    else:
        print("  VERIFY: OK (no stub to verify)")

    print(f"\n{'=' * 60}")
    mode = "DRY RUN" if dry_run else "COMPLETE"
    print(f"  {mode}: {comp_type}/{name} {'deprecated' if deprecated else 'migrated'}")
    print(f"{'=' * 60}")

    return True

def main():
    """CLI entry point: parse arguments and dispatch to rollback, batch or
    single-migration mode.  Exits 0 on success, 1 on any failure."""
    parser = argparse.ArgumentParser(
        description="Migrate CODITECT components between repositories",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Single migration
  %(prog)s --type skill --name pdf-to-markdown --to coditect-research-continuum --target-path src/pipeline/

  # Batch migration
  %(prog)s --batch skill:pdf-to-markdown,command:pdf-to-markdown --to coditect-research-continuum

  # Deprecation
  %(prog)s --type agent --name old-agent --deprecate --reason "Superseded"

  # Dry run
  %(prog)s --type skill --name pdf-to-markdown --to coditect-research-continuum --dry-run

  # Rollback
  %(prog)s --rollback --type skill --name pdf-to-markdown
""",
    )

    # Component selection
    parser.add_argument("--type", "-t", choices=list(COMPONENT_TYPES.keys()),
                        help="Component type")
    parser.add_argument("--name", "-n", help="Component name (kebab-case)")
    parser.add_argument("--batch", "-b", help="Batch: type:name,type:name,...")

    # Target
    parser.add_argument("--to", dest="target_repo",
                        help="Target repository name (e.g., coditect-research-continuum)")
    parser.add_argument("--target-path", default="src/",
                        help="Path within target repo (default: src/)")
    parser.add_argument("--reason", default="Component relocated to dedicated repository",
                        help="Reason for migration")

    # Modes
    parser.add_argument("--deprecate", action="store_true",
                        help="Deprecate component (stub only, no target)")
    parser.add_argument("--rollback", action="store_true",
                        help="Rollback a migration (restore from git)")
    parser.add_argument("--dry-run", action="store_true",
                        help="Preview changes without executing")
    parser.add_argument("--keep-source", action="store_true",
                        help="Keep source files after migration (don't replace with stubs)")
    parser.add_argument("--verbose", "-v", action="store_true",
                        help="Verbose output")

    args = parser.parse_args()

    # Header
    print("=" * 60)
    print("  CODITECT Component Migration")
    print(f"  Mode: {'DRY RUN' if args.dry_run else 'ROLLBACK' if args.rollback else 'DEPRECATE' if args.deprecate else 'MIGRATE'}")
    print(f"  Framework: {FRAMEWORK_ROOT}")
    print(f"  Rollout:   {ROLLOUT_ROOT}")
    print("=" * 60)

    # Rollback mode short-circuits everything else.
    if args.rollback:
        if not args.type or not args.name:
            parser.error("--rollback requires --type and --name")
        success = rollback_migration(args.type, args.name, args.dry_run)
        sys.exit(0 if success else 1)

    if args.batch:
        # Batch mode
        items = parse_batch(args.batch)
        if not items:
            parser.error("No valid items in --batch")
        if not args.deprecate and not args.target_repo:
            parser.error("--to is required for migration (or use --deprecate)")

        print(f"\nBatch migration: {len(items)} components")
        results = []
        for comp_type, name in items:
            success = migrate_single(
                comp_type, name,
                args.target_repo or "",
                args.target_path,
                args.reason,
                deprecated=args.deprecate,
                dry_run=args.dry_run,
                keep_source=args.keep_source,
            )
            results.append((comp_type, name, success))

        # Summary
        print(f"\n{'=' * 60}")
        print(f"  BATCH {'DRY RUN' if args.dry_run else 'COMPLETE'}: {len(results)} components")
        print(f"{'=' * 60}")
        for comp_type, name, success in results:
            print(f"  {'OK' if success else 'FAIL'}: {comp_type}/{name}")

        sys.exit(0 if all(s for _, _, s in results) else 1)

    # Single mode
    if not args.type or not args.name:
        parser.error("--type and --name are required (or use --batch)")
    if not args.deprecate and not args.target_repo:
        parser.error("--to is required for migration (or use --deprecate)")

    success = migrate_single(
        args.type, args.name,
        args.target_repo or "",
        args.target_path,
        args.reason,
        deprecated=args.deprecate,
        dry_run=args.dry_run,
        keep_source=args.keep_source,
    )
    sys.exit(0 if success else 1)

if __name__ == "__main__":
    main()