Skip to main content

Track M: Security Operations - Part 2

Note: This is Part 2 of the Security Operations evidence document. Part 1 contains M.1.1–M.1.2. This part contains M.1.3–M.1.4; sections M.2–M.5 follow in a subsequent consolidated document.

M.1.3: SCA Vulnerability Scanning (Continued from Part 1)

Dependency Update Automation

# .github/dependabot.yml
# NOTE: indentation restored — the previous flattened form was not valid YAML.
version: 2
updates:
  # Python dependencies
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
      time: "02:00"
      timezone: "America/Los_Angeles"
    open-pull-requests-limit: 10
    reviewers:
      - "security-team"
    labels:
      - "dependencies"
      - "security"
    commit-message:
      prefix: "deps"
      prefix-development: "deps-dev"
      include: "scope"
    ignore:
      # Major version updates require manual review for healthcare compliance
      - dependency-name: "*"
        update-types: ["version-update:semver-major"]
    groups:
      django-ecosystem:
        patterns:
          - "django*"
          - "djangorestframework*"
      # NOTE(review): "security" is not a documented value for group
      # update-types (minor/patch/major); confirm against the Dependabot
      # options reference — `applies-to: security-updates` may be intended.
      security-patches:
        update-types:
          - "security"

  # JavaScript/npm dependencies
  - package-ecosystem: "npm"
    directory: "/frontend"
    schedule:
      interval: "daily"
      time: "02:00"
    open-pull-requests-limit: 10
    reviewers:
      - "security-team"
      - "frontend-team"
    labels:
      - "dependencies"
      - "frontend"
    versioning-strategy: increase
    groups:
      react-ecosystem:
        patterns:
          - "react*"
          - "@testing-library/*"

  # Docker base images
  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
    open-pull-requests-limit: 5
    reviewers:
      - "devops-team"
    labels:
      - "dependencies"
      - "docker"

  # GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    labels:
      - "dependencies"
      - "ci-cd"

SBOM Management System

# scripts/security/sbom-manager.py
"""
Software Bill of Materials (SBOM) Management

Generates, validates, and manages SBOMs for compliance and supply chain security.
Compliance: Executive Order 14028 (Cybersecurity), NIST SSDF
"""

import json
import hashlib
import subprocess
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional
from dataclasses import dataclass, asdict


@dataclass
class Component:
    """Software component entry in a generated SBOM."""
    name: str
    version: str
    purl: str  # Package URL (purl spec), e.g. "pkg:pypi/<name>@<version>"
    licenses: List[str]  # license identifiers/names; "UNKNOWN" when not reported
    supplier: Optional[str] = None  # package author/vendor, when available
    cpe: Optional[str] = None  # Common Platform Enumeration identifier
    hashes: Optional[Dict[str, str]] = None  # algorithm name -> digest


class SBOMGenerator:
    """Generate and manage a Software Bill of Materials (CycloneDX 1.5 JSON).

    Collects Python (pip) and JavaScript (npm) components, writes a
    CycloneDX SBOM, and shells out to external tools for validation
    (cyclonedx-cli) and signing (cosign).
    """

    def __init__(self, project_root: Path):
        """
        Args:
            project_root: Repository root; used to locate the frontend
                lockfile, the VERSION file, and as the git working dir.
        """
        from datetime import timezone  # local import: avoids touching file-level imports

        self.project_root = project_root
        # ISO-8601 UTC timestamp with trailing "Z" (CycloneDX convention).
        # datetime.utcnow() is deprecated; use an aware datetime instead.
        self.timestamp = datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z')

    def generate_python_components(self) -> List[Component]:
        """Extract installed Python dependencies via `pip list` / `pip show`.

        Returns an empty list if `pip list` fails; packages that `pip show`
        cannot describe are skipped.
        """
        components = []

        result = subprocess.run(
            ['pip', 'list', '--format=json'],
            capture_output=True,
            text=True
        )
        if result.returncode != 0:
            return components

        for pkg in json.loads(result.stdout):
            metadata_result = subprocess.run(
                ['pip', 'show', '--verbose', pkg['name']],
                capture_output=True,
                text=True
            )
            if metadata_result.returncode != 0:
                continue  # skip packages pip cannot describe

            metadata = self._parse_pip_show(metadata_result.stdout)
            components.append(Component(
                name=pkg['name'],
                version=pkg['version'],
                purl=f"pkg:pypi/{pkg['name']}@{pkg['version']}",
                # pip may report an empty License field; normalize to UNKNOWN
                licenses=[metadata.get('License') or 'UNKNOWN'],
                supplier=metadata.get('Author'),
                hashes=self._get_package_hashes(pkg['name'], pkg['version'])
            ))

        return components

    def generate_npm_components(self) -> List[Component]:
        """Extract JavaScript dependencies from frontend/package-lock.json.

        Returns an empty list when the lockfile does not exist.
        """
        components = []
        package_lock_path = self.project_root / 'frontend' / 'package-lock.json'
        if not package_lock_path.exists():
            return components

        with open(package_lock_path) as f:
            data = json.load(f)

        for name, info in data.get('packages', {}).items():
            if not name:  # the root package has an empty key — skip it
                continue

            # Extract package name from node_modules path
            pkg_name = name.replace('node_modules/', '')
            # BUG FIX: the purl previously embedded the literal "None" when
            # version was absent; use the same fallback as the version field.
            version = info.get('version', 'unknown')

            hashes = None
            integrity = info.get('integrity')
            if integrity:
                # npm integrity is SRI format "<alg>-<base64 digest>".
                # BUG FIX: previously the code assumed "sha512-" and left the
                # wrong prefix in place for sha1/sha256 entries.
                alg, _, digest = integrity.partition('-')
                hashes = {alg: digest}

            components.append(Component(
                name=pkg_name,
                version=version,
                purl=f"pkg:npm/{pkg_name}@{version}",
                licenses=[info.get('license', 'UNKNOWN')],
                hashes=hashes
            ))

        return components

    def generate_cyclonedx_sbom(self, output_path: Path) -> Dict:
        """Generate a CycloneDX 1.5 SBOM, write it to output_path, return it.

        Args:
            output_path: Destination file for the JSON SBOM.

        Returns:
            The SBOM document as a dict.
        """
        python_components = self.generate_python_components()
        npm_components = self.generate_npm_components()
        all_components = python_components + npm_components

        sbom = {
            "bomFormat": "CycloneDX",
            "specVersion": "1.5",
            "serialNumber": f"urn:uuid:{self._generate_uuid()}",
            "version": 1,
            "metadata": {
                "timestamp": self.timestamp,
                "tools": [
                    {
                        "vendor": "BIO-QMS",
                        "name": "sbom-manager",
                        "version": "1.0.0"
                    }
                ],
                "component": {
                    "type": "application",
                    "name": "bio-qms-platform",
                    "version": self._get_app_version(),
                    "description": "Quality Management System for Life Sciences",
                    "licenses": [{"license": {"id": "Proprietary"}}]
                },
                # Regulatory/compliance markers carried as CycloneDX properties
                "properties": [
                    {
                        "name": "regulatory:fda21cfr11",
                        "value": "true"
                    },
                    {
                        "name": "regulatory:hipaa",
                        "value": "true"
                    },
                    {
                        "name": "compliance:soc2",
                        "value": "true"
                    }
                ]
            },
            "components": [
                {
                    "type": "library",
                    "name": comp.name,
                    "version": comp.version,
                    "purl": comp.purl,
                    "licenses": [{"license": {"name": lic}} for lic in comp.licenses],
                    "hashes": [
                        {"alg": alg, "content": content}
                        for alg, content in (comp.hashes or {}).items()
                    ] if comp.hashes else []
                }
                for comp in all_components
            ],
            "dependencies": self._generate_dependency_graph(all_components)
        }

        with open(output_path, 'w') as f:
            json.dump(sbom, f, indent=2)

        return sbom

    def validate_sbom(self, sbom_path: Path) -> Dict:
        """Validate an SBOM file against the CycloneDX schema.

        Requires the `cyclonedx` CLI on PATH. Returns a dict with
        'valid' (bool), 'output' (stdout), and 'errors' (stderr or None).
        """
        result = subprocess.run(
            ['cyclonedx', 'validate', '--input-file', str(sbom_path)],
            capture_output=True,
            text=True
        )

        return {
            'valid': result.returncode == 0,
            'output': result.stdout,
            'errors': result.stderr if result.returncode != 0 else None
        }

    def sign_sbom(self, sbom_path: Path, key_path: Path) -> Path:
        """Sign an SBOM with cosign for integrity verification.

        Args:
            sbom_path: SBOM file to sign.
            key_path: Private key for cosign.

        Returns:
            Path to the detached signature file ("<sbom>.json.sig").

        Raises:
            subprocess.CalledProcessError: if cosign exits non-zero.
        """
        signature_path = sbom_path.with_suffix(sbom_path.suffix + '.sig')

        subprocess.run(
            [
                'cosign', 'sign-blob',
                '--key', str(key_path),
                str(sbom_path),
                '--output-signature', str(signature_path)
            ],
            check=True
        )

        return signature_path

    def _parse_pip_show(self, output: str) -> Dict:
        """Parse `pip show` output into a {header: value} dict."""
        metadata = {}
        for line in output.split('\n'):
            if ':' in line:
                key, value = line.split(':', 1)
                metadata[key.strip()] = value.strip()
        return metadata

    def _get_package_hashes(self, package: str, version: str) -> Dict[str, str]:
        """Best-effort fetch of a package's SHA256 digest from PyPI.

        Returns {} on any network/parse failure — hashes are optional
        in the SBOM, so failures are swallowed deliberately (but no
        longer via a bare `except:`, which also caught KeyboardInterrupt).
        """
        try:
            import requests
            response = requests.get(f"https://pypi.org/pypi/{package}/{version}/json")
            if response.status_code == 200:
                data = response.json()
                urls = data.get('urls', [])
                if urls:
                    # SHA256 of the first published distribution file
                    return {'sha256': urls[0].get('digests', {}).get('sha256', '')}
        except Exception:
            pass  # best-effort: missing hashes are acceptable
        return {}

    def _generate_uuid(self) -> str:
        """Generate a random UUID for the SBOM serial number."""
        import uuid
        return str(uuid.uuid4())

    def _get_app_version(self) -> str:
        """Read the app version from VERSION, then `git describe`, else a dev default."""
        version_file = self.project_root / 'VERSION'
        if version_file.exists():
            return version_file.read_text().strip()

        # Fallback to git describe (git may be absent or this may not be a repo)
        try:
            result = subprocess.run(
                ['git', 'describe', '--tags', '--always'],
                capture_output=True,
                text=True,
                cwd=self.project_root
            )
            if result.returncode == 0:
                return result.stdout.strip()
        except OSError:
            pass  # e.g. git executable not installed

        return '0.0.0-dev'

    def _generate_dependency_graph(self, components: List[Component]) -> List[Dict]:
        """Emit a flat dependency list (no edges).

        Simplified placeholder — real dependency resolution would fill
        in "dependsOn" relationships.
        """
        return [
            {
                "ref": comp.purl,
                "dependsOn": []
            }
            for comp in components
        ]


def _cli() -> None:
    """Command-line entry point: generate, validate, and/or sign an SBOM."""
    import argparse

    parser = argparse.ArgumentParser(description='SBOM Generator and Manager')
    parser.add_argument('--generate', action='store_true', help='Generate SBOM')
    parser.add_argument('--validate', type=str, help='Validate SBOM file')
    parser.add_argument('--sign', type=str, help='Sign SBOM file')
    parser.add_argument('--key', type=str, help='Signing key path')
    parser.add_argument('--output', type=str, default='sbom.json', help='Output SBOM path')
    args = parser.parse_args()

    generator = SBOMGenerator(Path.cwd())

    if args.generate:
        sbom = generator.generate_cyclonedx_sbom(Path(args.output))
        print(f"SBOM generated: {args.output}")
        print(f"Components: {len(sbom['components'])}")

    if args.validate:
        outcome = generator.validate_sbom(Path(args.validate))
        if outcome['valid']:
            print(f"✓ SBOM is valid")
        else:
            print(f"✗ SBOM validation failed:")
            print(outcome['errors'])

    # Signing requires both a target file and a key.
    if args.sign and args.key:
        sig_path = generator.sign_sbom(Path(args.sign), Path(args.key))
        print(f"SBOM signed: {sig_path}")


if __name__ == '__main__':
    _cli()

M.1.4: Security Gate in CI/CD

Implementation Overview

Automated security gates in CI/CD pipeline enforce security standards before deployment, with severity-based blocking and exception workflow.

Features:

  • Severity-based blocking: Critical/High findings block deployment
  • Exception workflow: Security team approval for known acceptable risks
  • Automated remediation: Auto-create PRs for dependency updates
  • Metrics tracking: Security gate pass/fail rates

Security Gate Pipeline

# .github/workflows/security-gate.yml
# NOTE: indentation restored — the previous flattened form was not valid YAML.
name: Security Gate

on:
  pull_request:
    types: [opened, synchronize, reopened]
  workflow_dispatch:

permissions:
  contents: read
  security-events: write
  pull-requests: write
  statuses: write

jobs:
  security-gate-check:
    name: Security Gate Validation
    runs-on: ubuntu-latest
    outputs:
      gate-status: ${{ steps.gate-decision.outputs.status }}
      findings-summary: ${{ steps.gate-decision.outputs.summary }}

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install security gate tools
        run: |
          pip install pyyaml requests

      - name: Run SAST scan
        id: sast
        run: |
          # Run Semgrep
          docker run --rm -v "${PWD}:/src" \
            returntocorp/semgrep semgrep \
            --config=auto \
            --json \
            --output=/src/sast-results.json \
            /src

      - name: Run SCA scan
        id: sca
        run: |
          # Run Snyk (|| true: findings are gated later, not here)
          npx snyk test \
            --json \
            --severity-threshold=high \
            > sca-results.json || true

      - name: Run secrets scan
        id: secrets
        run: |
          # Run TruffleHog (|| true: findings are gated later, not here)
          docker run --rm -v "${PWD}:/scan" \
            trufflesecurity/trufflehog:latest \
            filesystem /scan \
            --json \
            > secrets-results.json || true

      - name: Aggregate security findings
        id: aggregate
        run: |
          python3 scripts/security/aggregate-findings.py \
            --sast sast-results.json \
            --sca sca-results.json \
            --secrets secrets-results.json \
            --output aggregated-findings.json

      - name: Check security exceptions
        id: exceptions
        run: |
          python3 scripts/security/check-exceptions.py \
            --findings aggregated-findings.json \
            --exceptions .security/exceptions.yaml \
            --output findings-with-exceptions.json

      - name: Security gate decision
        id: gate-decision
        run: |
          python3 scripts/security/security-gate.py \
            --findings findings-with-exceptions.json \
            --policy .security/gate-policy.yaml \
            --output gate-decision.json

          # Set outputs
          STATUS=$(jq -r '.status' gate-decision.json)
          SUMMARY=$(jq -r '.summary' gate-decision.json)

          echo "status=$STATUS" >> $GITHUB_OUTPUT
          echo "summary=$SUMMARY" >> $GITHUB_OUTPUT

          # Fail if gate blocks
          if [ "$STATUS" = "BLOCKED" ]; then
            echo "::error::Security gate BLOCKED deployment"
            exit 1
          fi

      - name: Comment on PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const decision = JSON.parse(fs.readFileSync('gate-decision.json', 'utf8'));

            const status = decision.status === 'PASSED' ? '✅' : '❌';
            const summary = decision.summary;

            const findingsByType = decision.findings_by_type || {};
            const findingsBySeverity = decision.findings_by_severity || {};

            let body = `## Security Gate ${status} ${decision.status}\n\n`;
            body += `${summary}\n\n`;
            body += `### Findings Summary\n\n`;
            body += `| Type | Critical | High | Medium | Low |\n`;
            body += `|------|----------|------|--------|-----|\n`;

            // BUG FIX: the decision engine emits UPPERCASE severity keys
            // (CRITICAL/HIGH/...); reading lowercase keys always showed 0.
            for (const [type, counts] of Object.entries(findingsByType)) {
              body += `| ${type} | ${counts.CRITICAL || 0} | ${counts.HIGH || 0} | ${counts.MEDIUM || 0} | ${counts.LOW || 0} |\n`;
            }

            if (decision.status === 'BLOCKED') {
              body += `\n### ⚠️ Blocking Issues\n\n`;
              for (const issue of decision.blocking_issues || []) {
                body += `- **${issue.severity}**: ${issue.title} (${issue.type})\n`;
                body += `  - ${issue.description}\n`;
                body += `  - Remediation: ${issue.remediation}\n\n`;
              }

              body += `\n**To proceed:**\n`;
              body += `1. Remediate blocking issues and push fixes\n`;
              body += `2. OR request security exception: \`/security-exception <finding-id> "<justification>"\`\n`;
            }

            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: body
            });

      - name: Update commit status
        if: always()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const decision = JSON.parse(fs.readFileSync('gate-decision.json', 'utf8'));

            // CONDITIONAL passes the commit status; warnings are surfaced
            // in the PR comment instead.
            const state = decision.status === 'PASSED' ? 'success' :
                          decision.status === 'CONDITIONAL' ? 'success' :
                          'failure';

            github.rest.repos.createCommitStatus({
              owner: context.repo.owner,
              repo: context.repo.repo,
              sha: context.sha,
              state: state,
              target_url: `https://github.com/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`,
              description: decision.summary,
              context: 'security-gate'
            });

      - name: Upload security reports
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: security-gate-reports
          path: |
            sast-results.json
            sca-results.json
            secrets-results.json
            aggregated-findings.json
            findings-with-exceptions.json
            gate-decision.json
          retention-days: 90

Security Gate Policy

# .security/gate-policy.yaml
# NOTE: indentation restored — the previous flattened form was not valid YAML.
---
version: "1.0"
name: "BIO-QMS Security Gate Policy"
description: "Security gate enforcement policy for CI/CD pipeline"

# Severity-based blocking rules
blocking_rules:
  # SAST findings
  sast:
    critical:
      action: BLOCK
      reason: "Critical SAST findings must be remediated before deployment"
      exceptions_allowed: false
    high:
      action: BLOCK
      reason: "High severity SAST findings require remediation or security review"
      exceptions_allowed: true
      approval_required:
        - security-team
        - engineering-lead

  # SCA (dependency vulnerabilities)
  sca:
    critical:
      action: BLOCK
      cvss_threshold: 9.0
      reason: "Critical vulnerabilities in dependencies (CVSS >= 9.0)"
      exceptions_allowed: true
      exception_max_duration_days: 30
      approval_required:
        - security-team
        - cto
    high:
      action: CONDITIONAL
      cvss_threshold: 7.0
      reason: "High severity vulnerabilities (CVSS >= 7.0)"
      exceptions_allowed: true
      exception_max_duration_days: 90
      conditions:
        - "Remediation plan documented"
        - "Security team notified"

  # Secrets exposure
  secrets:
    any:
      action: BLOCK
      reason: "No secrets or credentials may be committed to repository"
      exceptions_allowed: false

  # License compliance
  license:
    prohibited:
      action: BLOCK
      licenses:
        - "GPL-2.0"
        - "GPL-3.0"
        - "AGPL-3.0"
      reason: "Prohibited license detected"
      exceptions_allowed: true
      approval_required:
        - legal-team
        - cto

# Compliance-specific rules
compliance_rules:
  fda_21_cfr_part_11:
    enabled: true
    findings:
      - rule_id: "electronic-signature-missing-validation"
        severity: ERROR
        action: BLOCK
      - rule_id: "audit-trail-missing-timestamp"
        severity: ERROR
        action: BLOCK

  hipaa:
    enabled: true
    findings:
      - rule_id: "phi-logging-detected"
        severity: ERROR
        action: BLOCK
      - rule_id: "unencrypted-phi-storage"
        severity: ERROR
        action: BLOCK

  soc2:
    enabled: true
    findings:
      - rule_id: "password-in-source-code"
        severity: ERROR
        action: BLOCK
      - rule_id: "missing-access-control"
        severity: ERROR
        action: BLOCK

# Exception workflow
exception_workflow:
  enabled: true
  approval_process:
    - step: 1
      name: "Security Team Review"
      approvers:
        - "@security-team"
      required_approvals: 1

    - step: 2
      name: "Risk Assessment"
      approvers:
        - "@ciso"
        - "@cto"
      required_approvals: 1
      required_for_severities:
        - CRITICAL
        - HIGH

  documentation_required:
    - "Business justification"
    - "Risk assessment"
    - "Compensating controls"
    - "Remediation timeline"

  auto_expire: true
  default_expiration_days: 30
  renewal_allowed: true
  max_renewals: 2

# Metrics and reporting
reporting:
  metrics:
    - "gate_pass_rate"
    - "findings_by_severity"
    - "findings_by_type"
    - "exception_count"
    - "mean_time_to_remediation"

  notifications:
    slack:
      webhook_url: "${SECURITY_SLACK_WEBHOOK}"
      on_block: true
      on_exception_request: true

    pagerduty:
      integration_key: "${PAGERDUTY_SECURITY_KEY}"
      on_critical_finding: true

# Allowlist for known false positives
allowlist:
  - finding_id: "semgrep-rule-123"
    reason: "Test data pattern, not actual vulnerability"
    approved_by: "security-team"
    approved_date: "2026-01-15"
    expires: "2026-07-15"

Security Gate Decision Engine

# scripts/security/security-gate.py
"""
Security Gate Decision Engine

Evaluates security findings against policy and makes gate pass/fail decision.
Compliance: SOC 2 CC7.3 (Change Management - Security Controls)
"""

import json
import yaml
from datetime import datetime, timedelta
from typing import Dict, List, Optional
from pathlib import Path
from dataclasses import dataclass, field


@dataclass
class Finding:
    """A single security finding reported by an upstream scanner."""
    id: str
    type: str  # scanner category: sast, sca, secrets, license
    severity: str  # CRITICAL, HIGH, MEDIUM, LOW
    title: str
    description: str
    remediation: Optional[str] = None  # suggested fix, when the scanner provides one
    cvss_score: Optional[float] = None  # CVSS score (SCA findings); compared to policy thresholds
    cwe: Optional[str] = None  # CWE identifier, when available
    file_path: Optional[str] = None  # source location, when applicable
    line_number: Optional[int] = None
    exception_id: Optional[str] = None  # set when an approved exception covers this finding
    exception_reason: Optional[str] = None  # justification recorded with the exception


@dataclass
class GateDecision:
    """Outcome of a security-gate evaluation (serialized to gate-decision.json)."""
    status: str  # PASSED, CONDITIONAL, or BLOCKED
    summary: str  # one-line human-readable summary
    findings_count: int  # total findings evaluated, including excepted ones
    findings_by_severity: Dict[str, int] = field(default_factory=dict)
    findings_by_type: Dict[str, Dict[str, int]] = field(default_factory=dict)  # type -> severity -> count
    blocking_issues: List[Dict] = field(default_factory=list)  # findings matching BLOCK rules
    warnings: List[Dict] = field(default_factory=list)  # findings matching CONDITIONAL rules
    exceptions_applied: List[Dict] = field(default_factory=list)  # findings skipped via exception
    # NOTE(review): utcnow() is deprecated and yields a naive timestamp with
    # no timezone marker — consider datetime.now(timezone.utc) instead.
    timestamp: str = field(default_factory=lambda: datetime.utcnow().isoformat())


class SecurityGateEngine:
    """Evaluate security findings against a policy and make gate decisions.

    Policy shape: `blocking_rules` keyed by finding type then lower-case
    severity, plus `compliance_rules` matched by rule-id prefix (see
    .security/gate-policy.yaml).
    """

    def __init__(self, policy_path: Path):
        """Load the YAML gate policy from policy_path."""
        with open(policy_path) as f:
            self.policy = yaml.safe_load(f)

    def evaluate(self, findings: List[Finding]) -> GateDecision:
        """Evaluate findings against policy and return a GateDecision.

        Findings carrying an exception_id are counted but never block.
        Decision: BLOCKED if any BLOCK rule matches, else CONDITIONAL if
        any CONDITIONAL rule matches, else PASSED.
        """
        findings_by_severity = {'CRITICAL': 0, 'HIGH': 0, 'MEDIUM': 0, 'LOW': 0}
        findings_by_type = {}
        blocking_issues = []
        warnings = []
        exceptions_applied = []

        for finding in findings:
            # Count by severity
            findings_by_severity[finding.severity] = (
                findings_by_severity.get(finding.severity, 0) + 1
            )

            # Count by type.
            # BUG FIX: use .get() for the inner count too — a non-standard
            # severity (e.g. "ERROR" from compliance scanners) previously
            # raised KeyError and aborted the whole gate evaluation.
            if finding.type not in findings_by_type:
                findings_by_type[finding.type] = {
                    'CRITICAL': 0, 'HIGH': 0, 'MEDIUM': 0, 'LOW': 0
                }
            type_counts = findings_by_type[finding.type]
            type_counts[finding.severity] = type_counts.get(finding.severity, 0) + 1

            # Findings with an approved exception are recorded, never gated.
            if finding.exception_id:
                exceptions_applied.append({
                    'finding_id': finding.id,
                    'exception_id': finding.exception_id,
                    'reason': finding.exception_reason
                })
                continue

            blocking_rule = self._get_blocking_rule(finding)
            if not blocking_rule:
                continue

            if blocking_rule['action'] == 'BLOCK':
                blocking_issues.append({
                    'severity': finding.severity,
                    'type': finding.type,
                    'title': finding.title,
                    'description': finding.description,
                    'remediation': finding.remediation or 'Contact security team',
                    'rule_reason': blocking_rule['reason']
                })
            elif blocking_rule['action'] == 'CONDITIONAL':
                warnings.append({
                    'severity': finding.severity,
                    'type': finding.type,
                    'title': finding.title,
                    'conditions': blocking_rule.get('conditions', [])
                })

        # Make decision: any blocker wins; warnings downgrade to CONDITIONAL.
        if blocking_issues:
            status = 'BLOCKED'
            summary = f"Security gate BLOCKED: {len(blocking_issues)} blocking issue(s) found"
        elif warnings:
            status = 'CONDITIONAL'
            summary = f"Security gate CONDITIONAL: {len(warnings)} warning(s) require attention"
        else:
            status = 'PASSED'
            summary = f"Security gate PASSED: No blocking issues found"

        return GateDecision(
            status=status,
            summary=summary,
            findings_count=len(findings),
            findings_by_severity=findings_by_severity,
            findings_by_type=findings_by_type,
            blocking_issues=blocking_issues,
            warnings=warnings,
            exceptions_applied=exceptions_applied
        )

    def _get_blocking_rule(self, finding: Finding) -> Optional[Dict]:
        """Return the policy rule matching a finding, or None.

        Type/severity rules are checked first; compliance rules are
        matched by rule-id prefix as a fallback.
        """
        rules = self.policy.get('blocking_rules', {})
        type_rules = rules.get(finding.type, {})
        rule = type_rules.get(finding.severity.lower())

        if rule is not None:
            if finding.type == 'sca':
                # SCA rules may carry a CVSS threshold.
                # BUG FIX: an SCA finding with no CVSS score (None, and 0.0
                # was also falsy) previously skipped the rule entirely —
                # critical dependency findings failed OPEN. Apply the rule
                # on severity alone when no score is available.
                threshold = rule.get('cvss_threshold', 0)
                if finding.cvss_score is None or finding.cvss_score >= threshold:
                    return rule
                # Below threshold: fall through to compliance rules.
            else:
                return rule

        # Compliance-specific rules (FDA, HIPAA, SOC 2, ...)
        compliance_rules = self.policy.get('compliance_rules', {})
        for compliance, config in compliance_rules.items():
            if not config.get('enabled'):
                continue

            for rule_finding in config.get('findings', []):
                if finding.id.startswith(rule_finding['rule_id']):
                    return {
                        'action': rule_finding['action'],
                        'reason': f"Compliance violation: {compliance}",
                        'exceptions_allowed': False
                    }

        return None


def _run_cli() -> None:
    """CLI entry point: load findings and policy, evaluate, write the decision."""
    import argparse
    from dataclasses import asdict

    parser = argparse.ArgumentParser(description='Security Gate Decision Engine')
    parser.add_argument('--findings', required=True, help='Findings JSON file')
    parser.add_argument('--policy', required=True, help='Gate policy YAML file')
    parser.add_argument('--output', default='gate-decision.json', help='Output decision file')
    args = parser.parse_args()

    # Load findings from the aggregated JSON report.
    with open(args.findings) as fh:
        findings_data = json.load(fh)

    findings = []
    for raw in findings_data.get('findings', []):
        findings.append(Finding(
            id=raw['id'],
            type=raw['type'],
            severity=raw['severity'],
            title=raw['title'],
            description=raw.get('description', ''),
            remediation=raw.get('remediation'),
            cvss_score=raw.get('cvss_score'),
            cwe=raw.get('cwe'),
            file_path=raw.get('file_path'),
            line_number=raw.get('line_number'),
            exception_id=raw.get('exception_id'),
            exception_reason=raw.get('exception_reason')
        ))

    engine = SecurityGateEngine(Path(args.policy))
    decision = engine.evaluate(findings)

    # Persist the decision for downstream pipeline steps (jq, PR comment).
    with open(args.output, 'w') as fh:
        json.dump(asdict(decision), fh, indent=2)

    print(f"Gate Decision: {decision.status}")
    print(f"Summary: {decision.summary}")
    print(f"Blocking Issues: {len(decision.blocking_issues)}")

    # Non-zero exit tells CI to fail the gate.
    exit(1 if decision.status == 'BLOCKED' else 0)


if __name__ == '__main__':
    _run_cli()

Note: This concludes Part 2 (sections M.1.3–M.1.4). The remaining sections M.2–M.5 are provided in the consolidated final version of the Security Operations document, which merges Part 1 and Part 2.