Production-ready containerization orchestrator with multi-stage builds, security hardening, and Kubernetes optimization
<codex_skill_adapter>
Adapter notes: interpret `$ops-deploy-containerize` as this skill's invocation name and substitute `{{SC_ARGS}}` with the user-supplied arguments (treat `{{SC_ARGS}}` as empty when none are given). Map `spawn_agent(...)` patterns to the Codex `spawn_agent(...)` equivalent, map plan updates to `update_plan`, and consult `config.toml` when the original command mentions MCP (`ops:deploy:containerize`).
</codex_skill_adapter>
STEP 1: Initialize containerization session and analyze project architecture
Session state file: /tmp/containerize-session-$SESSION_ID.json
# Initialize containerization session state
# Persist the session state as JSON.
# printf with a single-quoted skeleton interpolates each value exactly once, and the
# quoted redirection target keeps an unusual SESSION_ID from splitting the path.
# NOTE(review): values containing double quotes would still produce invalid JSON —
# sanitize SESSION_ID / {{SC_ARGS}} upstream or switch to jq if that can occur.
printf '{
  "sessionId": "%s",
  "targetProject": "%s",
  "detectedTechnologies": [],
  "containerStrategy": "auto-detect",
  "securityProfile": "production",
  "deploymentTarget": "kubernetes"
}\n' "$SESSION_ID" "{{SC_ARGS}}" > "/tmp/containerize-session-$SESSION_ID.json"
STEP 2: Comprehensive project analysis with parallel sub-agent coordination
TRY:
IF project_complexity == "multi-service" OR technology_stack == "polyglot":
LAUNCH parallel sub-agents for comprehensive project analysis:
Agent 1: Technology Stack Analysis: Analyze all build files, dependencies, and frameworks
Agent 2: Security & Compliance Assessment: Evaluate security requirements and compliance needs
Agent 3: Performance & Optimization: Analyze performance requirements and optimization opportunities
Agent 4: Deployment Architecture: Design deployment strategy and infrastructure requirements
ELSE:
EXECUTE streamlined single-service containerization analysis:
# Single-service analysis workflow
echo "🔍 Analyzing single-service containerization requirements..."
STEP 3: Intelligent Dockerfile generation based on detected technology stack
CASE detected_technology: WHEN "rust":
Rust Projects (Axum/Warp optimized):
# ---- Build stage ----
# rust:*-alpine targets musl, so the release binary runs on the plain Alpine runtime image below.
FROM rust:1.80-alpine AS builder
WORKDIR /app
# Install build dependencies (openssl-dev/pkgconfig are needed by common TLS-using crates)
RUN apk add --no-cache musl-dev pkgconfig openssl-dev
# Cache dependencies: compile once against a dummy main so this layer is reused
# until Cargo.toml/Cargo.lock change
COPY Cargo.toml Cargo.lock ./
RUN mkdir src && echo "fn main() {}" > src/main.rs
RUN cargo build --release && rm -rf src/
# Build application; `touch` bumps main.rs's mtime so cargo rebuilds over the cached dummy artifact
COPY src/ src/
RUN touch src/main.rs && cargo build --release
# ---- Runtime stage ----
FROM alpine:3.19
RUN apk add --no-cache ca-certificates tzdata
# Dedicated non-root user (uid/gid 1001) for the container process
RUN addgroup -g 1001 -S appgroup && adduser -u 1001 -S appuser -G appgroup
WORKDIR /app
# NOTE(review): assumes the crate's binary is named `app` — confirm against [package].name in Cargo.toml
COPY --from=builder /app/target/release/app /app/
RUN chown -R appuser:appgroup /app
USER appuser
EXPOSE 8080
CMD ["./app"]
Go Projects:
# ---- Build stage ----
FROM golang:1.22-alpine AS builder
WORKDIR /app
# Install build dependencies; certs and zoneinfo are also harvested for the scratch runtime below
RUN apk add --no-cache git ca-certificates tzdata
# Cache dependencies: this layer is reused until go.mod/go.sum change
COPY go.mod go.sum ./
RUN go mod download && go mod verify
# Build a fully static binary (CGO disabled) so it can run on `scratch`
COPY . .
# NOTE(review): assumes the entry point lives at ./cmd/server — confirm per project
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o app ./cmd/server
# ---- Runtime stage: empty base image — nothing but binary, certs, and tzdata ----
FROM scratch
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder /usr/share/zoneinfo /usr/share/zoneinfo
COPY --from=builder /app/app /app
EXPOSE 8080
CMD ["/app"]
Java Projects (Spring Boot/Quarkus):
# ---- Build stage ----
FROM eclipse-temurin:21-jdk-alpine AS builder
WORKDIR /app
# Cache dependencies: wrapper + pom are copied first so the go-offline layer is
# reused until the build definition changes
COPY pom.xml ./
COPY mvnw ./
COPY .mvn .mvn
# NOTE(review): relies on mvnw carrying the executable bit in the build context
RUN ./mvnw dependency:go-offline -B
# Build application (tests are skipped via -DskipTests)
COPY src src
RUN ./mvnw clean package -DskipTests -B
# ---- Runtime stage: JRE-only image, no compiler toolchain ----
FROM eclipse-temurin:21-jre-alpine
# Dedicated non-root user (uid/gid 1001) for the container process
RUN addgroup -g 1001 -S appgroup && adduser -u 1001 -S appuser -G appgroup
WORKDIR /app
# NOTE(review): assumes the build produces exactly one jar in target/ — a shaded/original pair would break this
COPY --from=builder /app/target/*.jar app.jar
RUN chown appuser:appgroup app.jar
USER appuser
EXPOSE 8080
ENTRYPOINT ["java", "-jar", "app.jar"]
Deno Projects:
# ---- Build stage (if compilation needed) ----
FROM denoland/deno:1.46.0 AS builder
WORKDIR /app
# Keep the module cache inside the workdir: the default DENO_DIR lives outside /app,
# so without this the cache would never reach the runtime stage and the container
# would re-download every dependency on first start.
ENV DENO_DIR=/app/.deno_cache
# Cache dependencies (deno.lock is optional; the glob COPY succeeds because deno.json is a guaranteed match)
COPY deno.json deno.lock* ./
# Tolerate projects without a deps.ts entry file, matching the build step's best-effort style below
RUN deno cache deps.ts || true
# Copy source and compile if needed
COPY . .
RUN deno task build || echo "No build step required"
# ---- Runtime stage ----
FROM denoland/deno:1.46.0
# Point the runtime at the pre-populated cache copied from the builder
ENV DENO_DIR=/app/.deno_cache
# Dedicated non-root user (Debian-based image, hence groupadd/useradd)
RUN groupadd -g 1001 appgroup && useradd -u 1001 -g appgroup appuser
WORKDIR /app
COPY --from=builder /app .
RUN chown -R appuser:appgroup /app
USER appuser
EXPOSE 8080
CMD ["deno", "task", "start"]
WHEN "go":
Go Projects (ConnectRPC optimized):
# ---- Build stage with Go modules caching ----
FROM golang:1.22-alpine AS builder
WORKDIR /app
# Install build dependencies for ConnectRPC (protobuf toolchain for codegen)
RUN apk add --no-cache git ca-certificates tzdata protobuf
# Cache Go modules: this layer is reused until go.mod/go.sum change
COPY go.mod go.sum ./
RUN go mod download && go mod verify
# Build a static binary; -ldflags '-w -s' strips DWARF and symbol tables to shrink the image
COPY . .
# NOTE(review): assumes the entry point lives at ./cmd/server — confirm per project
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags '-w -s' -o app ./cmd/server
# ---- Minimal runtime stage: only binary, certs, and tzdata ----
FROM scratch
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder /usr/share/zoneinfo /usr/share/zoneinfo
COPY --from=builder /app/app /app
# 8080/9090 exposed; presumably HTTP and RPC/metrics — confirm against server config
EXPOSE 8080 9090
CMD ["/app"]
WHEN "java":
Java Projects (Spring Boot/Quarkus with Temporal):
# ---- Build stage with Maven/Gradle support ----
FROM eclipse-temurin:21-jdk-alpine AS builder
WORKDIR /app
# NOTE: COPY is not shell-processed, so constructs like `2>/dev/null || true` are
# invalid Dockerfile syntax, and multi-source COPY requires a directory destination.
# Copy the whole context once instead; this trades per-layer dependency caching for
# a build that works with either Maven or Gradle.
COPY . .
# Resolve dependencies and build with whichever build tool the project uses;
# fail loudly when neither build definition is present.
RUN if [ -f pom.xml ]; then ./mvnw dependency:go-offline -B && ./mvnw clean package -DskipTests -B; \
    elif [ -f build.gradle ]; then ./gradlew build -x test; \
    else echo "No pom.xml or build.gradle found" && exit 1; fi
# Normalize the artifact path inside the builder so the runtime COPY has a single,
# predictable source regardless of build tool.
RUN cp target/*.jar app.jar 2>/dev/null || cp build/libs/*.jar app.jar
# ---- Runtime stage with JVM optimization ----
FROM eclipse-temurin:21-jre-alpine
# Dedicated non-root user (uid/gid 1001) for the container process
RUN addgroup -g 1001 -S appgroup && adduser -u 1001 -S appuser -G appgroup
WORKDIR /app
COPY --from=builder /app/app.jar app.jar
RUN chown appuser:appgroup app.jar
USER appuser
EXPOSE 8080 9090
ENTRYPOINT ["java", "-XX:+UseContainerSupport", "-jar", "app.jar"]
WHEN "deno":
Deno Projects (Fresh 2.0 optimized):
# ---- Build stage with Fresh optimization ----
FROM denoland/deno:2.1.4 AS builder
WORKDIR /app
# Keep the module cache inside the workdir: the default DENO_DIR lives outside /app,
# so without this the cache would never reach the runtime stage and the container
# would re-download every dependency on first start.
ENV DENO_DIR=/app/.deno_cache
# Cache dependencies with JSR support (lock file and import map are optional; the
# glob COPY succeeds because deno.json is a guaranteed match)
COPY deno.json deno.lock* import_map.json* ./
RUN deno cache --node-modules-dir=auto --reload main.ts || deno cache deps.ts
# Copy source and build the Fresh app
COPY . .
RUN deno task build
# ---- Runtime stage ----
FROM denoland/deno:2.1.4
# Point the runtime at the pre-populated cache copied from the builder
ENV DENO_DIR=/app/.deno_cache
# Dedicated non-root user (Debian-based image, hence groupadd/useradd)
RUN groupadd -g 1001 appgroup && useradd -u 1001 -g appgroup appuser
WORKDIR /app
COPY --from=builder /app .
RUN chown -R appuser:appgroup /app
USER appuser
EXPOSE 8000
CMD ["deno", "task", "start"]
STEP 4: Advanced security hardening with production-grade configurations
TRY:
Security Context Implementation:
# Generate security-hardened configuration.
# The heredoc body must be valid YAML: the securityContext children need nesting,
# otherwise Kubernetes rejects the manifest as a flat list of top-level keys.
cat > security-context.yaml << 'EOF'
securityContext:
  runAsNonRoot: true
  runAsUser: 1001
  runAsGroup: 1001
  readOnlyRootFilesystem: true
  allowPrivilegeEscalation: false
  capabilities:
    drop:
      - ALL
    add:
      - NET_BIND_SERVICE
EOF
Secret Management Strategy:
FOR EACH environment IN [development, staging, production]:
# Environment-specific secret management: local .env files are acceptable only for
# development; staging and production must use cluster-native secret stores.
case $environment in
"development")
echo "Using local environment variables and .env files"
;;
"staging"|"production")
echo "Using Kubernetes secrets and external secret operators"
;;
esac
Image Security Scanning Integration:
# Automated security scanning pipeline.
# Lint the Dockerfile first — it is the cheapest check, so it fails fast before any
# build time is spent.
hadolint Dockerfile
docker build -t temp-image:$SESSION_ID .
# Fail the pipeline (exit code 1) on HIGH/CRITICAL CVEs in the built image
trivy image --exit-code 1 --severity HIGH,CRITICAL temp-image:$SESSION_ID
docker scout cves temp-image:$SESSION_ID
# Remove the throwaway image so repeated scan runs do not accumulate local images
docker rmi temp-image:$SESSION_ID
CATCH (security_scan_failed):
echo "⚠️ Security scan failed. Manual review required for:"
echo " - Dockerfile best practices violations"
echo " - High/Critical CVEs in base images"
echo " - Secret detection in source code"
STEP 5: Performance optimization with intelligent caching strategies
Layer Optimization Implementation:
# Generate optimized .dockerignore.
# Besides build-speed exclusions, credential files are excluded so secrets can
# never be baked into an image layer via the build context.
cat > .dockerignore << 'EOF'
# Version control
.git
.gitignore
# Documentation
README.md
docs/
*.md
# Development tools
.vscode/
.idea/
.devcontainer/
# Build artifacts
target/
dist/
build/
node_modules/
# OS files
.DS_Store
Thumbs.db
# Logs and temporary files
*.log
tmp/
.tmp/
# Secrets and local credentials — never let these reach the build context
.env
.env.*
*.pem
*.key
EOF
Build Cache Strategy:
FOR EACH build_stage IN [dependencies, source, artifacts]:
# Stage-specific cache optimization: each stage gets its own layering tactic so a
# source-only change never invalidates the dependency layers.
case $build_stage in
"dependencies")
echo "Implementing dependency layer caching with package lock files"
;;
"source")
echo "Separating source code from configuration for optimal rebuild"
;;
"artifacts")
echo "Multi-stage artifact copying with minimal final image"
;;
esac
STEP 6: Advanced health checks and observability integration
Health Check Implementation:
# Intelligent health check based on service type: tries an HTTP probe, then a raw
# TCP probe, then a project-supplied script before reporting unhealthy.
# NOTE(review): curl/nc must exist in the runtime image — neither is present in
# `scratch` or minimal Alpine bases; pick the probe that matches the base image.
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
CMD curl -f http://localhost:8080/health || \
nc -z localhost 8080 || \
./custom-health-check.sh || \
exit 1
# Metrics exposure for Prometheus (8080 app, 9090 metrics; 8081 presumably admin — confirm)
EXPOSE 8080 9090 8081
LABEL prometheus.scrape="true"
LABEL prometheus.port="9090"
LABEL prometheus.path="/metrics"
STEP 7: Development environment orchestration with Docker Compose
Generate Development Docker Compose:
# docker-compose.dev.yml