Remove validation package and update Dockerfiles for BFF and Portal

- Deleted the @customer-portal/validation package to streamline dependencies.
- Updated Dockerfiles for BFF and Portal to reflect changes in package structure and optimize build processes.
- Adjusted import statements in BFF controllers to use the new Zod validation approach.
- Enhanced entrypoint script in BFF to include database and cache readiness checks before application startup.
- Cleaned up .gitignore to ignore unnecessary files and maintain clarity in project structure.
This commit is contained in:
barsa 2025-12-02 11:06:54 +09:00
parent 68561fdf1d
commit dc9a5d1448
47 changed files with 1213 additions and 425 deletions

83
.dockerignore Normal file
View File

@@ -0,0 +1,83 @@
# =============================================================================
# Docker Build Ignore - Reduce context size for faster builds
# =============================================================================
# Dependencies (installed fresh in containers)
node_modules/
**/node_modules/
# Build outputs (built in container)
dist/
**/dist/
.next/
**/.next/
.turbo/
**/.turbo/
# Git
.git/
.gitignore
# IDE and editors
.idea/
.vscode/
*.swp
*.swo
.DS_Store
# Environment files (secrets passed via env vars)
.env
.env.*
!.env.example
env/
# Logs
logs/
*.log
npm-debug.log*
pnpm-debug.log*
# Test and coverage
coverage/
.nyc_output/
*.test.ts
*.spec.ts
**/__tests__/
# Documentation (not needed in runtime)
docs/
*.md
!README.md
# Development tools
.husky/
.eslintcache
tsconfig.tsbuildinfo
**/tsconfig.tsbuildinfo
# Docker artifacts (don't send to context)
*.tar
*.tar.gz
*.sha256
.build-logs/
# Secrets (never include)
secrets/
*.pem
*.key
*.cert
# Misc
tmp/
temp/
.cache/
Thumbs.db
# Sim manager migration (one-time tool)
sim-manager-migration/
# Build artifacts already saved
portal-frontend.*.tar*
portal-backend.*.tar*
build-output.log

View File

@@ -1 +0,0 @@
CSRF_SECRET_KEY=your-secure-csrf-secret-key-minimum-32-characters-long-for-development

6
.gitignore vendored
View File

@@ -140,9 +140,9 @@ temp/
*.sqlite *.sqlite
*.db *.db
# Docker # Docker - keep configs but ignore secrets
.dockerignore docker/portainer/stack.env
docker/ .build-logs/
# Prisma # Prisma
prisma/migrations/dev.db* prisma/migrations/dev.db*

View File

@@ -1,122 +1,106 @@
# 🚀 Backend (BFF) Dockerfile - Production Grade (pnpm v10) # =============================================================================
# - Uses pnpm's injected workspace packages (no legacy flags) # Backend (BFF) Dockerfile - Production Grade
# - pnpm deploy creates minimal production-only install # =============================================================================
# - Prisma + bcrypt built only for Alpine # Uses Alpine throughout for consistent native module builds
# - No redundant installs # Uses pnpm prune instead of pnpm deploy to preserve Prisma client
# =============================================================================
# ===================================================== ARG PNPM_VERSION=10.15.0
# Stage 1: Dependencies (Debian for native builds) ARG NODE_VERSION=22
# =====================================================
FROM node:22-bookworm-slim AS deps
RUN apt-get update && apt-get install -y dumb-init ca-certificates \ # =============================================================================
&& rm -rf /var/lib/apt/lists/* \ # Stage 1: Builder - Install, build, and prune for production
&& corepack enable && corepack prepare pnpm@10.15.0 --activate # =============================================================================
FROM node:${NODE_VERSION}-alpine AS builder
ARG PNPM_VERSION
# Install build dependencies for native modules (bcrypt, prisma)
RUN apk add --no-cache \
python3 make g++ pkgconfig openssl-dev libc6-compat \
&& corepack enable \
&& corepack prepare pnpm@${PNPM_VERSION} --activate
WORKDIR /app WORKDIR /app
# Copy workspace configuration first (better layer caching)
COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml ./ COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml ./
COPY packages/domain/package.json ./packages/domain/ COPY packages/domain/package.json ./packages/domain/
COPY packages/logging/package.json ./packages/logging/ COPY packages/logging/package.json ./packages/logging/
COPY packages/validation/package.json ./packages/validation/
COPY apps/bff/package.json ./apps/bff/ COPY apps/bff/package.json ./apps/bff/
RUN pnpm install --frozen-lockfile --prefer-offline --config.ignore-scripts=false # Install all dependencies
ENV HUSKY=0
RUN pnpm install --frozen-lockfile
# ===================================================== # Copy source code
# Stage 2: Builder (compile TypeScript) COPY tsconfig.json tsconfig.base.json ./
# =====================================================
FROM node:22-bookworm-slim AS builder
RUN apt-get update && apt-get install -y ca-certificates \
&& rm -rf /var/lib/apt/lists/* \
&& corepack enable && corepack prepare pnpm@10.15.0 --activate
WORKDIR /app
COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml tsconfig.json tsconfig.base.json ./
COPY packages/ ./packages/ COPY packages/ ./packages/
COPY apps/bff/ ./apps/bff/ COPY apps/bff/ ./apps/bff/
COPY --from=deps /app/node_modules ./node_modules
# No second pnpm install reuse deps layer # Build workspace packages
RUN pnpm --filter @customer-portal/domain build && \
pnpm --filter @customer-portal/logging build
# Build shared packages # Generate Prisma client (for Alpine/musl)
RUN pnpm --filter @customer-portal/domain build \ RUN cd apps/bff && pnpm exec prisma generate
&& pnpm --filter @customer-portal/logging build \
&& pnpm --filter @customer-portal/validation build
# Build BFF (prisma types generated in dev, not needed here) # Build BFF
RUN pnpm --filter @customer-portal/bff build RUN pnpm --filter @customer-portal/bff build
# ===================================================== # Prune dev dependencies IN PLACE - this keeps .prisma/client intact
# Stage 3: Production Dependencies (Alpine, pnpm deploy) RUN pnpm prune --prod
# =====================================================
FROM node:22-alpine AS prod-deps
RUN corepack enable && corepack prepare pnpm@10.15.0 --activate # Remove unnecessary files to reduce image size
RUN rm -rf /app/packages/*/src /app/apps/bff/src \
/app/packages/*/*.ts /app/apps/bff/*.ts \
/app/**/*.map /app/**/*.tsbuildinfo \
/root/.local/share/pnpm/store
WORKDIR /app # =============================================================================
# Stage 2: Production - Minimal runtime image
# =============================================================================
FROM node:${NODE_VERSION}-alpine AS production
# Minimal manifests for dependency graph LABEL org.opencontainers.image.title="Customer Portal BFF" \
COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml ./ org.opencontainers.image.description="NestJS Backend-for-Frontend API" \
COPY packages/domain/package.json ./packages/domain/ org.opencontainers.image.vendor="Customer Portal"
COPY packages/logging/package.json ./packages/logging/
COPY packages/validation/package.json ./packages/validation/
COPY apps/bff/package.json ./apps/bff/
COPY apps/bff/prisma ./apps/bff/prisma
ENV HUSKY=0 # Runtime dependencies only
RUN apk add --no-cache \
RUN apk add --no-cache --virtual .build-deps python3 make g++ pkgconfig openssl-dev \ dumb-init wget openssl netcat-openbsd libc6-compat \
# 1) Install full deps (needed for prisma CLI + bcrypt build)
&& pnpm install --frozen-lockfile --ignore-scripts \
# 2) Rebuild bcrypt for musl
&& pnpm rebuild bcrypt \
# 3) Generate Prisma client for Alpine (musl) the only runtime client
&& cd apps/bff && pnpm exec prisma generate && cd ../.. \
# 4) Create production-only deployment for BFF
&& pnpm deploy --filter @customer-portal/bff --prod /app/deploy \
# 5) Remove build-time node_modules and cleanup
&& rm -rf /app/node_modules /app/pnpm-lock.yaml \
/root/.cache /root/.npm /tmp/* /var/cache/apk/* \
&& apk del .build-deps
# /app/deploy now contains: package.json + node_modules for BFF prod deps only
# =====================================================
# Stage 4: Production Runtime (minimal)
# =====================================================
FROM node:22-alpine AS production
RUN addgroup --system --gid 1001 nodejs \
&& adduser --system --uid 1001 nestjs
# Only tools needed at runtime
RUN apk add --no-cache wget dumb-init openssl netcat-openbsd \
&& rm -rf /var/cache/apk/* && rm -rf /var/cache/apk/*
# Create non-root user
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 nestjs
WORKDIR /app WORKDIR /app
# Deploy tree (prod deps for BFF only) # Copy pruned node_modules (includes .prisma/client)
COPY --from=prod-deps --chown=nestjs:nodejs /app/deploy ./ COPY --from=builder --chown=nestjs:nodejs /app/node_modules ./node_modules
# Compiled code and prisma schema # Copy workspace package outputs
COPY --from=builder --chown=nestjs:nodejs /app/packages/domain/dist ./packages/domain/dist COPY --from=builder --chown=nestjs:nodejs /app/packages/domain/dist ./packages/domain/dist
COPY --from=builder --chown=nestjs:nodejs /app/packages/domain/package.json ./packages/domain/package.json
COPY --from=builder --chown=nestjs:nodejs /app/packages/logging/dist ./packages/logging/dist COPY --from=builder --chown=nestjs:nodejs /app/packages/logging/dist ./packages/logging/dist
COPY --from=builder --chown=nestjs:nodejs /app/packages/validation/dist ./packages/validation/dist COPY --from=builder --chown=nestjs:nodejs /app/packages/logging/package.json ./packages/logging/package.json
# Copy BFF
COPY --from=builder --chown=nestjs:nodejs /app/apps/bff/dist ./apps/bff/dist COPY --from=builder --chown=nestjs:nodejs /app/apps/bff/dist ./apps/bff/dist
COPY --from=builder --chown=nestjs:nodejs /app/apps/bff/prisma ./apps/bff/prisma COPY --from=builder --chown=nestjs:nodejs /app/apps/bff/prisma ./apps/bff/prisma
COPY --from=builder --chown=nestjs:nodejs /app/apps/bff/package.json ./apps/bff/package.json
# Entrypoint and runtime dirs # Copy entrypoint
COPY --chown=nestjs:nodejs apps/bff/scripts/docker-entrypoint.sh /app/docker-entrypoint.sh COPY --chown=nestjs:nodejs apps/bff/scripts/docker-entrypoint.sh /app/docker-entrypoint.sh
RUN chmod +x /app/docker-entrypoint.sh \ RUN chmod +x /app/docker-entrypoint.sh && \
&& mkdir -p /app/secrets /app/logs \ mkdir -p /app/secrets /app/logs && \
&& chown nestjs:nodejs /app/secrets /app/logs chown nestjs:nodejs /app/secrets /app/logs
USER nestjs USER nestjs
EXPOSE 4000 EXPOSE 4000
ENV NODE_ENV=production PORT=4000 ENV NODE_ENV=production PORT=4000
WORKDIR /app/apps/bff WORKDIR /app/apps/bff

View File

@@ -33,7 +33,6 @@
"dependencies": { "dependencies": {
"@customer-portal/domain": "workspace:*", "@customer-portal/domain": "workspace:*",
"@customer-portal/logging": "workspace:*", "@customer-portal/logging": "workspace:*",
"@customer-portal/validation": "workspace:*",
"@nestjs/bullmq": "^11.0.3", "@nestjs/bullmq": "^11.0.3",
"@nestjs/common": "^11.1.6", "@nestjs/common": "^11.1.6",
"@nestjs/config": "^4.0.2", "@nestjs/config": "^4.0.2",

View File

@@ -5,8 +5,9 @@ set -e
# Docker Entrypoint Script # Docker Entrypoint Script
# ============================================================================= # =============================================================================
# Handles runtime setup before starting the application: # Handles runtime setup before starting the application:
# - Waits for database and cache dependencies
# - Decodes SF_PRIVATE_KEY_BASE64 to file if provided # - Decodes SF_PRIVATE_KEY_BASE64 to file if provided
# - Runs Prisma migrations if DATABASE_URL is set # - Runs Prisma migrations if RUN_MIGRATIONS=true
# ============================================================================= # =============================================================================
echo "🚀 Starting Customer Portal Backend..." echo "🚀 Starting Customer Portal Backend..."
@@ -21,6 +22,34 @@ if [ -n "$SF_PRIVATE_KEY_BASE64" ]; then
echo "✅ Salesforce private key configured" echo "✅ Salesforce private key configured"
fi fi
# Wait for database if DATABASE_URL is set
# Extract host:port from postgresql://user:pass@host:port/db
if [ -n "$DATABASE_URL" ]; then
DB_HOST=$(echo "$DATABASE_URL" | sed -E 's|.*@([^:/]+):([0-9]+)/.*|\1|')
DB_PORT=$(echo "$DATABASE_URL" | sed -E 's|.*@([^:/]+):([0-9]+)/.*|\2|')
if [ -n "$DB_HOST" ] && [ -n "$DB_PORT" ]; then
echo "⏳ Waiting for database ($DB_HOST:$DB_PORT)..."
until nc -z "$DB_HOST" "$DB_PORT" 2>/dev/null; do
sleep 2
done
echo "✅ Database is ready"
fi
fi
# Wait for Redis if REDIS_URL is set
# Extract host:port from redis://host:port/db
if [ -n "$REDIS_URL" ]; then
REDIS_HOST=$(echo "$REDIS_URL" | sed -E 's|redis://([^:/]+):([0-9]+).*|\1|')
REDIS_PORT=$(echo "$REDIS_URL" | sed -E 's|redis://([^:/]+):([0-9]+).*|\2|')
if [ -n "$REDIS_HOST" ] && [ -n "$REDIS_PORT" ]; then
echo "⏳ Waiting for cache ($REDIS_HOST:$REDIS_PORT)..."
until nc -z "$REDIS_HOST" "$REDIS_PORT" 2>/dev/null; do
sleep 2
done
echo "✅ Cache is ready"
fi
fi
# Run database migrations if enabled # Run database migrations if enabled
if [ "$RUN_MIGRATIONS" = "true" ] && [ -n "$DATABASE_URL" ]; then if [ "$RUN_MIGRATIONS" = "true" ] && [ -n "$DATABASE_URL" ]; then
echo "🗄️ Running database migrations..." echo "🗄️ Running database migrations..."

View File

@@ -1,10 +1,9 @@
import { Module } from "@nestjs/common"; import { Module } from "@nestjs/common";
import { APP_FILTER, APP_PIPE } from "@nestjs/core"; import { APP_PIPE } from "@nestjs/core";
import { RouterModule } from "@nestjs/core"; import { RouterModule } from "@nestjs/core";
import { ConfigModule, ConfigService } from "@nestjs/config"; import { ConfigModule, ConfigService } from "@nestjs/config";
import { ThrottlerModule } from "@nestjs/throttler"; import { ThrottlerModule } from "@nestjs/throttler";
import { ZodValidationPipe } from "nestjs-zod"; import { ZodValidationPipe } from "nestjs-zod";
import { ZodValidationExceptionFilter } from "@customer-portal/validation/nestjs";
// Configuration // Configuration
import { appConfig } from "@bff/core/config/app.config"; import { appConfig } from "@bff/core/config/app.config";
@@ -99,10 +98,6 @@ import { HealthModule } from "@bff/modules/health/health.module"; import { HealthModule } from "@bff/modules/health/health.module";
provide: APP_PIPE, provide: APP_PIPE,
useClass: ZodValidationPipe, useClass: ZodValidationPipe,
}, },
{
provide: APP_FILTER,
useClass: ZodValidationExceptionFilter,
},
], ],
}) })
export class AppModule {} export class AppModule {}

View File

@@ -22,7 +22,7 @@ import {
} from "./guards/failed-login-throttle.guard"; } from "./guards/failed-login-throttle.guard";
import { LoginResultInterceptor } from "./interceptors/login-result.interceptor"; import { LoginResultInterceptor } from "./interceptors/login-result.interceptor";
import { Public } from "../../decorators/public.decorator"; import { Public } from "../../decorators/public.decorator";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import type { RequestWithUser } from "@bff/modules/auth/auth.types"; import type { RequestWithUser } from "@bff/modules/auth/auth.types";
import { SalesforceReadThrottleGuard } from "@bff/integrations/salesforce/guards/salesforce-read-throttle.guard"; import { SalesforceReadThrottleGuard } from "@bff/integrations/salesforce/guards/salesforce-read-throttle.guard";
import { SalesforceWriteThrottleGuard } from "@bff/integrations/salesforce/guards/salesforce-write-throttle.guard"; import { SalesforceWriteThrottleGuard } from "@bff/integrations/salesforce/guards/salesforce-write-throttle.guard";

View File

@@ -13,7 +13,7 @@ import {
import { InvoicesOrchestratorService } from "./services/invoices-orchestrator.service"; import { InvoicesOrchestratorService } from "./services/invoices-orchestrator.service";
import { WhmcsService } from "@bff/integrations/whmcs/whmcs.service"; import { WhmcsService } from "@bff/integrations/whmcs/whmcs.service";
import { MappingsService } from "@bff/modules/id-mappings/mappings.service"; import { MappingsService } from "@bff/modules/id-mappings/mappings.service";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import type { RequestWithUser } from "@bff/modules/auth/auth.types"; import type { RequestWithUser } from "@bff/modules/auth/auth.types";
import type { import type {

View File

@@ -1,6 +1,6 @@
import { Body, Controller, Post, Request, UsePipes, Inject, UseGuards } from "@nestjs/common"; import { Body, Controller, Post, Request, UsePipes, Inject, UseGuards } from "@nestjs/common";
import { Logger } from "nestjs-pino"; import { Logger } from "nestjs-pino";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import { CheckoutService } from "../services/checkout.service"; import { CheckoutService } from "../services/checkout.service";
import { import {
CheckoutCart, CheckoutCart,

View File

@@ -15,7 +15,7 @@ import { Throttle, ThrottlerGuard } from "@nestjs/throttler";
import { OrderOrchestrator } from "./services/order-orchestrator.service"; import { OrderOrchestrator } from "./services/order-orchestrator.service";
import type { RequestWithUser } from "@bff/modules/auth/auth.types"; import type { RequestWithUser } from "@bff/modules/auth/auth.types";
import { Logger } from "nestjs-pino"; import { Logger } from "nestjs-pino";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import { import {
createOrderRequestSchema, createOrderRequestSchema,
orderCreateResponseSchema, orderCreateResponseSchema,

View File

@@ -1,7 +1,7 @@
import { Body, Controller, Post, Request, UsePipes, Headers } from "@nestjs/common"; import { Body, Controller, Post, Request, UsePipes, Headers } from "@nestjs/common";
import type { RequestWithUser } from "@bff/modules/auth/auth.types"; import type { RequestWithUser } from "@bff/modules/auth/auth.types";
import { SimOrderActivationService } from "./sim-order-activation.service"; import { SimOrderActivationService } from "./sim-order-activation.service";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import { import {
simOrderActivationRequestSchema, simOrderActivationRequestSchema,
type SimOrderActivationRequest, type SimOrderActivationRequest,

View File

@@ -42,7 +42,7 @@ import {
type SimCancelFullRequest, type SimCancelFullRequest,
type SimChangePlanFullRequest, type SimChangePlanFullRequest,
} from "@customer-portal/domain/sim"; } from "@customer-portal/domain/sim";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import type { RequestWithUser } from "@bff/modules/auth/auth.types"; import type { RequestWithUser } from "@bff/modules/auth/auth.types";
import { SimPlanService } from "./sim-management/services/sim-plan.service"; import { SimPlanService } from "./sim-management/services/sim-plan.service";
import { SimCancellationService } from "./sim-management/services/sim-cancellation.service"; import { SimCancellationService } from "./sim-management/services/sim-cancellation.service";

View File

@@ -1,6 +1,6 @@
import { Controller, Get, Post, Query, Param, Body, Request } from "@nestjs/common"; import { Controller, Get, Post, Query, Param, Body, Request } from "@nestjs/common";
import { SupportService } from "./support.service"; import { SupportService } from "./support.service";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import { import {
supportCaseFilterSchema, supportCaseFilterSchema,
createCaseRequestSchema, createCaseRequestSchema,

View File

@@ -10,7 +10,7 @@ import {
UseGuards, UseGuards,
} from "@nestjs/common"; } from "@nestjs/common";
import { UsersFacade } from "./application/users.facade"; import { UsersFacade } from "./application/users.facade";
import { ZodValidationPipe } from "@customer-portal/validation/nestjs"; import { ZodValidationPipe } from "nestjs-zod";
import { import {
updateCustomerProfileRequestSchema, updateCustomerProfileRequestSchema,
type UpdateCustomerProfileRequest, type UpdateCustomerProfileRequest,

View File

@@ -1,48 +1,43 @@
# 🚀 Frontend (Portal) Dockerfile - Plesk Optimized # =============================================================================
# Multi-stage build for Next.js production deployment via Plesk # Frontend (Portal) Dockerfile - Production Grade
# =============================================================================
# Uses Alpine throughout for consistency
# Next.js standalone output for minimal production image
# =============================================================================
# ===================================================== ARG PNPM_VERSION=10.15.0
# Stage 1: Dependencies - Install all dependencies ARG NODE_VERSION=22
# =====================================================
FROM node:22-alpine AS deps
RUN apk add --no-cache libc6-compat dumb-init \ # =============================================================================
&& corepack enable && corepack prepare pnpm@10.15.0 --activate # Stage 1: Builder - Install dependencies and build Next.js
# =============================================================================
FROM node:${NODE_VERSION}-alpine AS builder
ARG PNPM_VERSION
# Install build dependencies
RUN apk add --no-cache libc6-compat \
&& corepack enable \
&& corepack prepare pnpm@${PNPM_VERSION} --activate
WORKDIR /app WORKDIR /app
# Copy workspace configuration # Copy workspace configuration first (better layer caching)
COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml ./ COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml ./
COPY packages/domain/package.json ./packages/domain/ COPY packages/domain/package.json ./packages/domain/
COPY packages/validation/package.json ./packages/validation/
COPY apps/portal/package.json ./apps/portal/ COPY apps/portal/package.json ./apps/portal/
# Install all dependencies with scripts enabled (esbuild, sharp, etc.) # Install all dependencies
RUN pnpm install --frozen-lockfile --prefer-offline --config.ignore-scripts=false ENV HUSKY=0
RUN pnpm install --frozen-lockfile
# ===================================================== # Copy source code
# Stage 2: Builder - Compile and build Next.js COPY tsconfig.json tsconfig.base.json ./
# =====================================================
FROM node:22-alpine AS builder
RUN corepack enable && corepack prepare pnpm@10.15.0 --activate
WORKDIR /app
# Copy workspace configuration and source
COPY .npmrc pnpm-workspace.yaml package.json pnpm-lock.yaml tsconfig.json tsconfig.base.json ./
COPY packages/ ./packages/ COPY packages/ ./packages/
COPY apps/portal/ ./apps/portal/ COPY apps/portal/ ./apps/portal/
# Ensure public directory exists # Build domain package
RUN mkdir -p /app/apps/portal/public RUN pnpm --filter @customer-portal/domain build
# Copy pre-installed node_modules from deps
COPY --from=deps /app/node_modules ./node_modules
# Build shared packages
RUN pnpm --filter @customer-portal/domain build && \
pnpm --filter @customer-portal/validation build
# Build-time environment variables (baked into Next.js client bundle) # Build-time environment variables (baked into Next.js client bundle)
ARG NEXT_PUBLIC_API_BASE=/api ARG NEXT_PUBLIC_API_BASE=/api
@@ -54,24 +49,33 @@ ENV NODE_ENV=production \ ENV NODE_ENV=production \
NEXT_PUBLIC_APP_NAME=${NEXT_PUBLIC_APP_NAME} \ NEXT_PUBLIC_APP_NAME=${NEXT_PUBLIC_APP_NAME} \
NEXT_PUBLIC_APP_VERSION=${NEXT_PUBLIC_APP_VERSION} NEXT_PUBLIC_APP_VERSION=${NEXT_PUBLIC_APP_VERSION}
WORKDIR /app/apps/portal # Build Next.js (creates standalone output)
RUN pnpm build RUN pnpm --filter @customer-portal/portal build
# ===================================================== # =============================================================================
# Stage 3: Production - Minimal Alpine runtime image # Stage 2: Production - Minimal runtime image
# ===================================================== # =============================================================================
FROM node:22-alpine AS production FROM node:${NODE_VERSION}-alpine AS production
RUN apk add --no-cache wget curl dumb-init libc6-compat \ LABEL org.opencontainers.image.title="Customer Portal Frontend" \
org.opencontainers.image.description="Customer Portal Application" \
org.opencontainers.image.vendor="Customer Portal"
# Runtime dependencies only
RUN apk add --no-cache \
dumb-init \
wget \
curl \
libc6-compat \
&& rm -rf /var/cache/apk/* && rm -rf /var/cache/apk/*
WORKDIR /app
# Create non-root user # Create non-root user
RUN addgroup --system --gid 1001 nodejs && \ RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 nextjs adduser --system --uid 1001 nextjs
# Copy Next.js standalone build WORKDIR /app
# Copy Next.js standalone build (includes all bundled dependencies)
COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/.next/standalone ./ COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/.next/static ./apps/portal/.next/static COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/.next/static ./apps/portal/.next/static
COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/public ./apps/portal/public COPY --from=builder --chown=nextjs:nodejs /app/apps/portal/public ./apps/portal/public

View File

@@ -19,7 +19,6 @@
}, },
"dependencies": { "dependencies": {
"@customer-portal/domain": "workspace:*", "@customer-portal/domain": "workspace:*",
"@customer-portal/validation": "workspace:*",
"@heroicons/react": "^2.2.0", "@heroicons/react": "^2.2.0",
"@hookform/resolvers": "^5.2.1", "@hookform/resolvers": "^5.2.1",
"@tanstack/react-query": "^5.85.5", "@tanstack/react-query": "^5.85.5",

View File

@@ -7,7 +7,7 @@ import {
addressFormToRequest, addressFormToRequest,
type AddressFormData, type AddressFormData,
} from "@customer-portal/domain/customer"; } from "@customer-portal/domain/customer";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
export function useAddressEdit(initial: AddressFormData) { export function useAddressEdit(initial: AddressFormData) {
const handleSave = useCallback(async (formData: AddressFormData) => { const handleSave = useCallback(async (formData: AddressFormData) => {

View File

@@ -9,7 +9,7 @@ import {
type ProfileEditFormData, type ProfileEditFormData,
} from "@customer-portal/domain/customer"; } from "@customer-portal/domain/customer";
import { type UpdateCustomerProfileRequest } from "@customer-portal/domain/auth"; import { type UpdateCustomerProfileRequest } from "@customer-portal/domain/auth";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
export function useProfileEdit(initial: ProfileEditFormData) { export function useProfileEdit(initial: ProfileEditFormData) {
const handleSave = useCallback(async (formData: ProfileEditFormData) => { const handleSave = useCallback(async (formData: ProfileEditFormData) => {

View File

@@ -9,7 +9,7 @@ import { Button, Input, ErrorMessage } from "@/components/atoms";
import { FormField } from "@/components/molecules/FormField/FormField"; import { FormField } from "@/components/molecules/FormField/FormField";
import { useWhmcsLink } from "@/features/auth/hooks"; import { useWhmcsLink } from "@/features/auth/hooks";
import { linkWhmcsRequestSchema, type LinkWhmcsResponse } from "@customer-portal/domain/auth"; import { linkWhmcsRequestSchema, type LinkWhmcsResponse } from "@customer-portal/domain/auth";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
interface LinkWhmcsFormProps { interface LinkWhmcsFormProps {
onTransferred?: (result: LinkWhmcsResponse) => void; onTransferred?: (result: LinkWhmcsResponse) => void;

View File

@@ -11,7 +11,7 @@ import { Button, Input, ErrorMessage } from "@/components/atoms";
import { FormField } from "@/components/molecules/FormField/FormField"; import { FormField } from "@/components/molecules/FormField/FormField";
import { useLogin } from "../../hooks/use-auth"; import { useLogin } from "../../hooks/use-auth";
import { loginRequestSchema } from "@customer-portal/domain/auth"; import { loginRequestSchema } from "@customer-portal/domain/auth";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
import { z } from "zod"; import { z } from "zod";
interface LoginFormProps { interface LoginFormProps {

View File

@@ -10,7 +10,7 @@ import Link from "next/link";
import { Button, Input, ErrorMessage } from "@/components/atoms"; import { Button, Input, ErrorMessage } from "@/components/atoms";
import { FormField } from "@/components/molecules/FormField/FormField"; import { FormField } from "@/components/molecules/FormField/FormField";
import { usePasswordReset } from "../../hooks/use-auth"; import { usePasswordReset } from "../../hooks/use-auth";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
import { passwordResetRequestSchema, passwordResetSchema } from "@customer-portal/domain/auth"; import { passwordResetRequestSchema, passwordResetSchema } from "@customer-portal/domain/auth";
import { z } from "zod"; import { z } from "zod";

View File

@@ -9,7 +9,7 @@ import Link from "next/link";
import { Button, Input, ErrorMessage } from "@/components/atoms"; import { Button, Input, ErrorMessage } from "@/components/atoms";
import { FormField } from "@/components/molecules/FormField/FormField"; import { FormField } from "@/components/molecules/FormField/FormField";
import { useWhmcsLink } from "../../hooks/use-auth"; import { useWhmcsLink } from "../../hooks/use-auth";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
import { import {
setPasswordRequestSchema, setPasswordRequestSchema,
checkPasswordStrength, checkPasswordStrength,

View File

@@ -13,7 +13,7 @@ import {
buildSignupRequest, buildSignupRequest,
} from "@customer-portal/domain/auth"; } from "@customer-portal/domain/auth";
import { addressFormSchema } from "@customer-portal/domain/customer"; import { addressFormSchema } from "@customer-portal/domain/customer";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
import { z } from "zod"; import { z } from "zod";
import { MultiStepForm } from "./MultiStepForm"; import { MultiStepForm } from "./MultiStepForm";

View File

@@ -3,7 +3,7 @@
import { useEffect } from "react"; import { useEffect } from "react";
import { MapPinIcon, ExclamationTriangleIcon } from "@heroicons/react/24/outline"; import { MapPinIcon, ExclamationTriangleIcon } from "@heroicons/react/24/outline";
import { COUNTRY_OPTIONS, getCountryCodeByName } from "@/lib/constants/countries"; import { COUNTRY_OPTIONS, getCountryCodeByName } from "@/lib/constants/countries";
import { useZodForm } from "@customer-portal/validation"; import { useZodForm } from "@/hooks/useZodForm";
import { import {
addressFormSchema, addressFormSchema,
type AddressFormData, type AddressFormData,

View File

@@ -1,6 +1,6 @@
/** /**
* Framework-agnostic Zod form utilities for React environments. * Zod form utilities for React
* Provides predictable error and touched state handling. * Provides predictable error and touched state handling for forms
*/ */
import { useCallback, useMemo, useState } from "react"; import { useCallback, useMemo, useState } from "react";
@@ -215,8 +215,6 @@ export function useZodForm<TValues extends Record<string, unknown>>({
const message = error instanceof Error ? error.message : String(error); const message = error instanceof Error ? error.message : String(error);
setSubmitError(message); setSubmitError(message);
setErrors(prev => ({ ...prev, _form: message })); setErrors(prev => ({ ...prev, _form: message }));
// Errors are captured in state so we avoid rethrowing to prevent unhandled rejections in callers
// Note: Logging should be handled by the consuming application
} finally { } finally {
setIsSubmitting(false); setIsSubmitting(false);
} }
@ -250,3 +248,4 @@ export function useZodForm<TValues extends Record<string, unknown>>({
reset, reset,
}; };
} }

View File

@ -0,0 +1,42 @@
# =============================================================================
# Local development dependencies: PostgreSQL + Redis only.
# The app itself runs on the host; these containers expose default ports.
# =============================================================================
services:
  postgres:
    image: postgres:17-alpine
    container_name: portal_dev_postgres
    environment:
      POSTGRES_USER: ${POSTGRES_USER:-dev}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-dev}
      POSTGRES_DB: ${POSTGRES_DB:-portal_dev}
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-dev} -d ${POSTGRES_DB:-portal_dev}"]
      interval: 5s
      timeout: 3s
      retries: 20
    restart: unless-stopped

  redis:
    image: redis:7-alpine
    container_name: portal_dev_redis
    # Snapshot every 20s if at least 1 key changed; keep logs quiet.
    command: ["redis-server", "--save", "20", "1", "--loglevel", "warning"]
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 3s
      retries: 20
    restart: unless-stopped

volumes:
  postgres_data:
  redis_data:

networks:
  default:
    name: portal_dev

View File

@ -0,0 +1,452 @@
# Complete Portainer Guide for Customer Portal
## Table of Contents
1. [Creating a Stack in Portainer](#creating-a-stack-in-portainer)
2. [Repository vs Upload vs Web Editor](#stack-creation-methods)
3. [Security Concerns & Best Practices](#security-concerns)
4. [Auto-Updating Images](#auto-updating-images)
5. [Recommended Setup for Production](#recommended-production-setup)
---
## Creating a Stack in Portainer
### Step 1: Access Portainer
1. Open Portainer UI (typically at `https://your-server:9443` or via Plesk)
2. Select your environment (usually "local" for Plesk)
3. Go to **Stacks** in the left sidebar
### Step 2: Create New Stack
Click **"+ Add stack"** button
You'll see three creation methods:
- **Web editor** - Paste compose file directly
- **Upload** - Upload a compose file
- **Repository** - Pull from Git repository
### Step 3: Configure the Stack
**Name:** `customer-portal` (lowercase, no spaces)
**Compose content:** Use one of the methods below
**Environment variables:** Add your configuration
### Step 4: Deploy
Click **"Deploy the stack"**
---
## Stack Creation Methods
### Method 1: Web Editor (Simplest)
**How:**
1. Select "Web editor"
2. Paste your `docker-compose.yml` content
3. Add environment variables manually or load from file
**Pros:**
- ✅ Quick and simple
- ✅ No external dependencies
- ✅ Full control over content
**Cons:**
- ❌ Manual updates required
- ❌ No version control
- ❌ Easy to make mistakes when editing
**Best for:** Quick testing, simple deployments
---
### Method 2: Upload (Recommended for Your Case)
**How:**
1. Select "Upload"
2. Upload your `docker-compose.yml` file
3. Optionally upload a `.env` file for environment variables
**Pros:**
- ✅ Version control on your local machine
- ✅ Can prepare and test locally
- ✅ No external network dependencies
- ✅ Works in air-gapped environments
**Cons:**
- ❌ Manual upload for each update
- ❌ Need to manage files locally
**Best for:** Production deployments with manual control
---
### Method 3: Repository (Git Integration)
**How:**
1. Select "Repository"
2. Enter repository URL (GitHub, GitLab, Bitbucket, etc.)
3. Specify branch and compose file path
4. Add authentication if private repo
**Example Configuration:**
```
Repository URL: https://github.com/your-org/customer-portal
Reference: main
Compose path: docker/portainer/docker-compose.yml
```
**For Private Repos:**
- Use a Personal Access Token (PAT) as password
- Or use deploy keys
**Pros:**
- ✅ Version controlled
- ✅ Easy to update (just click "Pull and redeploy")
- ✅ Team can review changes via PR
- ✅ Audit trail of changes
**Cons:**
- ❌ Requires network access to repo
- ❌ Secrets in repo = security risk
- ❌ Need to manage repo access tokens
- ❌ Compose file changes require git push
**Best for:** Teams, CI/CD pipelines, frequent updates
---
### 📌 My Recommendation for Your Case
**Use: Upload + Environment Variables in Portainer UI**
Why:
1. Your compose file rarely changes (it's just orchestration)
2. Sensitive data stays in Portainer, not in Git
3. Image updates are done via environment variables
4. No external dependencies during deployment
---
## Security Concerns
### 🔴 Critical Security Issues
#### 1. Never Store Secrets in Git
```yaml
# ❌ BAD - Secrets in compose file
environment:
JWT_SECRET: "my-actual-secret-here"
DATABASE_URL: "postgresql://user:password@db/prod"
# ✅ GOOD - Use environment variables
environment:
JWT_SECRET: ${JWT_SECRET}
DATABASE_URL: ${DATABASE_URL}
```
#### 2. Never Store Secrets in Docker Images
```dockerfile
# ❌ BAD - Secrets baked into image
ENV JWT_SECRET="my-secret"
COPY secrets/ /app/secrets/
# ✅ GOOD - Mount at runtime
# (secrets passed via env vars or volume mounts)
```
#### 3. Portainer Access Control
```
⚠️ Portainer has full Docker access = root on the host
Best practices:
- Use strong passwords
- Enable 2FA if available
- Restrict network access to Portainer UI
- Use HTTPS only
- Create separate users with limited permissions
```
### 🟡 Medium Security Concerns
#### 4. Environment Variables in Portainer
```
Portainer stores env vars in its database.
This is generally safe, but consider:
- Portainer database is at /data/portainer.db
- Anyone with Portainer admin = sees all secrets
- Backup files may contain secrets
Mitigation:
- Limit Portainer admin access
- Use Docker secrets for highly sensitive data
- Encrypt backups
```
#### 5. Image Trust
```
⚠️ You're loading .tar files - verify their integrity
Best practice:
- Generate checksums when building
- Verify checksums before loading
- Use signed images if possible
```
Add to build script:
```bash
# Generate checksums
sha256sum portal-frontend.latest.tar > portal-frontend.latest.tar.sha256
sha256sum portal-backend.latest.tar > portal-backend.latest.tar.sha256
# Verify on server
sha256sum -c portal-frontend.latest.tar.sha256
sha256sum -c portal-backend.latest.tar.sha256
```
#### 6. Network Exposure
```yaml
# ❌ BAD - Database exposed to host
database:
ports:
- "5432:5432" # Accessible from outside!
# ✅ GOOD - Internal network only
database:
# No ports exposed - only accessible via portal-network
networks:
- portal-network
```
### 🟢 Good Security Practices (Already in Place)
Your current setup does these right:
- ✅ Non-root users in containers
- ✅ Health checks configured
- ✅ Database/Redis not exposed externally
- ✅ Secrets mounted as read-only volumes
- ✅ Production error messages hide sensitive info
---
## Auto-Updating Images
### Option 1: Watchtower (NOT Recommended for Production)
Watchtower automatically updates containers when new images are available.
```yaml
# Add to your stack (if using registry)
watchtower:
image: containrrr/watchtower
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment:
- WATCHTOWER_POLL_INTERVAL=300
- WATCHTOWER_CLEANUP=true
command: --include-stopped portal-frontend portal-backend
```
**Why NOT recommended:**
- ❌ No control over when updates happen
- ❌ No rollback mechanism
- ❌ Can break production unexpectedly
- ❌ Requires images in a registry (not .tar files)
We've disabled Watchtower in your compose:
```yaml
labels:
- "com.centurylinklabs.watchtower.enable=false"
```
---
### Option 2: Portainer Webhooks (Semi-Automatic)
Portainer can expose a webhook URL that triggers stack redeployment.
**Setup:**
1. Go to Stack → Settings
2. Enable "Webhook"
3. Copy the webhook URL
**Trigger from CI/CD:**
```bash
# In your GitHub Actions / GitLab CI
curl -X POST "https://your-portainer:9443/api/stacks/webhook/abc123"
```
**Workflow:**
```
Build Images → Push to Registry → Trigger Webhook → Portainer Redeploys
```
**Pros:**
- ✅ Controlled updates
- ✅ Integrated with CI/CD
- ✅ Can add approval gates
**Cons:**
- ❌ Requires images in a registry
- ❌ Webhook URL is a secret
- ❌ Limited rollback options
---
### Option 3: Manual Script (Recommended for Your Case) ✅
Since you're using `.tar` files (no registry), a manual update script is best:
```bash
# On your local machine after building:
./scripts/plesk/build-images.sh --tag v1.2.3
# Upload to server
scp portal-*.v1.2.3.tar user@server:/path/to/images/
# SSH and run update
ssh user@server "cd /path/to/portal && ./update-stack.sh v1.2.3"
```
**Make it a one-liner:**
```bash
# deploy.sh - Run locally
#!/bin/bash
TAG=$1
SERVER="user@your-server"
REMOTE_PATH="/var/www/vhosts/domain/portal"
# Build
./scripts/plesk/build-images.sh --tag "$TAG"
# Upload
scp portal-frontend.${TAG}.tar portal-backend.${TAG}.tar ${SERVER}:${REMOTE_PATH}/images/
# Deploy
ssh $SERVER "cd ${REMOTE_PATH} && ./update-stack.sh ${TAG}"
echo "✅ Deployed ${TAG}"
```
---
### Option 4: Use a Container Registry (Most Professional)
If you want auto-updates, use a registry:
**Free Options:**
- GitHub Container Registry (ghcr.io) - free for public repos
- GitLab Container Registry - free
- Docker Hub - 1 private repo free
**Setup:**
```bash
# Build and push
./scripts/plesk/build-images.sh --tag v1.2.3 --push ghcr.io/your-org
# Update compose to use registry
services:
frontend:
image: ghcr.io/your-org/portal-frontend:${TAG:-latest}
```
**Then use Watchtower or webhooks for auto-updates.**
---
## Recommended Production Setup
### For Your Current Situation (No Registry)
```
┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐
│ Local Dev │ │ Plesk Server │ │ Portainer │
│ │ │ │ │ │
│ 1. Build images │───▶│ 2. Load .tar │───▶│ 3. Update stack │
│ with tag │ │ files │ │ env vars │
│ │ │ │ │ │
└─────────────────┘ └──────────────────┘ └─────────────────┘
▲ │
│ ▼
└──────────────────────────────────────────────┘
4. Verify & rollback if needed
```
**Steps:**
1. Build: `./scripts/plesk/build-images.sh --tag 20241201-abc`
2. Upload: `scp *.tar server:/path/images/`
3. Load: `docker load -i *.tar`
4. Update: Change `FRONTEND_IMAGE` and `BACKEND_IMAGE` in Portainer
5. Redeploy: Click "Update the stack" in Portainer
### For Future (With Registry)
```
┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐
│ GitHub │ │ GitHub │ │ Portainer │
│ (Code) │───▶│ Actions │───▶│ (Webhook) │
│ │ │ (Build & Push) │ │ │
└─────────────────┘ └────────┬─────────┘ └────────┬────────┘
│ │
▼ ▼
┌────────────────┐ ┌────────────────┐
│ ghcr.io │ │ Plesk Server │
│ (Registry) │◀─────│ (Pull Image) │
└────────────────┘ └────────────────┘
```
---
## Quick Reference: Portainer Stack Commands
### Via Portainer UI
| Action | Steps |
|--------|-------|
| Create stack | Stacks → Add stack → Configure → Deploy |
| Update stack | Stacks → Select → Editor → Update |
| Change image | Stacks → Select → Env vars → Change IMAGE → Update |
| View logs | Stacks → Select → Container → Logs |
| Restart | Stacks → Select → Container → Restart |
| Stop | Stacks → Select → Stop |
| Delete | Stacks → Select → Delete |
### Via CLI (on server)
```bash
# Navigate to stack directory
cd /path/to/portal
# View status
docker compose --env-file stack.env ps
# View logs
docker compose --env-file stack.env logs -f
# Restart
docker compose --env-file stack.env restart
# Update (after changing stack.env)
docker compose --env-file stack.env up -d
# Stop
docker compose --env-file stack.env down
# Stop and remove volumes (⚠️ DATA LOSS)
docker compose --env-file stack.env down -v
```
---
## Summary
| Aspect | Recommendation |
|--------|---------------|
| Stack creation | **Upload** method (version control locally, no secrets in git) |
| Secrets management | **Portainer env vars** or **mounted secrets volume** |
| Image updates | **Manual script** for now, migrate to **registry + webhook** later |
| Auto-updates | **Not recommended** for production; use controlled deployments |
| Rollback | Keep previous image tags, update env vars to rollback |

126
docker/portainer/README.md Normal file
View File

@ -0,0 +1,126 @@
# Customer Portal - Portainer Deployment
## Quick Setup
### 1. Load Docker Images
Upload your images to the server and load them:
```bash
docker load < portal-frontend-latest.tar
docker load < portal-backend-latest.tar
```
Verify they're loaded:
```bash
docker images | grep portal
```
### 2. Create Stack in Portainer
1. Go to Portainer → Stacks → Add Stack
2. Name: `customer-portal`
3. Paste contents of `docker-compose.yml`
4. Scroll down to **Environment Variables**
5. Copy variables from `stack.env.example` and fill in your values
### 3. Generate Secrets
Generate secure random values for:
```bash
# JWT Secret
openssl rand -base64 32
# CSRF Secret
openssl rand -base64 32
# PostgreSQL Password
openssl rand -base64 24 | tr -d '/+=' | cut -c1-32
```
Encode your Salesforce private key:
```bash
base64 -w 0 < sf-private.key   # GNU coreutils; on macOS use: base64 -i sf-private.key | tr -d '\n'
```
### 4. Deploy
Click **Deploy the stack** in Portainer.
---
## How It Works
This setup uses `network_mode: bridge` with Docker `links` to avoid the iptables/IPv6 issues that can occur on some servers.
### Key Points
- **No custom networks** - Uses Docker's default bridge network
- **Service discovery via links** - Services reference each other by container name
- **Localhost binding** - Ports are bound to `127.0.0.1` for security (use Nginx/Plesk to proxy)
- **All config via Portainer** - No external env files needed
### Service URLs (internal)
| Service | Internal URL |
|----------|-------------------------|
| Backend | http://backend:4000 |
| Database | postgresql://database:5432 |
| Redis | redis://cache:6379 |
### Nginx/Plesk Proxy
Configure your domain to proxy to:
- Frontend: `http://127.0.0.1:3000`
- Backend API: `http://127.0.0.1:4000`
---
## Updating the Stack
1. Load new images:
```bash
docker load < portal-frontend-latest.tar
docker load < portal-backend-latest.tar
```
2. In Portainer: Stacks → customer-portal → **Update the stack**
3. Check **Re-pull image** and click **Update**
---
## Troubleshooting
### View Logs
```bash
docker logs portal-frontend
docker logs portal-backend
docker logs portal-database
docker logs portal-cache
```
### Check Container Health
```bash
docker ps
```
### Access Database
```bash
docker exec -it portal-database psql -U portal -d portal_prod
```
### Test Service Connectivity
```bash
# From backend container
docker exec -it portal-backend sh -c "nc -zv database 5432"
docker exec -it portal-backend sh -c "nc -zv cache 6379"
```

View File

@ -0,0 +1,169 @@
# =============================================================================
# Customer Portal - Portainer Stack (Bridge Network Mode)
# =============================================================================
# Uses Docker's default bridge network to avoid iptables issues.
# All environment variables are supplied via the Portainer UI.
# =============================================================================
services:
  # ---------------------------------------------------------------------------
  # Frontend (Next.js)
  # ---------------------------------------------------------------------------
  frontend:
    image: ${FRONTEND_IMAGE:-portal-frontend:latest}
    container_name: portal-frontend
    ports:
      # Bound to loopback only; expose publicly via Nginx/Plesk proxy.
      - "127.0.0.1:${FRONTEND_PORT:-3000}:3000"
    environment:
      - NODE_ENV=production
      - PORT=3000
      - HOSTNAME=0.0.0.0
    restart: unless-stopped
    depends_on:
      - backend
    network_mode: bridge
    links:
      - backend
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/api/health"]
      interval: 30s
      timeout: 10s
      start_period: 40s
      retries: 3

  # ---------------------------------------------------------------------------
  # Backend (NestJS BFF)
  # ---------------------------------------------------------------------------
  backend:
    image: ${BACKEND_IMAGE:-portal-backend:latest}
    container_name: portal-backend
    ports:
      - "127.0.0.1:${BACKEND_PORT:-4000}:4000"
    environment:
      # Core
      - NODE_ENV=production
      - APP_NAME=${APP_NAME:-customer-portal-bff}
      - APP_BASE_URL=${APP_BASE_URL}
      - BFF_PORT=4000
      - PORT=4000
      # Database - use "database" as host (via links)
      - DATABASE_URL=postgresql://${POSTGRES_USER:-portal}:${POSTGRES_PASSWORD}@database:5432/${POSTGRES_DB:-portal_prod}?schema=public
      # Redis - use "cache" as host (via links)
      - REDIS_URL=redis://cache:6379/0
      # Security
      - JWT_SECRET=${JWT_SECRET}
      - JWT_EXPIRES_IN=${JWT_EXPIRES_IN:-7d}
      - BCRYPT_ROUNDS=${BCRYPT_ROUNDS:-12}
      - CORS_ORIGIN=${CORS_ORIGIN}
      - TRUST_PROXY=true
      - CSRF_SECRET_KEY=${CSRF_SECRET_KEY}
      # Auth
      - AUTH_ALLOW_REDIS_TOKEN_FAILOPEN=${AUTH_ALLOW_REDIS_TOKEN_FAILOPEN:-false}
      - AUTH_REQUIRE_REDIS_FOR_TOKENS=${AUTH_REQUIRE_REDIS_FOR_TOKENS:-false}
      - AUTH_MAINTENANCE_MODE=${AUTH_MAINTENANCE_MODE:-false}
      # Rate Limiting
      - RATE_LIMIT_TTL=${RATE_LIMIT_TTL:-60}
      - RATE_LIMIT_LIMIT=${RATE_LIMIT_LIMIT:-100}
      - EXPOSE_VALIDATION_ERRORS=false
      # WHMCS
      - WHMCS_BASE_URL=${WHMCS_BASE_URL}
      - WHMCS_API_IDENTIFIER=${WHMCS_API_IDENTIFIER}
      - WHMCS_API_SECRET=${WHMCS_API_SECRET}
      # Salesforce
      - SF_LOGIN_URL=${SF_LOGIN_URL}
      - SF_CLIENT_ID=${SF_CLIENT_ID}
      - SF_USERNAME=${SF_USERNAME}
      - SF_EVENTS_ENABLED=${SF_EVENTS_ENABLED:-true}
      - SF_PRIVATE_KEY_BASE64=${SF_PRIVATE_KEY_BASE64}
      - SF_PRIVATE_KEY_PATH=/app/secrets/sf-private.key
      # Freebit
      - FREEBIT_BASE_URL=${FREEBIT_BASE_URL:-https://i1.mvno.net/emptool/api}
      - FREEBIT_OEM_ID=${FREEBIT_OEM_ID:-PASI}
      - FREEBIT_OEM_KEY=${FREEBIT_OEM_KEY}
      # Email
      - EMAIL_ENABLED=${EMAIL_ENABLED:-true}
      - EMAIL_FROM=${EMAIL_FROM:-no-reply@asolutions.jp}
      - EMAIL_FROM_NAME=${EMAIL_FROM_NAME:-Assist Solutions}
      - SENDGRID_API_KEY=${SENDGRID_API_KEY}
      # Portal
      - PORTAL_PRICEBOOK_ID=${PORTAL_PRICEBOOK_ID}
      - PORTAL_PRICEBOOK_NAME=${PORTAL_PRICEBOOK_NAME:-Portal}
      # Logging
      - LOG_LEVEL=${LOG_LEVEL:-info}
      # Enable automatic database migrations on startup
      - RUN_MIGRATIONS=true
    restart: unless-stopped
    depends_on:
      - database
      - cache
    network_mode: bridge
    links:
      - database
      - cache
    # Uses the built-in entrypoint which handles:
    #   - SF key decoding from SF_PRIVATE_KEY_BASE64
    #   - Database migration when RUN_MIGRATIONS=true
    #   - Waiting for dependencies (nc checks in entrypoint)
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4000/health"]
      interval: 30s
      timeout: 10s
      start_period: 60s
      retries: 3

  # ---------------------------------------------------------------------------
  # PostgreSQL Database
  # ---------------------------------------------------------------------------
  database:
    image: postgres:17-alpine
    container_name: portal-database
    environment:
      - POSTGRES_DB=${POSTGRES_DB:-portal_prod}
      - POSTGRES_USER=${POSTGRES_USER:-portal}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    # Intentionally no host ports: reachable only from linked containers.
    network_mode: bridge
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-portal} -d ${POSTGRES_DB:-portal_prod}"]
      interval: 10s
      timeout: 5s
      start_period: 30s
      retries: 5

  # ---------------------------------------------------------------------------
  # Redis Cache
  # ---------------------------------------------------------------------------
  cache:
    image: redis:7-alpine
    container_name: portal-cache
    # Persist every 60s (if >=1 change), cap memory, evict LRU keys.
    command: ["redis-server", "--save", "60", "1", "--loglevel", "warning", "--maxmemory", "128mb", "--maxmemory-policy", "allkeys-lru"]
    volumes:
      - redis_data:/data
    restart: unless-stopped
    network_mode: bridge
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

volumes:
  postgres_data:
    driver: local
  redis_data:
    driver: local

View File

@ -0,0 +1,94 @@
# =============================================================================
# Customer Portal - Portainer Environment Variables (TEMPLATE)
# =============================================================================
# Copy this file and fill in your actual values.
# DO NOT commit files with real secrets to version control.
# =============================================================================
# -----------------------------------------------------------------------------
# Images & Ports
# -----------------------------------------------------------------------------
FRONTEND_IMAGE=portal-frontend:latest
BACKEND_IMAGE=portal-backend:latest
FRONTEND_PORT=3000
BACKEND_PORT=4000
# -----------------------------------------------------------------------------
# Application
# -----------------------------------------------------------------------------
APP_NAME=customer-portal-bff
APP_BASE_URL=https://your-domain.com
CORS_ORIGIN=https://your-domain.com
# -----------------------------------------------------------------------------
# Database (PostgreSQL)
# -----------------------------------------------------------------------------
POSTGRES_DB=portal_prod
POSTGRES_USER=portal
# Generate with: openssl rand -base64 24
POSTGRES_PASSWORD=<GENERATE-SECURE-PASSWORD>
# -----------------------------------------------------------------------------
# Security & Auth
# -----------------------------------------------------------------------------
# Generate with: openssl rand -base64 32
JWT_SECRET=<GENERATE-WITH-openssl-rand-base64-32>
JWT_EXPIRES_IN=7d
BCRYPT_ROUNDS=12
# Generate with: openssl rand -base64 32
CSRF_SECRET_KEY=<GENERATE-WITH-openssl-rand-base64-32>
# Auth Settings
AUTH_ALLOW_REDIS_TOKEN_FAILOPEN=false
AUTH_REQUIRE_REDIS_FOR_TOKENS=false
AUTH_MAINTENANCE_MODE=false
# Rate Limiting
RATE_LIMIT_TTL=60
RATE_LIMIT_LIMIT=100
# -----------------------------------------------------------------------------
# WHMCS Integration
# -----------------------------------------------------------------------------
WHMCS_BASE_URL=https://your-whmcs-instance.com
WHMCS_API_IDENTIFIER=<YOUR-WHMCS-API-IDENTIFIER>
WHMCS_API_SECRET=<YOUR-WHMCS-API-SECRET>
# -----------------------------------------------------------------------------
# Salesforce Integration
# -----------------------------------------------------------------------------
SF_LOGIN_URL=https://your-org.my.salesforce.com
SF_CLIENT_ID=<YOUR-SF-CONNECTED-APP-CLIENT-ID>
SF_USERNAME=<YOUR-SF-INTEGRATION-USERNAME>
SF_EVENTS_ENABLED=true
# Salesforce Private Key (Base64 encoded)
# To encode: base64 -w 0 < sf-private.key
SF_PRIVATE_KEY_BASE64=<BASE64-ENCODED-PRIVATE-KEY>
# -----------------------------------------------------------------------------
# Freebit SIM API
# -----------------------------------------------------------------------------
FREEBIT_BASE_URL=https://i1.mvno.net/emptool/api
FREEBIT_OEM_ID=<YOUR-OEM-ID>
FREEBIT_OEM_KEY=<YOUR-OEM-KEY>
# -----------------------------------------------------------------------------
# Email (SendGrid)
# -----------------------------------------------------------------------------
EMAIL_ENABLED=true
EMAIL_FROM=no-reply@your-domain.com
EMAIL_FROM_NAME=Your Company Name
SENDGRID_API_KEY=<YOUR-SENDGRID-API-KEY>
# -----------------------------------------------------------------------------
# Salesforce Portal Config
# -----------------------------------------------------------------------------
PORTAL_PRICEBOOK_ID=<YOUR-PRICEBOOK-ID>
PORTAL_PRICEBOOK_NAME=Portal
# -----------------------------------------------------------------------------
# Logging
# -----------------------------------------------------------------------------
LOG_LEVEL=info

View File

@ -0,0 +1,72 @@
#!/bin/bash
# =============================================================================
# Customer Portal - Image Loader Script for Portainer
# =============================================================================
# Usage:    ./update-stack.sh <image-tag>
# Examples: ./update-stack.sh 20241201-abc123
#           ./update-stack.sh latest
#
# Loads the frontend/backend image tarballs for the given tag into the local
# Docker daemon. After loading, finish the rollout in the Portainer UI.
# =============================================================================
set -Eeuo pipefail

# Directory that holds the .tar files (defaults to the current directory).
IMAGES_DIR="${IMAGES_DIR:-$(pwd)}"

# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m'

log()  { echo -e "${GREEN}[DEPLOY]${NC} $*"; }
warn() { echo -e "${YELLOW}[DEPLOY]${NC} $*"; }
fail() { echo -e "${RED}[DEPLOY] ERROR:${NC} $*"; exit 1; }

# Tag defaults to "latest" when no argument is given.
TAG="${1:-latest}"

# Resolve the tarball path for a role ("frontend" or "backend"), trying both
# naming conventions: portal-<role>-<tag>.tar then portal-<role>.<tag>.tar.
# Prints the last candidate even if neither exists; callers test with -f.
resolve_tar() {
  local role="$1"
  local candidate="${IMAGES_DIR}/portal-${role}-${TAG}.tar"
  if [[ ! -f "$candidate" ]]; then
    candidate="${IMAGES_DIR}/portal-${role}.${TAG}.tar"
  fi
  echo "$candidate"
}

echo ""
log "Loading images with tag: ${TAG}"
echo ""

FRONTEND_TAR="$(resolve_tar frontend)"
BACKEND_TAR="$(resolve_tar backend)"

# Load the frontend image if its tarball exists; warn (don't abort) otherwise.
if [[ -f "$FRONTEND_TAR" ]]; then
  log "Loading frontend image from: $FRONTEND_TAR"
  docker load -i "$FRONTEND_TAR"
else
  warn "Frontend tarball not found: $FRONTEND_TAR"
fi

# Same for the backend image.
if [[ -f "$BACKEND_TAR" ]]; then
  log "Loading backend image from: $BACKEND_TAR"
  docker load -i "$BACKEND_TAR"
else
  warn "Backend tarball not found: $BACKEND_TAR"
fi

echo ""
log "Current portal images:"
# grep exits non-zero on no match; the || keeps pipefail from killing the script.
docker images | grep portal || echo "No portal images found"

echo ""
log "Next steps:"
echo " 1. Go to Portainer UI"
echo " 2. Navigate to Stacks → customer-portal"
echo " 3. Update FRONTEND_IMAGE and BACKEND_IMAGE if using specific tag"
echo " 4. Click 'Update the stack' with 'Re-pull image' checked"
echo ""

View File

@ -3,8 +3,13 @@
"version": "1.0.0", "version": "1.0.0",
"type": "commonjs", "type": "commonjs",
"description": "Unified domain layer with contracts, schemas, and provider mappers", "description": "Unified domain layer with contracts, schemas, and provider mappers",
"private": true,
"sideEffects": false,
"main": "./dist/index.js", "main": "./dist/index.js",
"types": "./dist/index.d.ts", "types": "./dist/index.d.ts",
"files": [
"dist"
],
"exports": { "exports": {
".": "./dist/index.js", ".": "./dist/index.js",
"./auth": "./dist/auth/index.js", "./auth": "./dist/auth/index.js",

View File

@ -1,65 +0,0 @@
{
"name": "@customer-portal/validation",
"version": "1.0.0",
"description": "Unified validation service for customer portal (NestJS + React)",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"private": true,
"sideEffects": false,
"files": [
"dist"
],
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
},
"./react": {
"types": "./dist/react/index.d.ts",
"default": "./dist/react/index.js"
},
"./nestjs": {
"types": "./dist/nestjs/index.d.ts",
"default": "./dist/nestjs/index.js"
}
},
"scripts": {
"build": "tsc -b",
"dev": "tsc -b -w --preserveWatchOutput",
"clean": "rm -rf dist",
"type-check": "NODE_OPTIONS=\"--max-old-space-size=2048 --max-semi-space-size=128\" tsc --project tsconfig.json --noEmit",
"test": "jest",
"lint": "eslint .",
"lint:fix": "eslint . --fix"
},
"dependencies": {
"@customer-portal/domain": "workspace:*",
"@nestjs/common": "^11.1.6",
"nestjs-pino": "^4.4.0",
"nestjs-zod": "^5.0.1",
"zod": "^4.1.9"
},
"peerDependencies": {
"@nestjs/common": "^11.0.0",
"react": "^19.0.0"
},
"peerDependenciesMeta": {
"@nestjs/common": {
"optional": true
},
"react": {
"optional": true
}
},
"devDependencies": {
"@types/react": "^19.1.10",
"@nestjs/common": "^11.1.6",
"react": "19.1.1",
"typescript": "^5.9.2",
"jest": "^30.0.5",
"@types/jest": "^30.0.0",
"nestjs-zod": "^5.0.1",
"express": "^5.1.0",
"@types/express": "^5.0.3"
}
}

View File

@ -1,10 +0,0 @@
/**
* Shared Validation Schemas
* Pure Zod schemas for API contracts - shared between frontend and backend
*/
// Re-export Zod for convenience
export { z } from "zod";
// Framework-specific exports
export * from "./react";

View File

@ -1,2 +0,0 @@
export { ZodValidationPipe, createZodDto, ZodValidationException } from "nestjs-zod";
export { ZodValidationExceptionFilter } from "./zod-exception.filter";

View File

@ -1,68 +0,0 @@
import { ArgumentsHost, Catch, ExceptionFilter, HttpStatus, Inject } from "@nestjs/common";
import type { Request, Response } from "express";
import { Logger } from "nestjs-pino";
import { ZodValidationException } from "nestjs-zod";
import type { ZodError, ZodIssue } from "zod";
/** Wire shape of a single validation issue returned to API clients. */
interface IssuePayload {
  path: string;
  message: string;
  code: string;
}

/**
 * Converts nestjs-zod validation exceptions into a consistent 400 response
 * envelope ({ success: false, error: { code: "VALIDATION_FAILED", ... } })
 * and logs the failing request path/method together with the issues.
 */
@Catch(ZodValidationException)
export class ZodValidationExceptionFilter implements ExceptionFilter {
  constructor(@Inject(Logger) private readonly logger: Logger) {}

  catch(exception: ZodValidationException, host: ArgumentsHost): void {
    const httpCtx = host.switchToHttp();
    const res = httpCtx.getResponse<Response>();
    const req = httpCtx.getRequest<Request>();

    const cause = exception.getZodError();

    let issues: IssuePayload[] = [];
    if (this.looksLikeZodError(cause)) {
      issues = this.toPayload(cause.issues);
    } else {
      // Defensive: the exception should always wrap a ZodError; log if not,
      // and fall through with an empty issue list rather than crashing.
      this.logger.error("ZodValidationException did not contain a ZodError", {
        path: req.url,
        method: req.method,
        providedType: typeof cause,
      });
    }

    this.logger.warn("Request validation failed", {
      path: req.url,
      method: req.method,
      issues,
    });

    res.status(HttpStatus.BAD_REQUEST).json({
      success: false as const,
      error: {
        code: "VALIDATION_FAILED",
        message: "Request validation failed",
        details: {
          issues,
          timestamp: new Date().toISOString(),
          path: req.url,
        },
      },
    });
  }

  /** Structural type guard: true when the value carries a Zod-style `issues` array. */
  private looksLikeZodError(error: unknown): error is ZodError {
    return Boolean(
      error && typeof error === "object" && Array.isArray((error as { issues?: unknown }).issues)
    );
  }

  /** Flatten Zod issues into the wire format; empty paths become "root". */
  private toPayload(issues: ZodIssue[]): IssuePayload[] {
    return issues.map(issue => ({
      path: issue.path.join(".") || "root",
      message: issue.message,
      code: issue.code,
    }));
  }
}

View File

@ -1,7 +0,0 @@
/**
* React validation exports
* Simple Zod validation for React
*/
export { useZodForm } from "../zod-form";
export type { ZodFormOptions, UseZodFormReturn, FormErrors, FormTouched } from "../zod-form";

View File

@ -1,47 +0,0 @@
/**
* Simple Zod Validation Pipe for NestJS
* Just uses Zod as-is with clean error formatting
*/
import type { PipeTransform, ArgumentMetadata } from "@nestjs/common";
import { Injectable, BadRequestException } from "@nestjs/common";
import type { ZodSchema } from "zod";
import { ZodError } from "zod";
/**
 * Minimal Zod-backed validation pipe for NestJS.
 * Parses the incoming value with the supplied schema; invalid payloads are
 * rejected with a 400 whose body carries per-field { field, message, code }
 * entries. Returns the parsed (and possibly transformed) value on success.
 */
@Injectable()
export class ZodValidationPipe implements PipeTransform {
  constructor(private readonly schema: ZodSchema) {}

  transform(value: unknown, _metadata: ArgumentMetadata): unknown {
    try {
      return this.schema.parse(value);
    } catch (caught) {
      if (caught instanceof ZodError) {
        // Flatten each Zod issue; an empty path (root-level error) maps to "root".
        const errors = caught.issues.map(issue => ({
          field: issue.path.join(".") || "root",
          message: issue.message,
          code: issue.code,
        }));
        throw new BadRequestException({
          message: "Validation failed",
          errors,
          statusCode: 400,
        });
      }
      // Non-Zod failures (e.g. a throwing .transform) become a plain 400.
      throw new BadRequestException(
        caught instanceof Error ? caught.message : "Validation failed"
      );
    }
  }
}

/**
 * Factory function to create Zod pipe (main export)
 */
export const ZodPipe = (schema: ZodSchema) => new ZodValidationPipe(schema);

/**
 * Alternative factory function
 */
export const createZodPipe = (schema: ZodSchema) => new ZodValidationPipe(schema);

View File

@ -1,17 +0,0 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"composite": true,
"tsBuildInfoFile": "dist/.tsbuildinfo",
"experimentalDecorators": true,
"emitDecoratorMetadata": true
},
"include": ["src/**/*"],
"exclude": ["dist", "node_modules", "**/*.test.ts", "**/*.spec.ts"],
"references": [{ "path": "../domain" }]
}

46
pnpm-lock.yaml generated
View File

@ -70,9 +70,6 @@ importers:
'@customer-portal/logging': '@customer-portal/logging':
specifier: workspace:* specifier: workspace:*
version: link:../../packages/logging version: link:../../packages/logging
'@customer-portal/validation':
specifier: workspace:*
version: link:../../packages/validation
'@nestjs/bullmq': '@nestjs/bullmq':
specifier: ^11.0.3 specifier: ^11.0.3
version: 11.0.3(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.6)(bullmq@5.58.5) version: 11.0.3(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.6)(bullmq@5.58.5)
@ -272,9 +269,6 @@ importers:
'@customer-portal/domain': '@customer-portal/domain':
specifier: workspace:* specifier: workspace:*
version: link:../../packages/domain version: link:../../packages/domain
'@customer-portal/validation':
specifier: workspace:*
version: link:../../packages/validation
'@heroicons/react': '@heroicons/react':
specifier: ^2.2.0 specifier: ^2.2.0
version: 2.2.0(react@19.1.1) version: 2.2.0(react@19.1.1)
@ -375,46 +369,6 @@ importers:
specifier: ^5.9.2 specifier: ^5.9.2
version: 5.9.2 version: 5.9.2
packages/validation:
dependencies:
'@customer-portal/domain':
specifier: workspace:*
version: link:../domain
'@nestjs/common':
specifier: ^11.1.6
version: 11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2)
nestjs-pino:
specifier: ^4.4.0
version: 4.4.0(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(pino-http@10.5.0)(pino@9.9.5)(rxjs@7.8.2)
nestjs-zod:
specifier: ^5.0.1
version: 5.0.1(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/swagger@11.2.0(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.6)(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2))(rxjs@7.8.2)(zod@4.1.9)
zod:
specifier: ^4.1.9
version: 4.1.9
devDependencies:
'@types/express':
specifier: ^5.0.3
version: 5.0.3
'@types/jest':
specifier: ^30.0.0
version: 30.0.0
'@types/react':
specifier: ^19.1.10
version: 19.1.12
express:
specifier: ^5.1.0
version: 5.1.0
jest:
specifier: ^30.0.5
version: 30.1.3(@types/node@24.3.1)(ts-node@10.9.2(@types/node@24.3.1)(typescript@5.9.2))
react:
specifier: 19.1.1
version: 19.1.1
typescript:
specifier: ^5.9.2
version: 5.9.2
packages: packages:
'@alloc/quick-lru@5.2.0': '@alloc/quick-lru@5.2.0':

View File

@ -1 +0,0 @@
735d984b4fc0c5de1404ee95991e6a0ab627e815a46fbb2e3002240a551146a2 /home/barsa/projects/customer_portal/customer-portal/portal-backend.latest.tar.gz

View File

@ -1 +0,0 @@
de99755961ca5a0d2b8713b1a57b6d818cb860d0eb87387c4ff508882d2f6984 /home/barsa/projects/customer_portal/customer-portal/portal-backend.latest.tar

View File

@ -1 +0,0 @@
2d1c7887410361baefcc3f2038dce9079ca6fa19d5afa29e8281c99a40d020c7 /home/barsa/projects/customer_portal/customer-portal/portal-frontend.latest.tar.gz

View File

@ -1 +0,0 @@
ea3c21988f94a9f8755e1024d45187afad435df399c79c17934e701ca7c4ad9b /home/barsa/projects/customer_portal/customer-portal/portal-frontend.latest.tar

View File

@ -77,7 +77,8 @@ cd "$PROJECT_ROOT"
# Enable BuildKit # Enable BuildKit
export DOCKER_BUILDKIT=1 export DOCKER_BUILDKIT=1
# Build args # Build args (can be overridden via env vars)
PNPM_VERSION="${PNPM_VERSION:-10.15.0}"
NEXT_PUBLIC_API_BASE="${NEXT_PUBLIC_API_BASE:-/api}" NEXT_PUBLIC_API_BASE="${NEXT_PUBLIC_API_BASE:-/api}"
NEXT_PUBLIC_APP_NAME="${NEXT_PUBLIC_APP_NAME:-Customer Portal}" NEXT_PUBLIC_APP_NAME="${NEXT_PUBLIC_APP_NAME:-Customer Portal}"
GIT_SOURCE="$(git config --get remote.origin.url 2>/dev/null || echo unknown)" GIT_SOURCE="$(git config --get remote.origin.url 2>/dev/null || echo unknown)"
@ -90,6 +91,7 @@ mkdir -p "$LOG_DIR"
build_frontend() { build_frontend() {
local logfile="$LOG_DIR/frontend.log" local logfile="$LOG_DIR/frontend.log"
docker build -f apps/portal/Dockerfile \ docker build -f apps/portal/Dockerfile \
--build-arg "PNPM_VERSION=${PNPM_VERSION}" \
--build-arg "NEXT_PUBLIC_API_BASE=${NEXT_PUBLIC_API_BASE}" \ --build-arg "NEXT_PUBLIC_API_BASE=${NEXT_PUBLIC_API_BASE}" \
--build-arg "NEXT_PUBLIC_APP_NAME=${NEXT_PUBLIC_APP_NAME}" \ --build-arg "NEXT_PUBLIC_APP_NAME=${NEXT_PUBLIC_APP_NAME}" \
--build-arg "NEXT_PUBLIC_APP_VERSION=${IMAGE_TAG}" \ --build-arg "NEXT_PUBLIC_APP_VERSION=${IMAGE_TAG}" \
@ -110,6 +112,7 @@ build_frontend() {
build_backend() { build_backend() {
local logfile="$LOG_DIR/backend.log" local logfile="$LOG_DIR/backend.log"
docker build -f apps/bff/Dockerfile \ docker build -f apps/bff/Dockerfile \
--build-arg "PNPM_VERSION=${PNPM_VERSION}" \
-t "${IMAGE_BACKEND}:latest" -t "${IMAGE_BACKEND}:${IMAGE_TAG}" \ -t "${IMAGE_BACKEND}:latest" -t "${IMAGE_BACKEND}:${IMAGE_TAG}" \
--label "org.opencontainers.image.version=${IMAGE_TAG}" \ --label "org.opencontainers.image.version=${IMAGE_TAG}" \
--label "org.opencontainers.image.source=${GIT_SOURCE}" \ --label "org.opencontainers.image.source=${GIT_SOURCE}" \