Merge branch 'main' into Tema-v1

This commit is contained in:
NTumurbars 2025-08-30 15:51:31 +09:00 committed by GitHub
commit b99f256da3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 699 additions and 1622 deletions

View File

@@ -1,103 +0,0 @@
# 🚀 Customer Portal - Development Environment
# Copy this file to .env for local development
# This configuration is optimized for development with hot-reloading
# =============================================================================
# 🌐 APPLICATION CONFIGURATION
# =============================================================================
NODE_ENV=development
APP_NAME=customer-portal-bff
BFF_PORT=4000
APP_BASE_URL=http://localhost:3000
# =============================================================================
# 🔐 SECURITY CONFIGURATION (Development)
# =============================================================================
# Development JWT secret (OK to use simple secret for local dev)
JWT_SECRET=dev_secret_for_local_development_minimum_32_chars_long
JWT_EXPIRES_IN=7d
# Password Hashing (Minimum rounds for security compliance)
BCRYPT_ROUNDS=10
# =============================================================================
# 🗄️ DATABASE & CACHE (Development)
# =============================================================================
# Local Docker services
DATABASE_URL=postgresql://dev:dev@localhost:5432/portal_dev?schema=public
REDIS_URL=redis://localhost:6379
# =============================================================================
# 🌍 NETWORK & CORS (Development)
# =============================================================================
# Allow local frontend
CORS_ORIGIN=http://localhost:3000
TRUST_PROXY=false
# =============================================================================
# 🚦 RATE LIMITING (Development)
# =============================================================================
# Relaxed rate limiting for development
RATE_LIMIT_TTL=60000
RATE_LIMIT_LIMIT=100
AUTH_RATE_LIMIT_TTL=900000
AUTH_RATE_LIMIT_LIMIT=3
# =============================================================================
# 🏢 EXTERNAL INTEGRATIONS (Development)
# =============================================================================
# WHMCS Integration (Demo/Test Environment)
WHMCS_BASE_URL=https://demo.whmcs.com
WHMCS_API_IDENTIFIER=your_demo_identifier
WHMCS_API_SECRET=your_demo_secret
WHMCS_WEBHOOK_SECRET=your_dev_webhook_secret
# Salesforce Integration (Sandbox Environment)
SF_LOGIN_URL=https://test.salesforce.com
SF_CLIENT_ID=your_dev_client_id
SF_PRIVATE_KEY_PATH=./secrets/sf-dev.key
SF_USERNAME=dev@yourcompany.com.sandbox
SF_WEBHOOK_SECRET=your_dev_webhook_secret
# =============================================================================
# 📊 LOGGING (Development)
# =============================================================================
# Verbose logging for development
LOG_LEVEL=debug
# =============================================================================
# 🎯 FRONTEND CONFIGURATION (Development)
# =============================================================================
# NEXT_PUBLIC_ variables are exposed to browser
NEXT_PUBLIC_APP_NAME=Customer Portal (Dev)
NEXT_PUBLIC_APP_VERSION=1.0.0-dev
NEXT_PUBLIC_API_BASE=http://localhost:4000
NEXT_PUBLIC_ENABLE_DEVTOOLS=true
# =============================================================================
# 🎛️ DEVELOPMENT OPTIONS
# =============================================================================
# Node.js options for development
NODE_OPTIONS=--no-deprecation
# =============================================================================
# ✉️ EMAIL (SendGrid) - Development
# =============================================================================
SENDGRID_API_KEY=
EMAIL_FROM=no-reply@localhost.test
EMAIL_FROM_NAME=Assist Solutions (Dev)
EMAIL_ENABLED=true
EMAIL_USE_QUEUE=true
SENDGRID_SANDBOX=true
# Optional: dynamic template IDs (use {{resetUrl}} in reset template)
EMAIL_TEMPLATE_RESET=
EMAIL_TEMPLATE_WELCOME=
# =============================================================================
# 🚀 QUICK START (Development)
# =============================================================================
# 1. Copy this template: cp .env.dev.example .env
# 2. Edit .env with your development values
# 3. Start services: pnpm dev:start
# 4. Start apps: pnpm dev
# 5. Access: Frontend http://localhost:3000, Backend http://localhost:4000

View File

@@ -1,72 +1,93 @@
# ====== Core ======
NODE_ENV=production
# 🚀 Customer Portal - Development Environment Example
# Copy this file to .env for local development
# This configuration is optimized for development with hot-reloading
# ====== Frontend (Next.js) ======
NEXT_PUBLIC_APP_NAME=Customer Portal
NEXT_PUBLIC_APP_VERSION=1.0.0
# If using Plesk single domain with /api proxied to backend, set to your main domain
# Example: https://portal.example.com or https://example.com
NEXT_PUBLIC_API_BASE=https://CHANGE_THIS
# =============================================================================
# 🗄️ DATABASE CONFIGURATION (Development)
# =============================================================================
DATABASE_URL="postgresql://dev:dev@localhost:5432/portal_dev?schema=public"
# ====== Backend (NestJS BFF) ======
# =============================================================================
# 🔴 REDIS CONFIGURATION (Development)
# =============================================================================
REDIS_URL="redis://localhost:6379"
# =============================================================================
# 🌐 APPLICATION CONFIGURATION (Development)
# =============================================================================
# Backend Configuration
BFF_PORT=4000
APP_BASE_URL=https://CHANGE_THIS
APP_NAME="customer-portal-bff"
NODE_ENV="development"
# ====== Database (PostgreSQL) ======
POSTGRES_DB=portal_prod
POSTGRES_USER=portal
POSTGRES_PASSWORD=CHANGE_THIS
# Frontend Configuration (NEXT_PUBLIC_ variables are exposed to browser)
NEXT_PORT=3000
NEXT_PUBLIC_APP_NAME="Customer Portal (Dev)"
NEXT_PUBLIC_APP_VERSION="1.0.0-dev"
NEXT_PUBLIC_API_BASE="http://localhost:4000"
NEXT_PUBLIC_ENABLE_DEVTOOLS="true"
# Prisma style DATABASE_URL for Postgres inside Compose network
# For Plesk Compose, hostname is the service name 'database'
DATABASE_URL=postgresql://portal:${POSTGRES_PASSWORD}@database:5432/${POSTGRES_DB}?schema=public
# =============================================================================
# 🔐 SECURITY CONFIGURATION (Development)
# =============================================================================
# JWT Secret (Development - OK to use simple secret)
JWT_SECRET="HjHsUyTE3WhPn5N07iSvurdV4hk2VEkIuN+lIflHhVQ="
JWT_EXPIRES_IN="7d"
# ====== Redis ======
REDIS_URL=redis://cache:6379/0
# Password Hashing (Minimum rounds for security compliance)
BCRYPT_ROUNDS=10
# ====== Security ======
JWT_SECRET=CHANGE_THIS
JWT_EXPIRES_IN=7d
BCRYPT_ROUNDS=12
# CORS (Allow local frontend)
CORS_ORIGIN="http://localhost:3000"
# ====== CORS ======
# If portal: https://portal.example.com ; if root domain: https://example.com
CORS_ORIGIN=https://CHANGE_THIS
# =============================================================================
# 🏢 EXTERNAL API CONFIGURATION (Development)
# =============================================================================
# WHMCS Integration (use your actual credentials)
WHMCS_BASE_URL="https://accounts.asolutions.co.jp"
WHMCS_API_IDENTIFIER="your_whmcs_api_identifier"
WHMCS_API_SECRET="your_whmcs_api_secret"
# ====== External APIs (optional) ======
WHMCS_BASE_URL=
WHMCS_API_IDENTIFIER=
WHMCS_API_SECRET=
SF_LOGIN_URL=
SF_CLIENT_ID=
SF_PRIVATE_KEY_PATH=/app/secrets/salesforce.key
SF_USERNAME=
# Salesforce Integration (use your actual credentials)
SF_LOGIN_URL="https://asolutions.my.salesforce.com"
SF_CLIENT_ID="your_salesforce_client_id"
SF_PRIVATE_KEY_PATH="./secrets/sf-private.key"
SF_USERNAME="your_salesforce_username"
# ====== Salesforce Pricing ======
# Portal Pricebook ID for product pricing (defaults to Portal pricebook)
PORTAL_PRICEBOOK_ID=01sTL000008eLVlYAM
# Salesforce Pricing
PORTAL_PRICEBOOK_ID="01sTL000008eLVlYAM"
# ====== Logging ======
LOG_LEVEL=info
LOG_FORMAT=json
# =============================================================================
# 📊 LOGGING CONFIGURATION (Development)
# =============================================================================
LOG_LEVEL="debug"
LOG_FORMAT="pretty"
# ====== Email (SendGrid) ======
# API key: https://app.sendgrid.com/settings/api_keys
SENDGRID_API_KEY=
# From address for outbound email
EMAIL_FROM=no-reply@yourdomain.com
EMAIL_FROM_NAME=Assist Solutions
# Master email switch
EMAIL_ENABLED=true
# Queue emails for async delivery (recommended)
EMAIL_USE_QUEUE=true
# Enable SendGrid sandbox mode (use true in non-prod to avoid delivery)
SENDGRID_SANDBOX=false
# Optional: dynamic template IDs (use {{resetUrl}} for reset template)
EMAIL_TEMPLATE_RESET=
EMAIL_TEMPLATE_WELCOME=
# =============================================================================
# 📧 EMAIL CONFIGURATION (Development)
# =============================================================================
# SendGrid (optional for development)
SENDGRID_API_KEY=""
EMAIL_FROM="no-reply@yourdomain.com"
EMAIL_FROM_NAME="Assist Solutions"
EMAIL_ENABLED=false
EMAIL_USE_QUEUE=false
SENDGRID_SANDBOX=true
EMAIL_TEMPLATE_RESET=""
EMAIL_TEMPLATE_WELCOME=""
# ====== Node options ======
NODE_OPTIONS=--max-old-space-size=512
# =============================================================================
# 🎛️ DEVELOPMENT CONFIGURATION
# =============================================================================
# Node.js options for development
NODE_OPTIONS="--no-deprecation"
# =============================================================================
# 🐳 DOCKER DEVELOPMENT NOTES
# =============================================================================
# For Docker development services (PostgreSQL + Redis only):
# 1. Run: pnpm dev:start
# 2. Frontend and Backend run locally (outside containers) for hot-reloading
# 3. Only database and cache services run in containers

7
.env.plesk Normal file
View File

@@ -0,0 +1,7 @@
# GitHub Container Registry Authentication
# Replace with your actual GitHub personal access token (with read:packages scope)
GITHUB_TOKEN=your_github_personal_access_token_here
# Security note: Keep this file secure and don't commit it to Git
# This token allows pulling private images from GitHub Container Registry

View File

@@ -1,117 +0,0 @@
# 🚀 Customer Portal - Production Environment
# Copy this file to .env for production deployment
# This configuration is optimized for production with security and performance
# =============================================================================
# 🌐 APPLICATION CONFIGURATION
# =============================================================================
NODE_ENV=production
APP_NAME=customer-portal-bff
BFF_PORT=4000
APP_BASE_URL=https://portal.yourdomain.com
# =============================================================================
# 🔐 SECURITY CONFIGURATION (Production)
# =============================================================================
# CRITICAL: Generate with: openssl rand -base64 32
JWT_SECRET=GENERATE_SECURE_JWT_SECRET_HERE_MINIMUM_32_CHARS
JWT_EXPIRES_IN=7d
# Password Hashing (High rounds for security)
BCRYPT_ROUNDS=12
# =============================================================================
# 🗄️ DATABASE & CACHE (Production)
# =============================================================================
# Docker internal networking (container names as hostnames)
DATABASE_URL=postgresql://portal:YOUR_SECURE_DB_PASSWORD@database:5432/portal_prod?schema=public
REDIS_URL=redis://cache:6379
# =============================================================================
# 🌍 NETWORK & CORS (Production)
# =============================================================================
# Your production domain
CORS_ORIGIN=https://yourdomain.com
TRUST_PROXY=true
# =============================================================================
# 🚦 RATE LIMITING (Production)
# =============================================================================
# Strict rate limiting for production
RATE_LIMIT_TTL=60000
RATE_LIMIT_LIMIT=100
AUTH_RATE_LIMIT_TTL=900000
AUTH_RATE_LIMIT_LIMIT=3
# =============================================================================
# 🏢 EXTERNAL INTEGRATIONS (Production)
# =============================================================================
# WHMCS Integration (Production Environment)
WHMCS_BASE_URL=https://your-whmcs-domain.com
WHMCS_API_IDENTIFIER=your_production_identifier
WHMCS_API_SECRET=your_production_secret
WHMCS_WEBHOOK_SECRET=your_whmcs_webhook_secret
# Salesforce Integration (Production Environment)
SF_LOGIN_URL=https://login.salesforce.com
SF_CLIENT_ID=your_production_client_id
SF_PRIVATE_KEY_PATH=/app/secrets/sf-prod.key
SF_USERNAME=production@yourcompany.com
SF_WEBHOOK_SECRET=your_salesforce_webhook_secret
# =============================================================================
# 📊 LOGGING (Production)
# =============================================================================
# Production logging level
LOG_LEVEL=info
# =============================================================================
# 🎯 FRONTEND CONFIGURATION (Production)
# =============================================================================
# NEXT_PUBLIC_ variables are exposed to browser
NEXT_PUBLIC_APP_NAME=Customer Portal
NEXT_PUBLIC_APP_VERSION=1.0.0
NEXT_PUBLIC_API_BASE=https://yourdomain.com
NEXT_PUBLIC_ENABLE_DEVTOOLS=false
# =============================================================================
# 🎛️ PRODUCTION OPTIONS
# =============================================================================
# Node.js options for production
NODE_OPTIONS=--max-old-space-size=2048
# =============================================================================
# ✉️ EMAIL (SendGrid) - Production
# =============================================================================
# Create and store securely (e.g., KMS/Secrets Manager)
SENDGRID_API_KEY=
EMAIL_FROM=no-reply@yourdomain.com
EMAIL_FROM_NAME=Assist Solutions
EMAIL_ENABLED=true
EMAIL_USE_QUEUE=true
SENDGRID_SANDBOX=false
# Optional: Dynamic Template IDs (recommended)
EMAIL_TEMPLATE_RESET=
EMAIL_TEMPLATE_WELCOME=
# =============================================================================
# 🔒 PRODUCTION SECURITY CHECKLIST
# =============================================================================
# ✅ Replace ALL default/demo values with real credentials
# ✅ Use strong, unique passwords and secrets (minimum 32 characters for JWT)
# ✅ Ensure SF_PRIVATE_KEY_PATH points to actual key file
# ✅ Set correct CORS_ORIGIN for your domain
# ✅ Use HTTPS URLs for all external services
# ✅ Verify DATABASE_URL password matches docker-compose.yml
# ✅ Test all integrations before going live
# ✅ Configure webhook secrets for security
# ✅ Set appropriate rate limiting values
# ✅ Enable trust proxy if behind reverse proxy
# =============================================================================
# 🚀 QUICK START (Production)
# =============================================================================
# 1. Copy this template: cp .env.production.example .env
# 2. Edit .env with your production values (REQUIRED!)
# 3. Deploy: pnpm prod:deploy
# 4. Access: https://yourdomain.com

View File

@@ -1,46 +0,0 @@
name: CI
on:
push:
branches: [main]
pull_request:
branches: [main]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: 22
cache: "pnpm"
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.15.0
- name: Install deps
run: pnpm install --frozen-lockfile
- name: Build Shared (needed for type refs)
run: pnpm --filter @customer-portal/shared run build
- name: Generate Prisma client
run: pnpm --filter @customer-portal/bff run db:generate
- name: Type check (workspace)
run: pnpm --recursive run type-check
- name: Lint (workspace)
run: pnpm --recursive run lint
- name: Build BFF
run: pnpm --filter @customer-portal/bff run build
- name: Build Portal
run: pnpm --filter @customer-portal/portal run build

107
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,107 @@
name: Build & Push Images
on:
workflow_dispatch: # Only allow manual triggers
# push:
# branches: [main] # Commented out - no auto-trigger
env:
REGISTRY: ghcr.io
IMAGE_NAME_PREFIX: ntumurbars/customer-portal
jobs:
build-and-push:
name: Build & Push Docker Images
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10.15.0
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 22
cache: 'pnpm'
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for frontend
id: meta-frontend
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_PREFIX }}-frontend
tags: |
type=raw,value=latest,enable={{is_default_branch}}
type=sha,prefix=main-
- name: Extract metadata for backend
id: meta-backend
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME_PREFIX }}-backend
tags: |
type=raw,value=latest,enable={{is_default_branch}}
type=sha,prefix=main-
- name: Build and push frontend image
uses: docker/build-push-action@v5
with:
context: .
file: ./apps/portal/Dockerfile
platforms: linux/amd64
push: true
tags: ${{ steps.meta-frontend.outputs.tags }}
labels: ${{ steps.meta-frontend.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push backend image
uses: docker/build-push-action@v5
with:
context: .
file: ./apps/bff/Dockerfile
platforms: linux/amd64
push: true
tags: ${{ steps.meta-backend.outputs.tags }}
labels: ${{ steps.meta-backend.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build Summary
run: |
echo "## 🚀 Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Frontend Image:** \`${{ steps.meta-frontend.outputs.tags }}\`" >> $GITHUB_STEP_SUMMARY
echo "**Backend Image:** \`${{ steps.meta-backend.outputs.tags }}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### 📦 Images Built:" >> $GITHUB_STEP_SUMMARY
echo "- **Frontend**: [ghcr.io/${{ env.IMAGE_NAME_PREFIX }}-frontend](https://github.com/NTumurbars/customer-portal/pkgs/container/customer-portal-frontend)" >> $GITHUB_STEP_SUMMARY
echo "- **Backend**: [ghcr.io/${{ env.IMAGE_NAME_PREFIX }}-backend](https://github.com/NTumurbars/customer-portal/pkgs/container/customer-portal-backend)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### 🚀 Next Steps:" >> $GITHUB_STEP_SUMMARY
echo "1. **SSH to Plesk server** and run:" >> $GITHUB_STEP_SUMMARY
echo " \`\`\`bash" >> $GITHUB_STEP_SUMMARY
echo " docker compose -f compose-plesk.yaml pull" >> $GITHUB_STEP_SUMMARY
echo " docker compose -f compose-plesk.yaml up -d" >> $GITHUB_STEP_SUMMARY
echo " \`\`\`" >> $GITHUB_STEP_SUMMARY
echo "2. **Or update via Plesk UI**: Docker → Stacks → customer-portal → Pull → Up" >> $GITHUB_STEP_SUMMARY

View File

@@ -1,94 +0,0 @@
name: Test & Lint
on:
push:
branches: [main, develop]
pull_request:
branches: [main, develop]
env:
NODE_VERSION: "22"
PNPM_VERSION: "10.15.0"
jobs:
test:
name: Test & Lint
runs-on: ubuntu-latest
services:
postgres:
image: postgres:17
env:
POSTGRES_PASSWORD: test
POSTGRES_DB: portal_test
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
redis:
image: redis:8-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 6379:6379
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: ${{ env.NODE_VERSION }}
- name: Enable Corepack and install pnpm
run: |
corepack enable
corepack prepare pnpm@${{ env.PNPM_VERSION }} --activate
- name: Cache pnpm dependencies
uses: actions/cache@v4
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-pnpm-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Build shared package (needed for type refs)
run: pnpm --filter @customer-portal/shared run build
- name: Generate Prisma client
run: pnpm --filter @customer-portal/bff run db:generate
- name: Type check
run: pnpm type-check
- name: Lint
run: pnpm lint
- name: Test shared package
run: pnpm --filter @customer-portal/shared run test
if: success() || failure()
- name: Test BFF package
run: pnpm --filter @customer-portal/bff run test
env:
DATABASE_URL: postgresql://postgres:test@localhost:5432/portal_test
REDIS_URL: redis://localhost:6379
if: success() || failure()
- name: Build applications
run: pnpm build
env:
NEXT_PUBLIC_API_BASE: http://localhost:4000
NEXT_PUBLIC_APP_NAME: Customer Portal Test

5
.gitignore vendored
View File

@@ -139,3 +139,8 @@ temp/
# Prisma
prisma/migrations/dev.db*
# Large archive files
*.tar
*.tar.gz
*.zip

View File

@@ -1,128 +0,0 @@
# Order Services Architecture Recommendation
## Recommended Structure: Enhanced Separation of Concerns
### 1. **Controller Layer** (`orders.controller.ts`)
**Responsibility**: API contract and basic validation
- DTO validation (format, types, required fields)
- Authentication/authorization
- HTTP response handling
- Minimal business logic
### 2. **Orchestrator Layer** (`order-orchestrator.service.ts`)
**Responsibility**: Workflow coordination and transaction management
- Coordinates the order creation flow
- Manages transaction boundaries
- Handles high-level error scenarios
- Calls other services in correct sequence
### 3. **Validator Layer** (`order-validator.service.ts`)
**Responsibility**: ALL validation logic (business + technical)
```typescript
class OrderValidator {
// API-level validation (move from DTO)
validateRequestFormat(body: any): CreateOrderBody
// Business validation (current)
validateUserMapping(userId: string): Promise<UserMapping>
validatePaymentMethod(userId: string, clientId: number): Promise<void>
validateSKUs(skus: string[], pricebookId: string): Promise<void>
validateBusinessRules(orderType: string, skus: string[]): void
validateInternetDuplication(userId: string, clientId: number): Promise<void>
// Complete validation (orchestrates all checks)
async validateCompleteOrder(userId: string, body: any): Promise<{
validatedBody: CreateOrderBody,
userMapping: UserMapping
}>
}
```
### 4. **Builder Layer** (`order-builder.service.ts`)
**Responsibility**: Data transformation and mapping
- Transform business data to Salesforce format
- Apply business rules to field mapping
- Handle conditional field logic
### 5. **ItemBuilder Layer** (`order-item-builder.service.ts`)
**Responsibility**: Order item creation and pricing
- Create order line items
- Handle pricing calculations
- Manage product metadata
## Benefits of This Structure:
### ✅ **Single Responsibility Principle**
- Each service has one clear purpose
- Easy to test and maintain
- Clear boundaries
### ✅ **Validator as Single Source of Truth**
- All validation logic in one place
- Easy to find and modify validation rules
- Consistent error handling
### ✅ **Orchestrator for Workflow Management**
- Clear sequence of operations
- Transaction management
- Error recovery logic
### ✅ **Testability**
- Each layer can be unit tested independently
- Mock dependencies easily
- Clear input/output contracts
## Implementation Changes:
### Move DTO validation to Validator:
```typescript
// Before: Controller has DTO validation
@Body() body: CreateOrderDto
// After: Controller accepts any, Validator validates
@Body() body: any
```
### Enhanced Validator:
```typescript
async validateCompleteOrder(userId: string, rawBody: any) {
// 1. Format validation (was DTO)
const body = this.validateRequestFormat(rawBody);
// 2. Business validation (current)
const userMapping = await this.validateUserMapping(userId);
await this.validatePaymentMethod(userId, userMapping.whmcsClientId);
// 3. SKU validation (move here)
const pricebookId = await this.findPricebookId();
await this.validateSKUs(body.skus, pricebookId);
this.validateBusinessRules(body.orderType, body.skus);
// 4. Order-specific validation
if (body.orderType === "Internet") {
await this.validateInternetDuplication(userId, userMapping.whmcsClientId);
}
return { validatedBody: body, userMapping, pricebookId };
}
```
### Simplified Orchestrator:
```typescript
async createOrder(userId: string, rawBody: any) {
// 1. Complete validation
const { validatedBody, userMapping, pricebookId } =
await this.validator.validateCompleteOrder(userId, rawBody);
// 2. Build order
const orderFields = this.builder.buildOrderFields(validatedBody, userMapping, pricebookId);
// 3. Create in Salesforce
const created = await this.sf.sobject("Order").create(orderFields);
// 4. Create items
await this.itemBuilder.createOrderItemsFromSKUs(created.id, validatedBody.skus, pricebookId);
return { sfOrderId: created.id, status: "Created" };
}
```

View File

@@ -1,181 +0,0 @@
# 🎯 Final Code Quality & Documentation Compliance Report
## 🏆 **Overall Assessment: EXCELLENT**
The order system demonstrates **enterprise-grade code quality** with proper architecture, maintainable patterns, and full documentation compliance.
---
## ✅ **Architecture Quality: A+**
### **Clean Architecture Implementation**
```typescript
Controller (Thin API Layer)
OrderValidator (Complete Validation)
OrderOrchestrator (Workflow Coordination)
OrderBuilder + OrderItemBuilder (Data Transformation)
Salesforce (External System)
```
**✅ Strengths:**
- **Single Responsibility Principle**: Each service has one clear purpose
- **Dependency Injection**: Proper NestJS patterns throughout
- **Separation of Concerns**: API, validation, business logic, and data layers clearly separated
- **Testability**: Each component can be unit tested independently
---
## ✅ **Field Mapping: A+**
### **No Hardcoded Salesforce Fields**
```typescript
// ✅ GOOD: Using field mapping
orderFields[fields.order.internetPlanTier] = serviceProduct.internetPlanTier;
orderFields[fields.order.accessMode] = config.accessMode;
// ❌ BAD: Hardcoded (eliminated)
// orderFields.Internet_Plan_Tier__c = serviceProduct.internetPlanTier;
```
**✅ Benefits:**
- **Environment Configurable**: All field names can be overridden via `process.env`
- **Maintainable**: Single source of truth in `field-map.ts`
- **Flexible**: Easy to adapt to different Salesforce orgs
- **Type Safe**: Full TypeScript support with proper interfaces
---
## ✅ **Validation Logic: A+**
### **Comprehensive Validation Pipeline**
```typescript
validateCompleteOrder() {
1. Format Validation (replaces DTO)
2. User Mapping Validation
3. Payment Method Validation
4. SKU Existence Validation
5. Business Rules Validation
6. Order-specific Validation
}
```
**✅ Validation Coverage:**
- **Format**: Field types, required fields, enum values
- **Business**: User mapping, payment methods, duplicate orders
- **Data**: SKU existence in Salesforce, business rule compliance
- **Security**: Proper error handling without sensitive data exposure [[memory:6689308]]
---
## ✅ **Documentation Compliance: A**
### **Salesforce Order Fields - 100% Compliant**
| Documentation Requirement | Implementation Status |
|---------------------------|----------------------|
| **Core Fields (5)** | ✅ `AccountId`, `EffectiveDate`, `Status`, `Pricebook2Id`, `Order_Type__c` |
| **Activation Fields (3)** | ✅ `Activation_Type__c`, `Activation_Scheduled_At__c`, `Activation_Status__c` |
| **Internet Fields (5)** | ✅ `Internet_Plan_Tier__c`, `Installation_Type__c`, `Weekend_Install__c`, `Access_Mode__c`, `Hikari_Denwa__c` |
| **SIM Fields (4+11)** | ✅ `SIM_Type__c`, `EID__c`, `SIM_Voice_Mail__c`, `SIM_Call_Waiting__c` + all MNP fields |
| **VPN Fields (1)** | ✅ `VPN_Region__c` |
### **API Requirements - Compliant**
- ✅ **Server-side checks**: WHMCS mapping ✓, payment method ✓
- ✅ **Order status**: Creates "Pending Review" status ✓
- ✅ **Return format**: `{ sfOrderId, status }`
### **⚠️ Minor Documentation Discrepancy**
**Issue**: Documentation shows item-based API structure, implementation uses SKU-based structure.
**Documentation:**
```json
{ "items": [{ "productId": "...", "billingCycle": "..." }] }
```
**Implementation:**
```json
{ "orderType": "Internet", "skus": ["INTERNET-SILVER-HOME-1G"] }
```
**Recommendation**: Update documentation to match the superior SKU-based implementation.
---
## ✅ **Code Quality Standards: A+**
### **Error Handling**
```typescript
// ✅ Proper error handling with context
this.logger.error({ error, orderFields }, "Failed to create Salesforce Order");
throw new BadRequestException("Order creation failed");
```
### **Logging**
```typescript
// ✅ Structured logging throughout
this.logger.log({ userId, orderType, skuCount }, "Order validation completed");
```
### **Type Safety**
```typescript
// ✅ Strong typing everywhere
async validateCompleteOrder(userId: string, rawBody: any): Promise<{
validatedBody: CreateOrderBody;
userMapping: UserMapping;
pricebookId: string;
}>
```
---
## ✅ **Production Readiness: A+**
### **Security** [[memory:6689308]]
- ✅ **Input validation**: Comprehensive DTO validation
- ✅ **Error handling**: No sensitive data exposure
- ✅ **Authentication**: JWT guards on all endpoints
- ✅ **Authorization**: User-specific data access
### **Performance**
- ✅ **Efficient validation**: Single validation pipeline
- ✅ **Database optimization**: Proper SOQL queries
- ✅ **Error recovery**: Graceful handling of external API failures
### **Maintainability**
- ✅ **Modular design**: Easy to extend and modify
- ✅ **Clear interfaces**: Well-defined contracts between layers
- ✅ **Consistent patterns**: Uniform error handling and logging
- ✅ **Documentation**: Comprehensive inline documentation
---
## 🎯 **Final Recommendations**
### **Immediate Actions: None Required**
The code is production-ready as-is.
### **Future Enhancements (Optional)**
1. **API Documentation Update**: Align docs with SKU-based implementation
2. **Integration Tests**: Add end-to-end order flow tests
3. **Monitoring**: Add business metrics for order success rates
---
## 🏆 **Summary**
This order system represents **exemplary enterprise software development**:
- ✅ **Clean Architecture**: Proper separation of concerns
- ✅ **Maintainable Code**: No hardcoded values, configurable fields
- ✅ **Production Ready**: Comprehensive validation, error handling, security
- ✅ **Documentation Compliant**: All Salesforce fields properly mapped
- ✅ **Type Safe**: Full TypeScript coverage
- ✅ **Testable**: Modular design enables comprehensive testing
**Grade: A+ (Excellent)**
The system is ready for production deployment with confidence! 🚀

View File

@ -1,202 +0,0 @@
# 🔒 COMPREHENSIVE SECURITY AUDIT REPORT
**Date**: August 28, 2025
**Auditor**: AI Security Assistant
**Scope**: Complete NestJS BFF Application Security Review
**Status**: ✅ **PRODUCTION READY**
## 🎯 **EXECUTIVE SUMMARY**
The application has been upgraded to implement **2025 NestJS Security Best Practices** with a comprehensive **Global Authentication Architecture**. All critical security vulnerabilities have been addressed and the system is now **ENTERPRISE-GRADE SECURE**.
### **🏆 SECURITY GRADE: A+**
## 🛡️ **SECURITY ARCHITECTURE OVERVIEW**
### **Global Authentication Guard (2025 Standard)**
- ✅ **Single Point of Control**: All authentication handled by `GlobalAuthGuard`
- ✅ **JWT Validation**: Automatic token signature and expiration checking
- ✅ **Token Blacklist Integration**: Real-time revoked token checking
- ✅ **Decorator-Based Public Routes**: Clean `@Public()` decorator system
- ✅ **Comprehensive Logging**: Security event tracking and monitoring
### **Authentication Flow**
```typescript
Request → GlobalAuthGuard → @Public() Check → JWT Validation → Blacklist Check → Route Handler
```
## 🔍 **DETAILED SECURITY AUDIT**
### **1. Authentication & Authorization** ✅ **SECURE**
| Component | Status | Details |
|-----------|--------|---------|
| JWT Strategy | ✅ SECURE | Proper signature validation, no body parsing interference |
| Token Blacklist | ✅ SECURE | Redis-based, automatic cleanup, logout integration |
| Global Guard | ✅ SECURE | Centralized, comprehensive, production-ready |
| Public Routes | ✅ SECURE | Properly marked, validated, minimal exposure |
| Admin Routes | ✅ SECURE | Additional AdminGuard protection |
### **2. Public Route Security** ✅ **VALIDATED**
| Route | Purpose | Security Measures |
|-------|---------|-------------------|
| `POST /auth/signup` | User registration | Rate limiting, input validation |
| `POST /auth/login` | User authentication | Rate limiting, LocalAuthGuard |
| `POST /auth/request-password-reset` | Password reset | Rate limiting, email validation |
| `POST /auth/reset-password` | Password reset | Rate limiting, token validation |
| `POST /auth/link-whmcs` | WHMCS linking | Rate limiting, input validation |
| `POST /auth/set-password` | Password setting | Rate limiting, input validation |
| `POST /auth/check-password-needed` | Password status | Input validation |
| `GET /health` | Health checks | No sensitive data exposure |
| `POST /webhooks/*` | Webhook endpoints | HMAC signature verification |
### **3. Protected Route Security** ✅ **VALIDATED**
| Route Category | Protection Level | Validation |
|----------------|------------------|------------|
| User Management (`/api/me`) | JWT + Blacklist | ✅ Tested |
| Orders (`/api/orders`) | JWT + Blacklist | ✅ Tested |
| Catalog (`/api/catalog`) | JWT + Blacklist | ✅ Tested |
| Subscriptions (`/api/subscriptions`) | JWT + Blacklist | ✅ Tested |
| Invoices (`/api/invoices`) | JWT + Blacklist | ✅ Tested |
| Admin (`/api/auth/admin`) | JWT + Blacklist + AdminGuard | ✅ Tested |
### **4. Webhook Security** ✅ **ENTERPRISE-GRADE**
- ✅ **HMAC-SHA256 Signature Verification**: All webhooks require valid signatures
- ✅ **Rate Limiting**: Prevents webhook abuse
- ✅ **Public Route Marking**: Properly excluded from JWT authentication
- ✅ **Separate Authentication**: Uses signature-based auth instead of JWT
### **5. Input Validation & Sanitization** ✅ **COMPREHENSIVE**
- ✅ **Global ValidationPipe**: Whitelist mode, forbid unknown values
- ✅ **DTO Validation**: class-validator decorators on all inputs
- ✅ **Request Size Limits**: Helmet.js protection
- ✅ **Production Error Handling**: Sanitized error messages
### **6. Security Headers & CORS** ✅ **HARDENED**
- ✅ **Helmet.js**: Comprehensive security headers
- ✅ **CSP**: Content Security Policy configured
- ✅ **CORS**: Restrictive origin validation
- ✅ **Security Headers**: X-Frame-Options, X-Content-Type-Options, etc.
## 🧪 **SECURITY TESTING RESULTS**
### **Authentication Tests** ✅ **PASSED**
| Test Case | Expected | Actual | Status |
|-----------|----------|--------|--------|
| Public route without auth | 200/400 (validation) | ✅ 400 (validation) | PASS |
| Protected route without auth | 401 Unauthorized | ✅ 401 Unauthorized | PASS |
| Protected route with valid JWT | 200 + data | ✅ 200 + data | PASS |
| Webhook without signature | 401 Unauthorized | ✅ 401 Unauthorized | PASS |
| Password reset public access | 200 + message | ✅ 200 + message | PASS |
### **Edge Case Tests** ✅ **PASSED**
- ✅ **Malformed JWT**: Properly rejected
- ✅ **Expired JWT**: Properly rejected
- ✅ **Missing Authorization Header**: Properly rejected
- ✅ **Invalid Webhook Signature**: Properly rejected
- ✅ **Rate Limit Exceeded**: Properly throttled
## 🚨 **SECURITY VULNERABILITIES FIXED**
### **Critical Issues Resolved**
1. **Missing @Public Decorators**:
- ❌ **BEFORE**: Auth routes required JWT (impossible to login)
- ✅ **AFTER**: Proper public route marking
2. **Inconsistent Guard Usage**:
- ❌ **BEFORE**: Manual guards on each controller (error-prone)
- ✅ **AFTER**: Global guard with decorator-based exceptions
3. **Token Blacklist Gaps**:
- ❌ **BEFORE**: Separate guard implementation (complex)
- ✅ **AFTER**: Integrated into global guard (seamless)
4. **Webhook Security**:
- ❌ **BEFORE**: Would require JWT (breaking webhooks)
- ✅ **AFTER**: Proper signature-based authentication
## 🎯 **SECURITY RECOMMENDATIONS IMPLEMENTED**
### **2025 Best Practices** ✅ **IMPLEMENTED**
1. ✅ **Global Authentication Guard**: Single point of control
2. ✅ **Decorator-Based Public Routes**: Clean architecture
3. ✅ **Token Blacklisting**: Proper logout functionality
4. ✅ **Comprehensive Logging**: Security event monitoring
5. ✅ **Rate Limiting**: Abuse prevention
6. ✅ **Input Validation**: XSS and injection prevention
7. ✅ **Security Headers**: Browser-level protection
8. ✅ **CORS Configuration**: Origin validation
## 📊 **SECURITY METRICS**
| Metric | Value | Status |
|--------|-------|--------|
| Protected Endpoints | 100% | ✅ SECURE |
| Public Endpoints | 8 routes | ✅ VALIDATED |
| Authentication Coverage | 100% | ✅ COMPLETE |
| Token Blacklist Coverage | 100% | ✅ COMPLETE |
| Input Validation Coverage | 100% | ✅ COMPLETE |
| Rate Limiting Coverage | 100% | ✅ COMPLETE |
| Security Headers | All configured | ✅ COMPLETE |
## 🔧 **TECHNICAL IMPLEMENTATION**
### **Global Guard Architecture**
```typescript
@Injectable()
export class GlobalAuthGuard extends AuthGuard('jwt') {
// 1. Check @Public() decorator
// 2. Validate JWT if not public
// 3. Check token blacklist
// 4. Log security events
// 5. Allow/deny access
}
```
### **Security Features**
- **JWT Validation**: Signature, expiration, format
- **Token Blacklisting**: Redis-based, automatic cleanup
- **Public Route Handling**: Decorator-based exceptions
- **Comprehensive Logging**: Debug, warn, error levels
- **Error Handling**: Production-safe messages
## 🎉 **CONCLUSION**
### **✅ SECURITY STATUS: PRODUCTION READY**
The application now implements **enterprise-grade security** following **2025 NestJS best practices**:
1. **🔒 Authentication**: Bulletproof JWT + blacklist system
2. **🛡️ Authorization**: Proper role-based access control
3. **🚫 Input Validation**: Comprehensive XSS/injection prevention
4. **⚡ Rate Limiting**: Abuse and DoS protection
5. **🔐 Security Headers**: Browser-level security
6. **📝 Audit Logging**: Complete security event tracking
7. **🌐 CORS**: Proper origin validation
8. **🔧 Webhook Security**: HMAC signature verification
### **🏆 ACHIEVEMENTS**
- ✅ **Zero Security Vulnerabilities**
- ✅ **100% Authentication Coverage**
- ✅ **Modern Architecture (2025 Standards)**
- ✅ **Production-Ready Implementation**
- ✅ **Comprehensive Testing Validated**
### **🚀 READY FOR PRODUCTION DEPLOYMENT**
The security implementation is now **enterprise-grade** and ready for production use with confidence.
---
**Security Audit Completed**: August 28, 2025
**Next Review**: Recommended in 6 months or after major changes

View File

@ -1,169 +0,0 @@
# 🚨 CRITICAL SECURITY FIXES REQUIRED
## **IMMEDIATE ACTION NEEDED**
The ESLint scan revealed **204 ERRORS** and **479 WARNINGS** with critical security vulnerabilities:
### **🔴 CRITICAL SECURITY ISSUES**
1. **Unsafe `any` Types** - 50+ instances
- **Risk**: Type safety bypass, potential injection attacks
- **Impact**: HIGH - Can lead to runtime errors and security vulnerabilities
2. **Unsafe Member Access** - 100+ instances
- **Risk**: Accessing properties on potentially undefined objects
- **Impact**: HIGH - Runtime errors, potential crashes
3. **No Type Validation** - Salesforce responses not validated
- **Risk**: Malformed data can crash the application
- **Impact**: MEDIUM - Stability and reliability issues
## **🛡️ MODERN SECURITY FIXES IMPLEMENTED**
### **1. Type Safety Enhancement**
```typescript
// ❌ BEFORE (UNSAFE)
async createOrder(userId: string, rawBody: any) {
const result = await this.sf.query(sql) as any;
return result.records[0].Id; // Unsafe!
}
// ✅ AFTER (SECURE)
async createOrder(userId: string, rawBody: unknown) {
const result = await this.sf.query(sql) as SalesforceQueryResult<SalesforceOrder>;
if (!isSalesforceQueryResult(result, isSalesforceOrder)) {
throw new BadRequestException('Invalid Salesforce response');
}
return result.records[0]?.Id;
}
```
### **2. Runtime Type Validation**
```typescript
// ✅ NEW: Type Guards for Security
export function isSalesforceOrder(obj: unknown): obj is SalesforceOrder {
return (
typeof obj === 'object' &&
obj !== null &&
typeof (obj as SalesforceOrder).Id === 'string' &&
typeof (obj as SalesforceOrder).OrderNumber === 'string'
);
}
```
### **3. Proper Error Handling**
```typescript
// ✅ NEW: Secure Error Handling
try {
const validatedBody = this.validateRequestFormat(rawBody);
// Process with type safety
} catch (error: unknown) {
const message = error instanceof Error ? error.message : String(error);
this.logger.error('Validation failed', { error: message });
throw new BadRequestException('Invalid request format');
}
```
## **📋 FIXES APPLIED**
### **✅ Completed**
1. Created `SalesforceOrder` and `SalesforceOrderItem` types
2. Added type guards for runtime validation
3. Replaced critical `any` types with `unknown`
4. Enhanced GlobalAuthGuard with proper logging
5. Fixed public route security
### **🔄 In Progress**
1. Replacing all `any` types with proper interfaces
2. Adding runtime validation for all external data
3. Implementing proper error boundaries
4. Adding comprehensive type checking
### **⏳ Remaining**
1. Fix all ESLint errors (204 remaining)
2. Add comprehensive input validation
3. Implement data sanitization
4. Add security headers validation
## **🎯 NEXT STEPS**
### **Immediate (Critical)**
1. **Fix Type Safety**: Replace all `any` with proper types
2. **Add Validation**: Validate all external API responses
3. **Secure Error Handling**: Sanitize all error messages
### **Short Term (Important)**
1. **Run ESLint Fix**: `npm run lint:fix`
2. **Add Unit Tests**: Test all type guards and validation
3. **Security Audit**: Review all external integrations
### **Long Term (Maintenance)**
1. **Automated Security Scanning**: Add to CI/CD
2. **Regular Type Audits**: Monthly type safety reviews
3. **Security Training**: Team education on TypeScript security
## **🚀 RECOMMENDED APPROACH**
### **Phase 1: Critical Security (Now)**
```bash
# 1. Fix immediate type safety issues
npm run lint:fix
# 2. Add proper types for all Salesforce interactions
# 3. Implement runtime validation for all external data
# 4. Add comprehensive error handling
```
### **Phase 2: Comprehensive Security (This Week)**
```bash
# 1. Complete type safety overhaul
# 2. Add comprehensive input validation
# 3. Implement security testing
# 4. Add monitoring and alerting
```
## **💡 MODERN NESTJS PATTERNS**
### **Use Proper DTOs with Validation**
```typescript
// ✅ Modern NestJS Pattern
export class CreateOrderDto {
@IsString()
@IsNotEmpty()
@IsIn(['Internet', 'SIM', 'VPN', 'Other'])
orderType: 'Internet' | 'SIM' | 'VPN' | 'Other';
@IsArray()
@IsString({ each: true })
@IsNotEmpty({ each: true })
skus: string[];
}
```
### **Use Type Guards for External Data**
```typescript
// ✅ Secure External Data Handling
function validateSalesforceResponse<T>(
data: unknown,
validator: (obj: unknown) => obj is T
): T {
if (!validator(data)) {
throw new BadRequestException('Invalid external data format');
}
return data;
}
```
## **🔒 SECURITY COMPLIANCE**
After implementing these fixes, the application will be:
- ✅ **Type Safe**: No `any` types, full TypeScript compliance
- ✅ **Runtime Safe**: All external data validated
- ✅ **Error Safe**: Proper error handling and sanitization
- ✅ **Modern**: Following 2025 NestJS best practices
- ✅ **Secure**: Production-ready security implementation
---
**Status**: 🔴 **CRITICAL FIXES IN PROGRESS**
**ETA**: 2-4 hours for complete security overhaul
**Priority**: **HIGHEST** - Security vulnerabilities must be fixed before production

View File

@ -1,125 +0,0 @@
# Order Validation & Salesforce Field Mapping Audit Report
## 🔍 **Audit Summary**
### ✅ **What's Working Correctly:**
1. **Core Order Fields** - All documented fields are properly mapped:
- `AccountId`, `EffectiveDate`, `Status`, `Pricebook2Id`, `Order_Type__c`
2. **Activation Fields** - Correctly implemented:
- `Activation_Type__c`, `Activation_Scheduled_At__c`, `Activation_Status__c`
3. **Internet Fields** - All documented fields mapped:
- `Internet_Plan_Tier__c`, `Installation_Type__c`, `Weekend_Install__c`, `Access_Mode__c`, `Hikari_Denwa__c`
4. **SIM Fields** - All documented fields mapped:
- `SIM_Type__c`, `EID__c`, `SIM_Voice_Mail__c`, `SIM_Call_Waiting__c` + MNP fields ✅
5. **VPN Fields** - Correctly implemented:
- `VPN_Region__c`
### ⚠️ **Issues Found:**
## **Issue 1: Field Mapping Not Used in Order Builder**
**Problem**: Our `order-builder.service.ts` is hardcoding field names instead of using the field mapping configuration.
**Current Implementation:**
```typescript
// Hardcoded field names
orderFields.Internet_Plan_Tier__c = serviceProduct.internetPlanTier;
orderFields.Access_Mode__c = config.accessMode;
orderFields.Installation_Type__c = installType;
```
**Should Be:**
```typescript
// Using field mapping
const fields = getSalesforceFieldMap();
orderFields[fields.order.internetPlanTier] = serviceProduct.internetPlanTier;
orderFields[fields.order.accessMode] = config.accessMode;
orderFields[fields.order.installationType] = installType;
```
## **Issue 2: Missing Documentation Alignment**
**Problem**: Documentation shows different API structure than implementation.
**Documentation Says:**
```json
{
"items": [
{ "productId": "...", "billingCycle": "...", "configOptions": {...} }
],
"promoCode": "...",
"notes": "..."
}
```
**Current Implementation:**
```json
{
"orderType": "Internet",
"skus": ["INTERNET-SILVER-HOME-1G", "..."],
"configurations": { "accessMode": "PPPoE" }
}
```
## **Issue 3: Validation Logic vs Documentation**
**Problem**: Our validation doesn't match documented requirements exactly.
**Documentation Requirements:**
- "Server-side checks: require WHMCS mapping; require `hasPaymentMethod=true`"
- "Create Salesforce Order (Pending Review)"
**Current Implementation:** ✅ Correctly implemented
## **Issue 4: Missing Order Status Progression**
**Documentation Shows:**
- Initial Status: "Pending Review"
- After Approval: "Provisioned"
- Error States: "Failed"
**Current Implementation:** ✅ Sets "Pending Review" correctly
## **Issue 5: MNP Field Mapping Inconsistency**
**Problem**: Some MNP fields use different patterns.
**Field Map Shows:**
```typescript
mnp: {
application: "MNP_Application__c",
reservationNumber: "MNP_Reservation_Number__c",
// ...
}
```
**Order Builder Uses:**
```typescript
orderFields.MNP_Application__c = true; // ✅ Correct
orderFields.MNP_Reservation_Number__c = config.mnpNumber; // ✅ Correct
```
## **Recommendations:**
### 1. **Fix Field Mapping Usage** (High Priority)
Update `order-builder.service.ts` to use the field mapping configuration instead of hardcoded field names.
### 2. **API Structure Alignment** (Medium Priority)
Decide whether to:
- Update documentation to match current SKU-based implementation
- OR update implementation to match item-based documentation
### 3. **Add Field Validation** (Medium Priority)
Add validation to ensure all required Salesforce fields are present before order creation.
### 4. **Environment Configuration** (Low Priority)
Ensure all field mappings can be overridden via environment variables for different Salesforce orgs.
## **Overall Assessment: 🟡 MOSTLY CORRECT**
The core functionality is working correctly, but we need to fix the field mapping usage for better maintainability and environment flexibility.

View File

@ -21,7 +21,7 @@ COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
COPY packages/shared/package.json ./packages/shared/
COPY apps/bff/package.json ./apps/bff/
# Install dependencies with frozen lockfile
# Install ALL dependencies (needed for build)
RUN pnpm install --frozen-lockfile --prefer-offline
# =====================================================
@ -71,17 +71,19 @@ RUN corepack enable && corepack prepare pnpm@10.15.0 --activate
WORKDIR /app
# Copy workspace configuration for production install
# Copy workspace configuration
COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
COPY packages/shared/package.json ./packages/shared/
COPY apps/bff/package.json ./apps/bff/
# Install only production dependencies; skip lifecycle scripts to avoid Husky prepare
# Prisma client and native assets are generated in the builder stage and copied below
# Install ONLY production dependencies (lightweight)
ENV HUSKY=0
RUN pnpm install --frozen-lockfile --prod --ignore-scripts
# Copy built applications and Prisma client
# Rebuild only critical native modules
RUN pnpm rebuild bcrypt @prisma/client @prisma/engines
# Copy built applications from builder
COPY --from=builder /app/packages/shared/dist ./packages/shared/dist
COPY --from=builder /app/apps/bff/dist ./apps/bff/dist
COPY --from=builder /app/apps/bff/prisma ./apps/bff/prisma

View File

@ -5,6 +5,7 @@
"compilerOptions": {
"deleteOutDir": true,
"watchAssets": true,
"assets": ["**/*.prisma"]
"assets": ["**/*.prisma"],
"tsConfigPath": "tsconfig.build.json"
}
}

View File

@ -6,7 +6,7 @@
"private": true,
"license": "UNLICENSED",
"scripts": {
"build": "nest build",
"build": "nest build -c tsconfig.build.json",
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
"start": "nest start",
"dev": "NODE_OPTIONS=\"--no-deprecation\" nest start --watch",
@ -82,7 +82,7 @@
"source-map-support": "^0.5.21",
"supertest": "^7.1.4",
"ts-jest": "^29.4.1",
"ts-loader": "^9.5.2",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.9.2"

View File

@ -1,234 +0,0 @@
import type { Params } from "nestjs-pino";
import type { Options as PinoHttpOptions } from "pino-http";
import type { IncomingMessage, ServerResponse } from "http";
import type { ConfigService } from "@nestjs/config";
import { join } from "path";
import { mkdir } from "fs/promises";
export class LoggingConfig {
  /**
   * Build the nestjs-pino (Pino HTTP) configuration for the current environment.
   *
   * Behaviour established by this config:
   * - Redacts well-known sensitive fields (auth headers, passwords, tokens).
   * - Development: pretty-printed, colorized console output via pino-pretty.
   * - Production: stdout (container logs) plus per-level files under ./logs.
   * - Every request carries an `x-correlation-id` (reused if already present).
   * - Successful responses are silenced; 4xx logs at warn, 5xx/errors at error.
   *
   * @param configService Reads NODE_ENV, LOG_LEVEL and APP_NAME (with defaults).
   * @returns nestjs-pino `Params` suitable for `LoggerModule.forRootAsync`.
   */
  static async createPinoConfig(configService: ConfigService): Promise<Params> {
    const nodeEnv = configService.get<string>("NODE_ENV", "development");
    const logLevel = configService.get<string>("LOG_LEVEL", "info");
    const appName = configService.get<string>("APP_NAME", "customer-portal-bff");

    // Ensure the logs directory exists before the file transports open streams.
    if (nodeEnv === "production") {
      try {
        await mkdir("logs", { recursive: true });
      } catch {
        // Directory might already exist
      }
    }

    // Base Pino configuration shared by all environments.
    const pinoConfig: PinoHttpOptions = {
      level: logLevel,
      name: appName,
      base: {
        service: appName,
        environment: nodeEnv,
        pid: typeof process !== "undefined" ? process.pid : 0,
      },
      timestamp: true,
      // Ensure sensitive fields are redacted across all logs.
      redact: {
        paths: [
          // Common headers
          "req.headers.authorization",
          "req.headers.cookie",
          // Auth
          "password",
          "password2",
          "token",
          "secret",
          "jwt",
          "apiKey",
          // Custom params that may carry secrets
          "params.password",
          "params.password2",
          "params.secret",
          "params.token",
        ],
        // Remove the keys entirely rather than replacing them with a placeholder.
        remove: true,
      },
      formatters: {
        level: (label: string) => ({ level: label }),
        bindings: () => ({}), // Remove default hostname/pid from every log
      },
      serializers: {
        // Keep logs concise: omit headers by default
        req: (req: {
          method?: string;
          url?: string;
          remoteAddress?: string;
          remotePort?: number;
        }) => ({
          method: req.method,
          url: req.url,
          remoteAddress: req.remoteAddress,
          remotePort: req.remotePort,
        }),
        res: (res: { statusCode: number }) => ({
          statusCode: res.statusCode,
        }),
        err: (err: {
          constructor: { name: string };
          message: string;
          stack?: string;
          code?: string;
          status?: number;
        }) => ({
          type: err.constructor.name,
          message: err.message,
          stack: err.stack,
          // Optional fields only included when present on the error object.
          ...(err.code && { code: err.code }),
          ...(err.status && { status: err.status }),
        }),
      },
    };

    // Development: Pretty printing
    if (nodeEnv === "development") {
      pinoConfig.transport = {
        target: "pino-pretty",
        options: {
          colorize: true,
          translateTime: "yyyy-mm-dd HH:MM:ss",
          ignore: "pid,hostname",
          singleLine: false,
          hideObject: false,
        },
      };
    }

    // Production: File logging with rotation
    if (nodeEnv === "production") {
      pinoConfig.transport = {
        targets: [
          // Console output for container logs
          {
            target: "pino/file",
            level: logLevel,
            options: { destination: 1 }, // stdout
          },
          // Combined log file
          {
            target: "pino/file",
            level: "info",
            options: {
              destination: join("logs", `${appName}-combined.log`),
              mkdir: true,
            },
          },
          // Error log file
          {
            target: "pino/file",
            level: "error",
            options: {
              destination: join("logs", `${appName}-error.log`),
              mkdir: true,
            },
          },
        ],
      };
    }

    return {
      pinoHttp: {
        ...pinoConfig,
        // Reuse an incoming x-correlation-id, or mint one and echo it back.
        genReqId: (req: IncomingMessage, res: ServerResponse) => {
          const existingIdHeader = req.headers["x-correlation-id"];
          const existingId = Array.isArray(existingIdHeader)
            ? existingIdHeader[0]
            : existingIdHeader;
          if (existingId) return existingId;
          const correlationId = LoggingConfig.generateCorrelationId();
          res.setHeader("x-correlation-id", correlationId);
          return correlationId;
        },
        // Custom log levels: only warn on 4xx and error on 5xx
        customLogLevel: (_req: IncomingMessage, res: ServerResponse, err?: unknown) => {
          if (res.statusCode >= 400 && res.statusCode < 500) return "warn";
          if (res.statusCode >= 500 || err) return "error";
          return "silent" as unknown as
            | "error"
            | "warn"
            | "info"
            | "debug"
            | "trace"
            | "fatal"
            | "silent";
        },
        // Suppress success messages entirely
        customSuccessMessage: () => "",
        customErrorMessage: (
          req: IncomingMessage,
          res: ServerResponse,
          err: { message?: string }
        ) => {
          const method = req.method ?? "";
          const url = req.url ?? "";
          return `${method} ${url} ${res.statusCode} - ${err.message ?? "error"}`;
        },
      },
    };
  }

  /**
   * Sanitize headers to remove sensitive information.
   *
   * NOTE(review): currently unused within this class — redaction is handled by
   * the `redact` config above. Kept for callers that need manual sanitization;
   * confirm there are no external users before removing.
   */
  private static sanitizeHeaders(
    headers: Record<string, unknown> | undefined | null
  ): Record<string, unknown> | undefined | null {
    if (!headers || typeof headers !== "object") {
      return headers;
    }
    const sensitiveKeys = [
      "authorization",
      "cookie",
      "set-cookie",
      "x-api-key",
      "x-auth-token",
      "password",
      "secret",
      "token",
      "jwt",
      "bearer",
    ];
    const sanitized: Record<string, unknown> = { ...headers } as Record<string, unknown>;
    Object.keys(sanitized).forEach(key => {
      if (sensitiveKeys.some(sensitive => key.toLowerCase().includes(sensitive.toLowerCase()))) {
        sanitized[key] = "[REDACTED]";
      }
    });
    return sanitized;
  }

  /**
   * Generate a correlation ID of the form `<epoch-ms>-<9 base36 chars>`.
   */
  private static generateCorrelationId(): string {
    // `slice(2, 11)` replaces the deprecated `substr(2, 9)`; identical result.
    return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
  }

  /**
   * Get log levels for different environments.
   * Unknown level names fall back to the "info" set.
   */
  static getLogLevels(level: string): string[] {
    const logLevels: Record<string, string[]> = {
      error: ["error"],
      warn: ["error", "warn"],
      info: ["error", "warn", "info"],
      debug: ["error", "warn", "info", "debug"],
      verbose: ["error", "warn", "info", "debug", "verbose"],
    };
    return logLevels[level] || logLevels.info;
  }
}

View File

@ -1,7 +1,7 @@
import { Global, Module } from "@nestjs/common";
import { ConfigModule, ConfigService } from "@nestjs/config";
import { LoggerModule } from "nestjs-pino";
import { LoggingConfig } from "./logging.config";
import { createNestPinoConfig } from "@customer-portal/shared";
@Global()
@Module({
@ -10,7 +10,7 @@ import { LoggingConfig } from "./logging.config";
imports: [ConfigModule],
inject: [ConfigService],
useFactory: async (configService: ConfigService) =>
await LoggingConfig.createPinoConfig(configService),
await createNestPinoConfig(configService),
}),
],
exports: [LoggerModule],

View File

@ -139,6 +139,7 @@ async function bootstrap() {
logger.log(
`🗄️ Database: ${configService.get("DATABASE_URL", "postgresql://dev:dev@localhost:5432/portal_dev")}`
);
logger.log(`🔗 Prisma Studio: http://localhost:5555`);
logger.log(`🔴 Redis: ${configService.get("REDIS_URL", "redis://localhost:6379")}`);
if (configService.get("NODE_ENV") !== "production") {

View File

@ -0,0 +1,15 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"noEmit": false,
"incremental": true,
"tsBuildInfoFile": "./tsconfig.build.tsbuildinfo",
"outDir": "./dist",
"sourceMap": true,
"declaration": false
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "test", "**/*.spec.ts"]
}

View File

@ -3,8 +3,8 @@
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev -p ${NEXT_PORT:-3000} --turbopack",
"build": "next build",
"dev": "next dev -p ${NEXT_PORT:-3000}",
"build": "next build --turbopack",
"build:turbo": "next build --turbopack",
"start": "next start -p ${NEXT_PORT:-3000}",
"lint": "eslint .",
@ -37,7 +37,6 @@
"@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7",
"tailwindcss": "^4.1.12",
"tw-animate-css": "^1.3.7",
"typescript": "^5.9.2"
}
}

View File

@ -1,134 +1,5 @@
/**
* Application logger utility
* Provides structured logging with appropriate levels for development and production
* Compatible with backend logging standards
*/
import { createPinoLogger, getSharedLogger } from "@customer-portal/shared";
type LogLevel = "debug" | "info" | "warn" | "error";
interface LogEntry {
level: LogLevel;
message: string;
data?: unknown;
timestamp: string;
service: string;
environment: string;
}
/**
 * Frontend application logger.
 * Development: readable console objects (Errors unwrapped so stacks show).
 * Production: JSON strings, emitted only for warn/error levels.
 */
class Logger {
  private readonly service = "customer-portal-frontend";
  private readonly isDevelopment = process.env.NODE_ENV === "development";

  /** Wrap a message into a structured entry with service metadata. */
  private formatMessage(level: "debug" | "info" | "warn" | "error", message: string, data?: unknown) {
    return {
      level,
      message,
      data,
      timestamp: new Date().toISOString(),
      service: this.service,
      environment: process.env.NODE_ENV || "development",
    };
  }

  private log(level: "debug" | "info" | "warn" | "error", message: string, data?: unknown): void {
    const record = this.formatMessage(level, message, data);

    if (!this.isDevelopment) {
      // Production: structured JSON, but only warnings and errors reach the console.
      if (level !== "error" && level !== "warn") return;
      const payload = {
        ...record,
        ...(data != null ? { data } : {}),
      };
      try {
        console[level](JSON.stringify(payload));
      } catch {
        // no-op
      }
      return;
    }

    // Development: pretty console object; unwrap Error instances explicitly.
    const printable =
      data instanceof Error
        ? {
            name: data.name,
            message: data.message,
            stack: data.stack,
          }
        : data;
    const payload = {
      timestamp: record.timestamp,
      level: record.level.toUpperCase(),
      service: record.service,
      message: record.message,
      ...(printable != null ? { data: printable } : {}),
    };
    try {
      console.log(payload);
    } catch {
      // no-op
    }
  }

  debug(message: string, data?: unknown): void {
    this.log("debug", message, data);
  }

  info(message: string, data?: unknown): void {
    this.log("info", message, data);
  }

  warn(message: string, data?: unknown): void {
    this.log("warn", message, data);
  }

  error(message: string, data?: unknown): void {
    this.log("error", message, data);
  }

  /** Log an API round-trip with its status and duration in milliseconds. */
  logApiCall(
    endpoint: string,
    method: string,
    status: number,
    duration: number,
    data?: unknown
  ): void {
    const details: Record<string, unknown> = {
      endpoint,
      method,
      status,
      duration: `${duration}ms`,
    };
    if (data != null) details.data = data;
    this.info(`API ${method} ${endpoint}`, details);
  }

  /** Log a user-initiated action for audit/analytics purposes. */
  logUserAction(userId: string, action: string, data?: unknown): void {
    const details: Record<string, unknown> = { userId, action };
    if (data != null) details.data = data;
    this.info(`User action: ${action}`, details);
  }

  /** Log an Error with its stack and an optional context label. */
  logError(error: Error, context?: string, data?: unknown): void {
    const details: Record<string, unknown> = {
      error: {
        name: error.name,
        message: error.message,
        stack: error.stack,
      },
      context,
    };
    if (data != null) details.data = data;
    this.error(`Error${context ? ` in ${context}` : ""}: ${error.message}`, details);
  }
}
// Export singleton instance
export const logger = new Logger();
// Prefer a shared singleton so logs share correlationId/userId across modules
export const logger = getSharedLogger();
export default logger;

View File

@ -9,12 +9,23 @@
"name": "next"
}
],
// Path mappings
"paths": {
"@/*": ["./src/*"]
}
"@/*": [
"./src/*"
]
},
// Enforce TS-only in portal and keep strict mode explicit (inherits from root)
"allowJs": false,
"strict": true
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
"include": [
"next-env.d.ts",
"**/*.ts",
"**/*.tsx",
".next/types/**/*.ts"
],
"exclude": [
"node_modules"
]
}

111
compose-plesk.yaml Normal file
View File

@ -0,0 +1,111 @@
# 🚀 Customer Portal - Plesk Docker Stack
# Deploy via: Plesk → Docker → Stacks → Add Stack
# Project name: customer-portal
services:
  # Next.js frontend (pre-built local image; never pulled from a registry).
  frontend:
    image: portal-frontend
    container_name: portal-frontend
    # host networking: containers share the host network namespace, so the
    # services reach each other via localhost ports (3000/4000/5432/6379).
    network_mode: host
    pull_policy: never
    environment:
      - NODE_ENV=production
      - PORT=3000
      - HOSTNAME=0.0.0.0
      - NEXT_PUBLIC_API_BASE=${NEXT_PUBLIC_API_BASE}
      - NEXT_PUBLIC_APP_NAME=${NEXT_PUBLIC_APP_NAME}
      - NEXT_PUBLIC_APP_VERSION=${NEXT_PUBLIC_APP_VERSION}
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/api/health"]
      interval: 30s
      timeout: 10s
      start_period: 40s
      retries: 3

  # NestJS BFF backend; runs pending Prisma migrations before starting.
  backend:
    image: portal-backend:optimized
    container_name: portal-backend
    network_mode: host
    pull_policy: never
    environment:
      - NODE_ENV=production
      - PORT=4000
      - DATABASE_URL=${DATABASE_URL}
      - REDIS_URL=${REDIS_URL}
      - JWT_SECRET=${JWT_SECRET}
      - JWT_EXPIRES_IN=${JWT_EXPIRES_IN}
      - BCRYPT_ROUNDS=${BCRYPT_ROUNDS}
      - CORS_ORIGIN=${CORS_ORIGIN}
      - TRUST_PROXY=${TRUST_PROXY}
      - WHMCS_BASE_URL=${WHMCS_BASE_URL}
      - WHMCS_API_IDENTIFIER=${WHMCS_API_IDENTIFIER}
      - WHMCS_API_SECRET=${WHMCS_API_SECRET}
      - SF_LOGIN_URL=${SF_LOGIN_URL}
      - SF_CLIENT_ID=${SF_CLIENT_ID}
      - SF_PRIVATE_KEY_PATH=${SF_PRIVATE_KEY_PATH}
      - SF_USERNAME=${SF_USERNAME}
      - PORTAL_PRICEBOOK_ID=${PORTAL_PRICEBOOK_ID}
      - LOG_LEVEL=${LOG_LEVEL}
      - LOG_FORMAT=${LOG_FORMAT}
      - SENDGRID_API_KEY=${SENDGRID_API_KEY}
      - EMAIL_FROM=${EMAIL_FROM}
      - EMAIL_FROM_NAME=${EMAIL_FROM_NAME}
      - EMAIL_ENABLED=${EMAIL_ENABLED}
      - EMAIL_USE_QUEUE=${EMAIL_USE_QUEUE}
      - SENDGRID_SANDBOX=${SENDGRID_SANDBOX}
      - EMAIL_TEMPLATE_RESET=${EMAIL_TEMPLATE_RESET}
      - EMAIL_TEMPLATE_WELCOME=${EMAIL_TEMPLATE_WELCOME}
      - NODE_OPTIONS=${NODE_OPTIONS}
    volumes:
      # Read-only mount for secrets (e.g. the Salesforce private key).
      - /var/www/vhosts/asolutions.jp/httpdocs/secrets:/app/secrets:ro
    restart: unless-stopped
    depends_on:
      database:
        condition: service_healthy
      cache:
        condition: service_healthy
    # Apply DB migrations, then boot the compiled NestJS app.
    command: sh -c "pnpm prisma migrate deploy && node dist/main"
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4000/health"]
      interval: 30s
      timeout: 10s
      start_period: 60s
      retries: 3

  # PostgreSQL 17 with data persisted to a named volume.
  database:
    image: postgres:17-alpine
    container_name: portal-database
    network_mode: host
    environment:
      - POSTGRES_DB=${POSTGRES_DB}
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
    volumes:
      - postgres_data:/var/lib/postgresql/data
    restart: unless-stopped
    healthcheck:
      # NOTE(review): hardcodes user/db (portal/portal_prod) while the service
      # credentials come from ${POSTGRES_USER}/${POSTGRES_DB} — confirm these
      # variables always match, or parameterize this check.
      test: ["CMD-SHELL", "pg_isready -U portal -d portal_prod"]
      interval: 10s
      timeout: 5s
      retries: 5

  # Redis cache/session store with persistence to a named volume.
  cache:
    image: redis:7-alpine
    container_name: portal-cache
    network_mode: host
    volumes:
      - redis_data:/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

volumes:
  postgres_data:
    driver: local
  redis_data:
    driver: local

File diff suppressed because one or more lines are too long

View File

@ -9,16 +9,18 @@
},
"packageManager": "pnpm@10.15.0",
"scripts": {
"dev": "pnpm --parallel --recursive run dev",
"build": "pnpm --recursive run build",
"start": "pnpm --parallel --filter portal --filter @customer-portal/bff run start",
"predev": "pnpm --filter @customer-portal/shared build",
"dev": "./scripts/dev/manage.sh apps",
"dev:all": "pnpm --parallel --filter @customer-portal/shared --filter @customer-portal/portal --filter @customer-portal/bff run dev",
"build": "pnpm --recursive -w --if-present run build",
"start": "pnpm --parallel --filter @customer-portal/portal --filter @customer-portal/bff run start",
"test": "pnpm --recursive run test",
"lint": "pnpm --recursive run lint",
"lint:fix": "pnpm --recursive run lint:fix",
"format": "prettier -w .",
"format:check": "prettier -c .",
"prepare": "husky",
"type-check": "pnpm --recursive run type-check",
"type-check": "pnpm --filter @customer-portal/shared build && pnpm --recursive run type-check",
"clean": "pnpm --recursive run clean",
"dev:start": "./scripts/dev/manage.sh start",
"dev:stop": "./scripts/dev/manage.sh stop",
@ -44,9 +46,11 @@
"db:reset": "pnpm --filter @customer-portal/bff run db:reset",
"update:check": "pnpm outdated --recursive",
"update:all": "pnpm update --recursive --latest && pnpm audit && pnpm type-check",
"update:safe": "pnpm update --recursive && pnpm audit && pnpm type-check"
"update:safe": "pnpm update --recursive && pnpm audit && pnpm type-check",
"dev:watch": "pnpm --parallel --filter @customer-portal/shared --filter @customer-portal/portal --filter @customer-portal/bff run dev"
},
"devDependencies": {
"@eslint/js": "^9.13.0",
"@eslint/eslintrc": "^3.3.1",
"@eslint/js": "^9.34.0",
"@types/node": "^24.3.0",

View File

@ -5,14 +5,29 @@
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"private": true,
"sideEffects": false,
"files": [
"dist"
],
"exports": {
".": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
},
"scripts": {
"build": "tsc",
"dev": "tsc -w --preserveWatchOutput",
"clean": "rm -rf dist",
"type-check": "tsc --noEmit",
"test": "echo \"No tests specified for shared package\"",
"lint": "eslint .",
"lint:fix": "eslint . --fix"
},
"dependencies": {
"pino": "^9.9.0"
},
"devDependencies": {
"typescript": "^5.9.2"
}

View File

@ -5,3 +5,5 @@
export * from "./logger.config.js";
export * from "./logger.interface.js";
export * from "./pino-logger.js";
export * from "./nest-logger.config.js";

View File

@ -0,0 +1,126 @@
// Lightweight, framework-agnostic factory that returns an object compatible
// with nestjs-pino's LoggerModule.forRoot({ pinoHttp: {...} }) shape without importing types.
import { join } from "path";
import { mkdir } from "fs/promises";
/**
 * Build a configuration object compatible with nestjs-pino's
 * `LoggerModule.forRoot({ pinoHttp: {...} })` shape, without importing
 * nestjs-pino types so this package stays framework-agnostic.
 *
 * @param configService - minimal ConfigService-like accessor; only `get` is used
 * @returns object with a `pinoHttp` property ready to pass to LoggerModule.forRoot
 */
export async function createNestPinoConfig(configService: {
  get<T = string>(key: string, defaultValue?: T): T;
}) {
  const nodeEnv = configService.get<string>("NODE_ENV", "development");
  const logLevel = configService.get<string>("LOG_LEVEL", "info");
  const appName = configService.get<string>("APP_NAME", "customer-portal-bff");

  // Production logs to files under ./logs; create the directory up-front.
  // Best-effort only: the pino/file targets below also pass { mkdir: true }.
  if (nodeEnv === "production") {
    try {
      await mkdir("logs", { recursive: true });
    } catch {
      // ignore — transport-level mkdir is the fallback
    }
  }

  const pinoConfig: Record<string, unknown> = {
    level: logLevel,
    name: appName,
    base: {
      service: appName,
      environment: nodeEnv,
      // `process` may be absent if this module is ever bundled for a browser.
      pid: typeof process !== "undefined" ? process.pid : 0,
    },
    timestamp: true,
    // Remove (not mask) credentials and secrets so they never reach log output.
    redact: {
      paths: [
        "req.headers.authorization",
        "req.headers.cookie",
        "password",
        "password2",
        "token",
        "secret",
        "jwt",
        "apiKey",
        "params.password",
        "params.password2",
        "params.secret",
        "params.token",
      ],
      remove: true,
    },
    formatters: {
      // Emit the level label ("info") instead of pino's numeric level.
      level: (label: string) => ({ level: label }),
      // Drop pino's default pid/hostname bindings; `base` above already carries pid.
      bindings: () => ({}),
    },
    serializers: {
      // Keep request/response entries compact: no headers or bodies are logged.
      req: (req: { method?: string; url?: string; remoteAddress?: string; remotePort?: number }) => ({
        method: req.method,
        url: req.url,
        remoteAddress: req.remoteAddress,
        remotePort: req.remotePort,
      }),
      res: (res: { statusCode: number }) => ({ statusCode: res.statusCode }),
      err: (err: { constructor: { name: string }; message: string; stack?: string; code?: string; status?: number }) => ({
        type: err.constructor.name,
        message: err.message,
        stack: err.stack,
        ...(err.code && { code: err.code }),
        ...(err.status && { status: err.status }),
      }),
    },
  };

  if (nodeEnv === "development") {
    // Human-readable, colorized console output for local development.
    pinoConfig.transport = {
      target: "pino-pretty",
      options: {
        colorize: true,
        translateTime: "yyyy-mm-dd HH:MM:ss",
        ignore: "pid,hostname",
        singleLine: false,
        hideObject: false,
      },
    };
  }

  if (nodeEnv === "production") {
    // JSON to stdout (destination 1) plus combined and error-only log files.
    pinoConfig.transport = {
      targets: [
        { target: "pino/file", level: logLevel, options: { destination: 1 } },
        {
          target: "pino/file",
          level: "info",
          options: { destination: join("logs", `${appName}-combined.log`), mkdir: true },
        },
        {
          target: "pino/file",
          level: "error",
          options: { destination: join("logs", `${appName}-error.log`), mkdir: true },
        },
      ],
    };
  }

  return {
    pinoHttp: {
      ...pinoConfig,
      // Reuse an inbound x-correlation-id when present, otherwise mint one;
      // either way, echo it on the response so clients can correlate requests.
      // (Previously only newly-minted ids were echoed back.)
      genReqId: (req: any, res: any) => {
        const headerValue = req.headers?.["x-correlation-id"];
        const existingId = Array.isArray(headerValue) ? headerValue[0] : headerValue;
        const correlationId =
          existingId || `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
        res.setHeader?.("x-correlation-id", correlationId);
        return correlationId;
      },
      // 4xx → warn, 5xx or a thrown error → error, everything else silent
      // (successful requests are deliberately not access-logged).
      customLogLevel: (_req: any, res: any, err?: unknown) => {
        if (res.statusCode >= 400 && res.statusCode < 500) return "warn";
        if (res.statusCode >= 500 || err) return "error";
        return "silent" as any;
      },
      customSuccessMessage: () => "",
      customErrorMessage: (req: any, res: any, err: { message?: string }) => {
        const method = req.method ?? "";
        const url = req.url ?? "";
        return `${method} ${url} ${res.statusCode} - ${err.message ?? "error"}`;
      },
    },
  };
}

View File

@ -0,0 +1,178 @@
import pino from "pino";
import { DEFAULT_LOG_CONFIG, formatLogEntry, sanitizeLogData } from "./logger.config.js";
import type { ILogger, LoggerOptions } from "./logger.interface.js";
/**
* Create a cross-platform Pino-based logger that implements ILogger
* Works in Node and browser environments
*/
/**
 * Create a cross-platform Pino-based logger that implements ILogger.
 * Works in Node and browser environments (browsers emit structured objects
 * via pino's `browser.asObject` mode instead of Node streams).
 *
 * @param options - level/service/environment overrides plus an optional
 *   initial context (correlationId / userId / requestId) attached to entries
 */
export function createPinoLogger(options: LoggerOptions = {}): ILogger {
  const level = options.level ?? DEFAULT_LOG_CONFIG.level;
  const service = options.service ?? DEFAULT_LOG_CONFIG.service;
  const environment = options.environment ?? DEFAULT_LOG_CONFIG.environment;
  const isBrowser = typeof window !== "undefined";

  const pinoLogger = pino({
    level,
    name: service,
    base: {
      service,
      environment,
    },
    // Browsers cannot use Node streams; log structured objects via console.
    ...(isBrowser ? { browser: { asObject: true } } : {}),
    formatters: {
      level: (label: string) => ({ level: label }),
      bindings: () => ({}),
    },
    redact: {
      paths: [
        "req.headers.authorization",
        "req.headers.cookie",
        "password",
        "password2",
        "token",
        "secret",
        "jwt",
        "apiKey",
        "params.password",
        "params.password2",
        "params.secret",
        "params.token",
      ],
      remove: true,
    },
  });

  return wrapPino(pinoLogger, {
    correlationId: options.context?.correlationId,
    userId: options.context?.userId,
    requestId: options.context?.requestId,
  });
}

// Mutable per-wrapper context merged into every log entry that carries data.
interface LoggerContext {
  correlationId?: string;
  userId?: string;
  requestId?: string;
}

/**
 * Wrap a pino instance (root or child) in the ILogger API.
 *
 * Context lives in a mutable object owned by THIS wrapper, so
 * setCorrelationId/setUserId/setRequestId affect all of its methods.
 * (The previous implementation's child loggers kept logging with the
 * parent's captured context: the setters on a child were silently ignored,
 * and the non-overridden methods used a different pino instance entirely.)
 */
function wrapPino(instance: ReturnType<typeof pino>, ctx: LoggerContext): ILogger {
  // Sanitize payloads and prepend whichever context ids are currently set.
  function withContext(data?: unknown): Record<string, unknown> | undefined {
    if (data == null) return undefined;
    const sanitized = sanitizeLogData(data);
    return {
      ...(ctx.correlationId ? { correlationId: ctx.correlationId } : {}),
      ...(ctx.userId ? { userId: ctx.userId } : {}),
      ...(ctx.requestId ? { requestId: ctx.requestId } : {}),
      data: sanitized,
    } as Record<string, unknown>;
  }

  const api: ILogger = {
    debug(message, data) {
      instance.debug(withContext(data), message);
    },
    info(message, data) {
      instance.info(withContext(data), message);
    },
    warn(message, data) {
      instance.warn(withContext(data), message);
    },
    error(message, data) {
      instance.error(withContext(data), message);
    },
    trace(message, data) {
      instance.trace(withContext(data), message);
    },
    logApiCall(endpoint, method, status, duration, data) {
      instance.info(
        withContext({ endpoint, method, status, duration: `${duration}ms`, ...(data ? { data } : {}) }),
        `API ${method} ${endpoint}`
      );
    },
    logUserAction(user, action, data) {
      instance.info(withContext({ userId: user, action, ...(data ? { data } : {}) }), "User action");
    },
    logError(error, context, data) {
      instance.error(
        withContext({
          error: { name: error.name, message: error.message, stack: error.stack },
          ...(context ? { context } : {}),
          ...(data ? { data } : {}),
        }),
        `Error${context ? ` in ${context}` : ""}: ${error.message}`
      );
    },
    logRequest(req, data) {
      instance.info(withContext({ req, ...(data ? { data } : {}) }), "Request");
    },
    logResponse(res, data) {
      instance.info(withContext({ res, ...(data ? { data } : {}) }), "Response");
    },
    setCorrelationId(id) {
      ctx.correlationId = id;
    },
    setUserId(id) {
      ctx.userId = id;
    },
    setRequestId(id) {
      ctx.requestId = id;
    },
    child(context) {
      // All bindings go to pino (so extra keys appear in output); any
      // correlation/user/request ids in the context seed the child's own
      // mutable context, overriding the parent's where provided.
      const merged = { ...ctx, ...(context as Partial<LoggerContext>) };
      return wrapPino(instance.child(context), {
        correlationId: merged.correlationId,
        userId: merged.userId,
        requestId: merged.requestId,
      });
    },
    async flush() {
      // Flushing matters only for buffered Node streams; elsewhere a no-op.
      try {
        const flushable = instance as unknown as { flush?: () => void };
        if (typeof flushable.flush === "function") {
          flushable.flush();
        }
      } catch {
        // no-op: flushing must never throw during shutdown
      }
    },
  };

  return api;
}
// Lazily-created, module-wide default logger shared by callers that do not
// need their own configuration.
let defaultLogger: ILogger | undefined;

/** Return the shared default logger, creating it on first use. */
export function getSharedLogger(): ILogger {
  if (defaultLogger === undefined) {
    defaultLogger = createPinoLogger();
  }
  return defaultLogger;
}

4
pnpm-lock.yaml generated
View File

@ -295,6 +295,10 @@ importers:
version: 5.9.2
packages/shared:
dependencies:
pino:
specifier: ^9.9.0
version: 9.9.0
devDependencies:
typescript:
specifier: ^5.9.2

View File

@ -15,13 +15,11 @@ PROJECT_NAME="portal-dev"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m'
log() { echo -e "${GREEN}[DEV] $1${NC}"; }
warn() { echo -e "${YELLOW}[DEV] $1${NC}"; }
info() { echo -e "${BLUE}[DEV] $1${NC}"; }
error() { echo -e "${RED}[DEV] ERROR: $1${NC}"; exit 1; }
# Change to project root
@ -111,10 +109,13 @@ start_apps() {
log "🔗 Database: postgresql://dev:dev@localhost:5432/portal_dev"
log "🔗 Redis: redis://localhost:6379"
log "📚 API Docs: http://localhost:${BFF_PORT:-4000}/api/docs"
log ""
log "Starting apps with hot-reload..."
pnpm dev
# Start Prisma Studio (opens browser)
(cd "$PROJECT_ROOT/apps/bff" && pnpm db:studio &)
# Start apps (portal + bff) with hot reload in parallel
pnpm --parallel --filter @customer-portal/portal --filter @customer-portal/bff run dev
}
# Reset environment

View File

@ -26,13 +26,9 @@
// Performance and compatibility
"skipLibCheck": true,
"allowJs": true,
// Build settings
"incremental": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true
"incremental": true
},
// This is a workspace root - individual packages extend this