Update .gitignore to exclude package-lock.json and remove eslint-report.json. Refactor package.json scripts for build and dev processes, and update dependencies for uuid and @types/uuid. Enhance BFF module structure by adding SecurityModule and QueueModule, and improve error handling in GlobalExceptionFilter. Streamline Salesforce and WHMCS integrations with updated service methods and type definitions for better maintainability. Adjust API paths in portal components for consistency and clarity.
This commit is contained in:
parent
29366d6ae6
commit
4b877fb3e0
1
.gitignore
vendored
1
.gitignore
vendored
@ -1,6 +1,7 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
.pnpm-store/
|
||||
**/package-lock.json
|
||||
|
||||
# Environment files
|
||||
.env
|
||||
|
||||
@ -6,21 +6,21 @@
|
||||
"private": true,
|
||||
"license": "UNLICENSED",
|
||||
"scripts": {
|
||||
"build": "NODE_OPTIONS=\"--max-old-space-size=8192\" nest build -c tsconfig.build.json",
|
||||
"build": "nest build",
|
||||
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
|
||||
"start": "nest start",
|
||||
"dev": "NODE_OPTIONS=\"--no-deprecation --max-old-space-size=4096\" nest start --watch --preserveWatchOutput -c tsconfig.build.json",
|
||||
"start:debug": "NODE_OPTIONS=\"--no-deprecation --max-old-space-size=4096\" nest start --debug --watch",
|
||||
"dev": "nest start --watch --preserveWatchOutput",
|
||||
"start:debug": "nest start --debug --watch",
|
||||
"start:prod": "node dist/main",
|
||||
"lint": "NODE_OPTIONS=\"--max-old-space-size=4096\" eslint .",
|
||||
"lint:fix": "NODE_OPTIONS=\"--max-old-space-size=4096\" eslint . --fix",
|
||||
"test": "NODE_OPTIONS=\"--max-old-space-size=4096\" jest",
|
||||
"test:watch": "NODE_OPTIONS=\"--max-old-space-size=4096\" jest --watch",
|
||||
"test:cov": "NODE_OPTIONS=\"--max-old-space-size=4096\" jest --coverage",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
"test:cov": "jest --coverage",
|
||||
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
|
||||
"test:e2e": "NODE_OPTIONS=\"--max-old-space-size=4096\" jest --config ./test/jest-e2e.json",
|
||||
"type-check": "NODE_OPTIONS=\"--max-old-space-size=7168 --max-semi-space-size=256\" tsc --project tsconfig.json --noEmit",
|
||||
"type-check:watch": "NODE_OPTIONS=\"--max-old-space-size=7168 --max-semi-space-size=256\" tsc --project tsconfig.json --noEmit --watch",
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json",
|
||||
"type-check": "tsc --project tsconfig.json --noEmit",
|
||||
"type-check:watch": "tsc --project tsconfig.json --noEmit --watch",
|
||||
"clean": "rm -rf dist",
|
||||
"db:migrate": "prisma migrate dev",
|
||||
"db:generate": "prisma generate",
|
||||
@ -42,19 +42,20 @@
|
||||
"@nestjs/swagger": "^11.2.0",
|
||||
"@nestjs/throttler": "^6.4.0",
|
||||
"@prisma/client": "^6.14.0",
|
||||
"@sendgrid/mail": "^8.1.3",
|
||||
"@sendgrid/mail": "^8.1.6",
|
||||
"bcrypt": "^6.0.0",
|
||||
"bullmq": "^5.58.0",
|
||||
"class-transformer": "^0.5.1",
|
||||
"class-validator": "^0.14.2",
|
||||
"cookie-parser": "^1.4.7",
|
||||
"express": "^4.21.2",
|
||||
"express": "^5.1.0",
|
||||
"helmet": "^8.1.0",
|
||||
"ioredis": "^5.7.0",
|
||||
"jsforce": "^3.10.4",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"nestjs-pino": "^4.4.0",
|
||||
"nestjs-zod": "^5.0.1",
|
||||
"p-queue": "^7.4.1",
|
||||
"passport": "^0.7.0",
|
||||
"passport-jwt": "^4.0.1",
|
||||
"passport-local": "^1.0.0",
|
||||
@ -65,7 +66,7 @@
|
||||
"rxjs": "^7.8.2",
|
||||
"salesforce-pubsub-api-client": "^5.5.0",
|
||||
"speakeasy": "^2.0.0",
|
||||
"uuid": "^11.1.0",
|
||||
"uuid": "^13.0.0",
|
||||
"zod": "^4.1.9"
|
||||
},
|
||||
"devDependencies": {
|
||||
@ -82,7 +83,7 @@
|
||||
"@types/passport-local": "^1.0.38",
|
||||
"@types/speakeasy": "^2.0.10",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@types/uuid": "^11.0.0",
|
||||
"jest": "^30.0.5",
|
||||
"prisma": "^6.14.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
|
||||
@ -10,6 +10,7 @@ import { apiRoutes } from "@bff/core/config/router.config";
|
||||
|
||||
// Core Modules
|
||||
import { LoggingModule } from "@bff/core/logging/logging.module";
|
||||
import { SecurityModule } from "@bff/core/security/security.module";
|
||||
import { PrismaModule } from "@bff/infra/database/prisma.module";
|
||||
import { RedisModule } from "@bff/infra/redis/redis.module";
|
||||
import { CacheModule } from "@bff/infra/cache/cache.module";
|
||||
@ -52,6 +53,7 @@ import { HealthModule } from "@bff/modules/health/health.module";
|
||||
|
||||
// === INFRASTRUCTURE ===
|
||||
LoggingModule,
|
||||
SecurityModule,
|
||||
ThrottlerModule.forRootAsync({
|
||||
imports: [ConfigModule],
|
||||
inject: [ConfigService],
|
||||
|
||||
@ -20,6 +20,7 @@ declare global {
|
||||
|
||||
import { GlobalExceptionFilter } from "@bff/core/http/http-exception.filter";
|
||||
import { AuthErrorFilter } from "@bff/core/http/auth-error.filter";
|
||||
import { SecureErrorMapperService } from "@bff/core/security/services/secure-error-mapper.service";
|
||||
|
||||
import { AppModule } from "../app.module";
|
||||
|
||||
@ -134,7 +135,7 @@ export async function bootstrap(): Promise<INestApplication> {
|
||||
// Global exception filters
|
||||
app.useGlobalFilters(
|
||||
new AuthErrorFilter(app.get(Logger)), // Handle auth errors first
|
||||
new GlobalExceptionFilter(app.get(Logger), configService) // Handle all other errors
|
||||
new GlobalExceptionFilter(app.get(Logger), configService, app.get(SecureErrorMapperService)) // Handle all other errors
|
||||
);
|
||||
|
||||
// Global authentication guard will be registered via APP_GUARD provider in AuthModule
|
||||
|
||||
10
apps/bff/src/core/config/config.module.ts
Normal file
10
apps/bff/src/core/config/config.module.ts
Normal file
@ -0,0 +1,10 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { SalesforceFieldMapService } from "./field-map";
|
||||
|
||||
@Module({
|
||||
imports: [ConfigModule],
|
||||
providers: [SalesforceFieldMapService],
|
||||
exports: [SalesforceFieldMapService],
|
||||
})
|
||||
export class CoreConfigModule {}
|
||||
@ -66,6 +66,74 @@ export const envSchema = z.object({
|
||||
FREEBIT_TIMEOUT: z.coerce.number().int().positive().default(30000),
|
||||
FREEBIT_RETRY_ATTEMPTS: z.coerce.number().int().positive().default(3),
|
||||
FREEBIT_DETAILS_ENDPOINT: z.string().default("/master/getAcnt/"),
|
||||
|
||||
// Portal Configuration
|
||||
PORTAL_PRICEBOOK_ID: z.string().default("01sTL000008eLVlYAM"),
|
||||
PORTAL_PRICEBOOK_NAME: z.string().default("Portal"),
|
||||
|
||||
// Salesforce Field Mappings - Account
|
||||
ACCOUNT_INTERNET_ELIGIBILITY_FIELD: z.string().default("Internet_Eligibility__c"),
|
||||
ACCOUNT_CUSTOMER_NUMBER_FIELD: z.string().default("SF_Account_No__c"),
|
||||
|
||||
// Salesforce Field Mappings - Product
|
||||
PRODUCT_SKU_FIELD: z.string().default("StockKeepingUnit"),
|
||||
PRODUCT_PORTAL_CATEGORY_FIELD: z.string().default("Product2Categories1__c"),
|
||||
PRODUCT_PORTAL_CATALOG_FIELD: z.string().default("Portal_Catalog__c"),
|
||||
PRODUCT_PORTAL_ACCESSIBLE_FIELD: z.string().default("Portal_Accessible__c"),
|
||||
PRODUCT_ITEM_CLASS_FIELD: z.string().default("Item_Class__c"),
|
||||
PRODUCT_BILLING_CYCLE_FIELD: z.string().default("Billing_Cycle__c"),
|
||||
PRODUCT_WHMCS_PRODUCT_ID_FIELD: z.string().default("WH_Product_ID__c"),
|
||||
PRODUCT_WHMCS_PRODUCT_NAME_FIELD: z.string().default("WH_Product_Name__c"),
|
||||
PRODUCT_INTERNET_PLAN_TIER_FIELD: z.string().default("Internet_Plan_Tier__c"),
|
||||
PRODUCT_INTERNET_OFFERING_TYPE_FIELD: z.string().default("Internet_Offering_Type__c"),
|
||||
PRODUCT_DISPLAY_ORDER_FIELD: z.string().default("Catalog_Order__c"),
|
||||
PRODUCT_BUNDLED_ADDON_FIELD: z.string().default("Bundled_Addon__c"),
|
||||
PRODUCT_IS_BUNDLED_ADDON_FIELD: z.string().default("Is_Bundled_Addon__c"),
|
||||
PRODUCT_SIM_DATA_SIZE_FIELD: z.string().default("SIM_Data_Size__c"),
|
||||
PRODUCT_SIM_PLAN_TYPE_FIELD: z.string().default("SIM_Plan_Type__c"),
|
||||
PRODUCT_SIM_HAS_FAMILY_DISCOUNT_FIELD: z.string().default("SIM_Has_Family_Discount__c"),
|
||||
PRODUCT_VPN_REGION_FIELD: z.string().default("VPN_Region__c"),
|
||||
|
||||
// Salesforce Field Mappings - Order
|
||||
ORDER_TYPE_FIELD: z.string().default("Type"),
|
||||
ORDER_ACTIVATION_TYPE_FIELD: z.string().default("Activation_Type__c"),
|
||||
ORDER_ACTIVATION_SCHEDULED_AT_FIELD: z.string().default("Activation_Scheduled_At__c"),
|
||||
ORDER_ACTIVATION_STATUS_FIELD: z.string().default("Activation_Status__c"),
|
||||
ORDER_INTERNET_PLAN_TIER_FIELD: z.string().default("Internet_Plan_Tier__c"),
|
||||
ORDER_INSTALLATION_TYPE_FIELD: z.string().default("Installment_Plan__c"),
|
||||
ORDER_WEEKEND_INSTALL_FIELD: z.string().default("Weekend_Install__c"),
|
||||
ORDER_ACCESS_MODE_FIELD: z.string().default("Access_Mode__c"),
|
||||
ORDER_HIKARI_DENWA_FIELD: z.string().default("Hikari_Denwa__c"),
|
||||
ORDER_VPN_REGION_FIELD: z.string().default("VPN_Region__c"),
|
||||
ORDER_SIM_TYPE_FIELD: z.string().default("SIM_Type__c"),
|
||||
ORDER_EID_FIELD: z.string().default("EID__c"),
|
||||
ORDER_SIM_VOICE_MAIL_FIELD: z.string().default("SIM_Voice_Mail__c"),
|
||||
ORDER_SIM_CALL_WAITING_FIELD: z.string().default("SIM_Call_Waiting__c"),
|
||||
ORDER_MNP_APPLICATION_FIELD: z.string().default("MNP_Application__c"),
|
||||
ORDER_MNP_RESERVATION_FIELD: z.string().default("MNP_Reservation_Number__c"),
|
||||
ORDER_MNP_EXPIRY_FIELD: z.string().default("MNP_Expiry_Date__c"),
|
||||
ORDER_MNP_PHONE_FIELD: z.string().default("MNP_Phone_Number__c"),
|
||||
ORDER_MVNO_ACCOUNT_NUMBER_FIELD: z.string().default("MVNO_Account_Number__c"),
|
||||
ORDER_PORTING_DOB_FIELD: z.string().default("Porting_DateOfBirth__c"),
|
||||
ORDER_PORTING_FIRST_NAME_FIELD: z.string().default("Porting_FirstName__c"),
|
||||
ORDER_PORTING_LAST_NAME_FIELD: z.string().default("Porting_LastName__c"),
|
||||
ORDER_PORTING_FIRST_NAME_KATAKANA_FIELD: z.string().default("Porting_FirstName_Katakana__c"),
|
||||
ORDER_PORTING_LAST_NAME_KATAKANA_FIELD: z.string().default("Porting_LastName_Katakana__c"),
|
||||
ORDER_PORTING_GENDER_FIELD: z.string().default("Porting_Gender__c"),
|
||||
ORDER_WHMCS_ORDER_ID_FIELD: z.string().default("WHMCS_Order_ID__c"),
|
||||
ORDER_ACTIVATION_ERROR_CODE_FIELD: z.string().default("Activation_Error_Code__c"),
|
||||
ORDER_ACTIVATION_ERROR_MESSAGE_FIELD: z.string().default("Activation_Error_Message__c"),
|
||||
ORDER_ACTIVATION_LAST_ATTEMPT_AT_FIELD: z.string().default("ActivatedDate"),
|
||||
ORDER_ADDRESS_CHANGED_FIELD: z.string().default("Address_Changed__c"),
|
||||
ORDER_BILLING_STREET_FIELD: z.string().default("BillingStreet"),
|
||||
ORDER_BILLING_CITY_FIELD: z.string().default("BillingCity"),
|
||||
ORDER_BILLING_STATE_FIELD: z.string().default("BillingState"),
|
||||
ORDER_BILLING_POSTAL_CODE_FIELD: z.string().default("BillingPostalCode"),
|
||||
ORDER_BILLING_COUNTRY_FIELD: z.string().default("BillingCountry"),
|
||||
|
||||
// Salesforce Field Mappings - Order Item
|
||||
ORDER_ITEM_BILLING_CYCLE_FIELD: z.string().default("Billing_Cycle__c"),
|
||||
ORDER_ITEM_WHMCS_SERVICE_ID_FIELD: z.string().default("WHMCS_Service_ID__c"),
|
||||
});
|
||||
|
||||
export function validate(config: Record<string, unknown>): Record<string, unknown> {
|
||||
|
||||
@ -1,4 +1,6 @@
|
||||
import type { SalesforceProductFieldMap } from "@customer-portal/domain";
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
|
||||
export type SalesforceFieldMap = {
|
||||
account: {
|
||||
@ -53,148 +55,159 @@ export type SalesforceFieldMap = {
|
||||
};
|
||||
};
|
||||
|
||||
export function getSalesforceFieldMap(): SalesforceFieldMap {
|
||||
return {
|
||||
account: {
|
||||
internetEligibility:
|
||||
process.env.ACCOUNT_INTERNET_ELIGIBILITY_FIELD || "Internet_Eligibility__c",
|
||||
customerNumber: process.env.ACCOUNT_CUSTOMER_NUMBER_FIELD || "SF_Account_No__c",
|
||||
},
|
||||
product: {
|
||||
sku: process.env.PRODUCT_SKU_FIELD || "StockKeepingUnit",
|
||||
portalCategory: process.env.PRODUCT_PORTAL_CATEGORY_FIELD || "Product2Categories1__c",
|
||||
portalCatalog: process.env.PRODUCT_PORTAL_CATALOG_FIELD || "Portal_Catalog__c",
|
||||
portalAccessible: process.env.PRODUCT_PORTAL_ACCESSIBLE_FIELD || "Portal_Accessible__c",
|
||||
itemClass: process.env.PRODUCT_ITEM_CLASS_FIELD || "Item_Class__c",
|
||||
billingCycle: process.env.PRODUCT_BILLING_CYCLE_FIELD || "Billing_Cycle__c",
|
||||
whmcsProductId: process.env.PRODUCT_WHMCS_PRODUCT_ID_FIELD || "WH_Product_ID__c",
|
||||
whmcsProductName: process.env.PRODUCT_WHMCS_PRODUCT_NAME_FIELD || "WH_Product_Name__c",
|
||||
internetPlanTier: process.env.PRODUCT_INTERNET_PLAN_TIER_FIELD || "Internet_Plan_Tier__c",
|
||||
internetOfferingType:
|
||||
process.env.PRODUCT_INTERNET_OFFERING_TYPE_FIELD || "Internet_Offering_Type__c",
|
||||
displayOrder: process.env.PRODUCT_DISPLAY_ORDER_FIELD || "Catalog_Order__c",
|
||||
bundledAddon: process.env.PRODUCT_BUNDLED_ADDON_FIELD || "Bundled_Addon__c",
|
||||
isBundledAddon: process.env.PRODUCT_IS_BUNDLED_ADDON_FIELD || "Is_Bundled_Addon__c",
|
||||
simDataSize: process.env.PRODUCT_SIM_DATA_SIZE_FIELD || "SIM_Data_Size__c",
|
||||
simPlanType: process.env.PRODUCT_SIM_PLAN_TYPE_FIELD || "SIM_Plan_Type__c",
|
||||
simHasFamilyDiscount:
|
||||
process.env.PRODUCT_SIM_HAS_FAMILY_DISCOUNT_FIELD || "SIM_Has_Family_Discount__c",
|
||||
vpnRegion: process.env.PRODUCT_VPN_REGION_FIELD || "VPN_Region__c",
|
||||
},
|
||||
order: {
|
||||
orderType: process.env.ORDER_TYPE_FIELD || "Type",
|
||||
activationType: process.env.ORDER_ACTIVATION_TYPE_FIELD || "Activation_Type__c",
|
||||
activationScheduledAt:
|
||||
process.env.ORDER_ACTIVATION_SCHEDULED_AT_FIELD || "Activation_Scheduled_At__c",
|
||||
activationStatus: process.env.ORDER_ACTIVATION_STATUS_FIELD || "Activation_Status__c",
|
||||
internetPlanTier: process.env.ORDER_INTERNET_PLAN_TIER_FIELD || "Internet_Plan_Tier__c",
|
||||
installationType: process.env.ORDER_INSTALLATION_TYPE_FIELD || "Installment_Plan__c",
|
||||
weekendInstall: process.env.ORDER_WEEKEND_INSTALL_FIELD || "Weekend_Install__c",
|
||||
accessMode: process.env.ORDER_ACCESS_MODE_FIELD || "Access_Mode__c",
|
||||
hikariDenwa: process.env.ORDER_HIKARI_DENWA_FIELD || "Hikari_Denwa__c",
|
||||
vpnRegion: process.env.ORDER_VPN_REGION_FIELD || "VPN_Region__c",
|
||||
simType: process.env.ORDER_SIM_TYPE_FIELD || "SIM_Type__c",
|
||||
eid: process.env.ORDER_EID_FIELD || "EID__c",
|
||||
simVoiceMail: process.env.ORDER_SIM_VOICE_MAIL_FIELD || "SIM_Voice_Mail__c",
|
||||
simCallWaiting: process.env.ORDER_SIM_CALL_WAITING_FIELD || "SIM_Call_Waiting__c",
|
||||
mnp: {
|
||||
application: process.env.ORDER_MNP_APPLICATION_FIELD || "MNP_Application__c",
|
||||
reservationNumber: process.env.ORDER_MNP_RESERVATION_FIELD || "MNP_Reservation_Number__c",
|
||||
expiryDate: process.env.ORDER_MNP_EXPIRY_FIELD || "MNP_Expiry_Date__c",
|
||||
phoneNumber: process.env.ORDER_MNP_PHONE_FIELD || "MNP_Phone_Number__c",
|
||||
mvnoAccountNumber: process.env.ORDER_MVNO_ACCOUNT_NUMBER_FIELD || "MVNO_Account_Number__c",
|
||||
portingDateOfBirth: process.env.ORDER_PORTING_DOB_FIELD || "Porting_DateOfBirth__c",
|
||||
portingFirstName: process.env.ORDER_PORTING_FIRST_NAME_FIELD || "Porting_FirstName__c",
|
||||
portingLastName: process.env.ORDER_PORTING_LAST_NAME_FIELD || "Porting_LastName__c",
|
||||
portingFirstNameKatakana:
|
||||
process.env.ORDER_PORTING_FIRST_NAME_KATAKANA_FIELD || "Porting_FirstName_Katakana__c",
|
||||
portingLastNameKatakana:
|
||||
process.env.ORDER_PORTING_LAST_NAME_KATAKANA_FIELD || "Porting_LastName_Katakana__c",
|
||||
portingGender: process.env.ORDER_PORTING_GENDER_FIELD || "Porting_Gender__c",
|
||||
@Injectable()
|
||||
export class SalesforceFieldMapService {
|
||||
constructor(private readonly configService: ConfigService) {}
|
||||
|
||||
getFieldMap(): SalesforceFieldMap {
|
||||
return {
|
||||
account: {
|
||||
internetEligibility: this.configService.get<string>("ACCOUNT_INTERNET_ELIGIBILITY_FIELD")!,
|
||||
customerNumber: this.configService.get<string>("ACCOUNT_CUSTOMER_NUMBER_FIELD")!,
|
||||
},
|
||||
whmcsOrderId: process.env.ORDER_WHMCS_ORDER_ID_FIELD || "WHMCS_Order_ID__c",
|
||||
lastErrorCode: process.env.ORDER_ACTIVATION_ERROR_CODE_FIELD || "Activation_Error_Code__c",
|
||||
lastErrorMessage:
|
||||
process.env.ORDER_ACTIVATION_ERROR_MESSAGE_FIELD || "Activation_Error_Message__c",
|
||||
lastAttemptAt: process.env.ORDER_ACTIVATION_LAST_ATTEMPT_AT_FIELD || "ActivatedDate",
|
||||
addressChanged: process.env.ORDER_ADDRESS_CHANGED_FIELD || "Address_Changed__c",
|
||||
billing: {
|
||||
street: process.env.ORDER_BILLING_STREET_FIELD || "BillingStreet",
|
||||
city: process.env.ORDER_BILLING_CITY_FIELD || "BillingCity",
|
||||
state: process.env.ORDER_BILLING_STATE_FIELD || "BillingState",
|
||||
postalCode: process.env.ORDER_BILLING_POSTAL_CODE_FIELD || "BillingPostalCode",
|
||||
country: process.env.ORDER_BILLING_COUNTRY_FIELD || "BillingCountry",
|
||||
product: {
|
||||
sku: this.configService.get<string>("PRODUCT_SKU_FIELD")!,
|
||||
portalCategory: this.configService.get<string>("PRODUCT_PORTAL_CATEGORY_FIELD")!,
|
||||
portalCatalog: this.configService.get<string>("PRODUCT_PORTAL_CATALOG_FIELD")!,
|
||||
portalAccessible: this.configService.get<string>("PRODUCT_PORTAL_ACCESSIBLE_FIELD")!,
|
||||
itemClass: this.configService.get<string>("PRODUCT_ITEM_CLASS_FIELD")!,
|
||||
billingCycle: this.configService.get<string>("PRODUCT_BILLING_CYCLE_FIELD")!,
|
||||
whmcsProductId: this.configService.get<string>("PRODUCT_WHMCS_PRODUCT_ID_FIELD")!,
|
||||
whmcsProductName: this.configService.get<string>("PRODUCT_WHMCS_PRODUCT_NAME_FIELD")!,
|
||||
internetPlanTier: this.configService.get<string>("PRODUCT_INTERNET_PLAN_TIER_FIELD")!,
|
||||
internetOfferingType: this.configService.get<string>(
|
||||
"PRODUCT_INTERNET_OFFERING_TYPE_FIELD"
|
||||
)!,
|
||||
displayOrder: this.configService.get<string>("PRODUCT_DISPLAY_ORDER_FIELD")!,
|
||||
bundledAddon: this.configService.get<string>("PRODUCT_BUNDLED_ADDON_FIELD")!,
|
||||
isBundledAddon: this.configService.get<string>("PRODUCT_IS_BUNDLED_ADDON_FIELD")!,
|
||||
simDataSize: this.configService.get<string>("PRODUCT_SIM_DATA_SIZE_FIELD")!,
|
||||
simPlanType: this.configService.get<string>("PRODUCT_SIM_PLAN_TYPE_FIELD")!,
|
||||
simHasFamilyDiscount: this.configService.get<string>(
|
||||
"PRODUCT_SIM_HAS_FAMILY_DISCOUNT_FIELD"
|
||||
)!,
|
||||
vpnRegion: this.configService.get<string>("PRODUCT_VPN_REGION_FIELD")!,
|
||||
},
|
||||
},
|
||||
orderItem: {
|
||||
billingCycle: process.env.ORDER_ITEM_BILLING_CYCLE_FIELD || "Billing_Cycle__c",
|
||||
whmcsServiceId: process.env.ORDER_ITEM_WHMCS_SERVICE_ID_FIELD || "WHMCS_Service_ID__c",
|
||||
},
|
||||
};
|
||||
}
|
||||
order: {
|
||||
orderType: this.configService.get<string>("ORDER_TYPE_FIELD")!,
|
||||
activationType: this.configService.get<string>("ORDER_ACTIVATION_TYPE_FIELD")!,
|
||||
activationScheduledAt: this.configService.get<string>(
|
||||
"ORDER_ACTIVATION_SCHEDULED_AT_FIELD"
|
||||
)!,
|
||||
activationStatus: this.configService.get<string>("ORDER_ACTIVATION_STATUS_FIELD")!,
|
||||
internetPlanTier: this.configService.get<string>("ORDER_INTERNET_PLAN_TIER_FIELD")!,
|
||||
installationType: this.configService.get<string>("ORDER_INSTALLATION_TYPE_FIELD")!,
|
||||
weekendInstall: this.configService.get<string>("ORDER_WEEKEND_INSTALL_FIELD")!,
|
||||
accessMode: this.configService.get<string>("ORDER_ACCESS_MODE_FIELD")!,
|
||||
hikariDenwa: this.configService.get<string>("ORDER_HIKARI_DENWA_FIELD")!,
|
||||
vpnRegion: this.configService.get<string>("ORDER_VPN_REGION_FIELD")!,
|
||||
simType: this.configService.get<string>("ORDER_SIM_TYPE_FIELD")!,
|
||||
eid: this.configService.get<string>("ORDER_EID_FIELD")!,
|
||||
simVoiceMail: this.configService.get<string>("ORDER_SIM_VOICE_MAIL_FIELD")!,
|
||||
simCallWaiting: this.configService.get<string>("ORDER_SIM_CALL_WAITING_FIELD")!,
|
||||
mnp: {
|
||||
application: this.configService.get<string>("ORDER_MNP_APPLICATION_FIELD")!,
|
||||
reservationNumber: this.configService.get<string>("ORDER_MNP_RESERVATION_FIELD")!,
|
||||
expiryDate: this.configService.get<string>("ORDER_MNP_EXPIRY_FIELD")!,
|
||||
phoneNumber: this.configService.get<string>("ORDER_MNP_PHONE_FIELD")!,
|
||||
mvnoAccountNumber: this.configService.get<string>("ORDER_MVNO_ACCOUNT_NUMBER_FIELD")!,
|
||||
portingDateOfBirth: this.configService.get<string>("ORDER_PORTING_DOB_FIELD")!,
|
||||
portingFirstName: this.configService.get<string>("ORDER_PORTING_FIRST_NAME_FIELD")!,
|
||||
portingLastName: this.configService.get<string>("ORDER_PORTING_LAST_NAME_FIELD")!,
|
||||
portingFirstNameKatakana: this.configService.get<string>(
|
||||
"ORDER_PORTING_FIRST_NAME_KATAKANA_FIELD"
|
||||
)!,
|
||||
portingLastNameKatakana: this.configService.get<string>(
|
||||
"ORDER_PORTING_LAST_NAME_KATAKANA_FIELD"
|
||||
)!,
|
||||
portingGender: this.configService.get<string>("ORDER_PORTING_GENDER_FIELD")!,
|
||||
},
|
||||
whmcsOrderId: this.configService.get<string>("ORDER_WHMCS_ORDER_ID_FIELD")!,
|
||||
lastErrorCode: this.configService.get<string>("ORDER_ACTIVATION_ERROR_CODE_FIELD"),
|
||||
lastErrorMessage: this.configService.get<string>("ORDER_ACTIVATION_ERROR_MESSAGE_FIELD"),
|
||||
lastAttemptAt: this.configService.get<string>("ORDER_ACTIVATION_LAST_ATTEMPT_AT_FIELD"),
|
||||
addressChanged: this.configService.get<string>("ORDER_ADDRESS_CHANGED_FIELD")!,
|
||||
billing: {
|
||||
street: this.configService.get<string>("ORDER_BILLING_STREET_FIELD")!,
|
||||
city: this.configService.get<string>("ORDER_BILLING_CITY_FIELD")!,
|
||||
state: this.configService.get<string>("ORDER_BILLING_STATE_FIELD")!,
|
||||
postalCode: this.configService.get<string>("ORDER_BILLING_POSTAL_CODE_FIELD")!,
|
||||
country: this.configService.get<string>("ORDER_BILLING_COUNTRY_FIELD")!,
|
||||
},
|
||||
},
|
||||
orderItem: {
|
||||
billingCycle: this.configService.get<string>("ORDER_ITEM_BILLING_CYCLE_FIELD")!,
|
||||
whmcsServiceId: this.configService.get<string>("ORDER_ITEM_WHMCS_SERVICE_ID_FIELD")!,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function getProductQueryFields(): string {
|
||||
const fields = getSalesforceFieldMap();
|
||||
return [
|
||||
"Id",
|
||||
"Name",
|
||||
fields.product.sku,
|
||||
fields.product.portalCategory,
|
||||
fields.product.portalCatalog,
|
||||
fields.product.portalAccessible,
|
||||
fields.product.itemClass,
|
||||
fields.product.billingCycle,
|
||||
fields.product.whmcsProductId,
|
||||
fields.product.whmcsProductName,
|
||||
fields.product.internetPlanTier,
|
||||
fields.product.internetOfferingType,
|
||||
fields.product.displayOrder,
|
||||
fields.product.bundledAddon,
|
||||
fields.product.isBundledAddon,
|
||||
fields.product.simDataSize,
|
||||
fields.product.simPlanType,
|
||||
fields.product.simHasFamilyDiscount,
|
||||
].join(", ");
|
||||
}
|
||||
getProductQueryFields(): string {
|
||||
const fields = this.getFieldMap();
|
||||
return [
|
||||
"Id",
|
||||
"Name",
|
||||
fields.product.sku,
|
||||
fields.product.portalCategory,
|
||||
fields.product.portalCatalog,
|
||||
fields.product.portalAccessible,
|
||||
fields.product.itemClass,
|
||||
fields.product.billingCycle,
|
||||
fields.product.whmcsProductId,
|
||||
fields.product.whmcsProductName,
|
||||
fields.product.internetPlanTier,
|
||||
fields.product.internetOfferingType,
|
||||
fields.product.displayOrder,
|
||||
fields.product.bundledAddon,
|
||||
fields.product.isBundledAddon,
|
||||
fields.product.simDataSize,
|
||||
fields.product.simPlanType,
|
||||
fields.product.simHasFamilyDiscount,
|
||||
fields.product.vpnRegion,
|
||||
"UnitPrice",
|
||||
"IsActive",
|
||||
].join(", ");
|
||||
}
|
||||
|
||||
export function getOrderQueryFields(): string {
|
||||
const fields = getSalesforceFieldMap();
|
||||
return [
|
||||
"Id",
|
||||
"AccountId",
|
||||
"Status",
|
||||
"EffectiveDate",
|
||||
fields.order.orderType,
|
||||
fields.order.activationType,
|
||||
fields.order.activationScheduledAt,
|
||||
fields.order.activationStatus,
|
||||
fields.order.lastErrorCode!,
|
||||
fields.order.lastErrorMessage!,
|
||||
fields.order.lastAttemptAt!,
|
||||
fields.order.internetPlanTier,
|
||||
fields.order.installationType,
|
||||
fields.order.weekendInstall,
|
||||
fields.order.accessMode,
|
||||
fields.order.hikariDenwa,
|
||||
fields.order.vpnRegion,
|
||||
fields.order.simType,
|
||||
fields.order.simVoiceMail,
|
||||
fields.order.simCallWaiting,
|
||||
fields.order.eid,
|
||||
fields.order.whmcsOrderId,
|
||||
].join(", ");
|
||||
}
|
||||
getOrderQueryFields(): string {
|
||||
const fields = this.getFieldMap();
|
||||
return [
|
||||
"Id",
|
||||
"AccountId",
|
||||
"Status",
|
||||
"EffectiveDate",
|
||||
fields.order.orderType,
|
||||
fields.order.activationType,
|
||||
fields.order.activationScheduledAt,
|
||||
fields.order.activationStatus,
|
||||
fields.order.lastErrorCode!,
|
||||
fields.order.lastErrorMessage!,
|
||||
fields.order.lastAttemptAt!,
|
||||
fields.order.internetPlanTier,
|
||||
fields.order.installationType,
|
||||
fields.order.weekendInstall,
|
||||
fields.order.accessMode,
|
||||
fields.order.hikariDenwa,
|
||||
fields.order.vpnRegion,
|
||||
fields.order.simType,
|
||||
fields.order.simVoiceMail,
|
||||
fields.order.simCallWaiting,
|
||||
fields.order.eid,
|
||||
fields.order.whmcsOrderId,
|
||||
].join(", ");
|
||||
}
|
||||
|
||||
export function getOrderItemProduct2Select(additional: string[] = []): string {
|
||||
const fields = getSalesforceFieldMap();
|
||||
const base = [
|
||||
"Id",
|
||||
"Name",
|
||||
fields.product.sku,
|
||||
fields.product.whmcsProductId,
|
||||
fields.product.itemClass,
|
||||
fields.product.billingCycle,
|
||||
];
|
||||
const all = [...base, ...additional];
|
||||
return all.map(f => `PricebookEntry.Product2.${f}`).join(", ");
|
||||
getOrderItemProduct2Select(additional: string[] = []): string {
|
||||
const fields = this.getFieldMap();
|
||||
const base = [
|
||||
"Id",
|
||||
"Name",
|
||||
fields.product.sku,
|
||||
fields.product.whmcsProductId,
|
||||
fields.product.itemClass,
|
||||
fields.product.billingCycle,
|
||||
];
|
||||
const all = [...base, ...additional];
|
||||
return all.map(f => `PricebookEntry.Product2.${f}`).join(", ");
|
||||
}
|
||||
}
|
||||
|
||||
11
apps/bff/src/core/database/database.module.ts
Normal file
11
apps/bff/src/core/database/database.module.ts
Normal file
@ -0,0 +1,11 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { PrismaModule } from "@bff/infra/database/prisma.module";
|
||||
import { TransactionService } from "./services/transaction.service";
|
||||
import { DistributedTransactionService } from "./services/distributed-transaction.service";
|
||||
|
||||
@Module({
|
||||
imports: [PrismaModule],
|
||||
providers: [TransactionService, DistributedTransactionService],
|
||||
exports: [TransactionService, DistributedTransactionService],
|
||||
})
|
||||
export class DatabaseModule {}
|
||||
@ -0,0 +1,390 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { TransactionService, TransactionContext } from "./transaction.service";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
|
||||
export interface DistributedStep {
|
||||
id: string;
|
||||
description: string;
|
||||
execute: () => Promise<any>;
|
||||
rollback?: () => Promise<void>;
|
||||
critical?: boolean; // If true, failure stops entire transaction
|
||||
retryable?: boolean; // If true, step can be retried on failure
|
||||
}
|
||||
|
||||
export interface DistributedTransactionOptions {
|
||||
description: string;
|
||||
timeout?: number;
|
||||
maxRetries?: number;
|
||||
continueOnNonCriticalFailure?: boolean;
|
||||
}
|
||||
|
||||
export interface DistributedTransactionResult<T = any> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: string;
|
||||
duration: number;
|
||||
stepsExecuted: number;
|
||||
stepsRolledBack: number;
|
||||
stepResults: Record<string, any>;
|
||||
failedSteps: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Service for managing distributed transactions across multiple external systems
|
||||
* Provides coordination between database operations and external API calls
|
||||
*/
|
||||
@Injectable()
|
||||
export class DistributedTransactionService {
|
||||
constructor(
|
||||
private readonly transactionService: TransactionService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Execute a distributed transaction with multiple steps across different systems
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const result = await this.distributedTransactionService.executeDistributedTransaction([
|
||||
* {
|
||||
* id: 'sf_status_update',
|
||||
* description: 'Update Salesforce order status to Activating',
|
||||
* execute: async () => {
|
||||
* return await this.salesforceService.updateOrder({
|
||||
* Id: sfOrderId,
|
||||
* Status: 'Activating'
|
||||
* });
|
||||
* },
|
||||
* rollback: async () => {
|
||||
* await this.salesforceService.updateOrder({
|
||||
* Id: sfOrderId,
|
||||
* Status: 'Draft'
|
||||
* });
|
||||
* },
|
||||
* critical: true
|
||||
* },
|
||||
* {
|
||||
* id: 'whmcs_create_order',
|
||||
* description: 'Create order in WHMCS',
|
||||
* execute: async () => {
|
||||
* return await this.whmcsOrderService.createOrder(orderData);
|
||||
* },
|
||||
* rollback: async () => {
|
||||
* if (stepResults.whmcs_create_order?.orderId) {
|
||||
* await this.whmcsOrderService.cancelOrder(stepResults.whmcs_create_order.orderId);
|
||||
* }
|
||||
* },
|
||||
* critical: true
|
||||
* }
|
||||
* ], {
|
||||
* description: 'Order fulfillment workflow',
|
||||
* timeout: 120000
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
async executeDistributedTransaction(
  steps: DistributedStep[],
  options: DistributedTransactionOptions
): Promise<DistributedTransactionResult> {
  const {
    description,
    timeout = 120000, // 2 minutes default for distributed operations
    maxRetries = 1, // Less retries for distributed operations
    // NOTE(review): `maxRetries` is destructured but never used below — steps
    // are executed exactly once with no retry loop. Confirm whether per-step
    // retry support was intended here.
    continueOnNonCriticalFailure = false
  } = options;

  const transactionId = this.generateTransactionId();
  const startTime = Date.now();

  this.logger.log(`Starting distributed transaction [${transactionId}]`, {
    description,
    stepsCount: steps.length,
    timeout
  });

  // Per-step outcomes keyed by step id; order of execution is preserved in
  // `executedSteps` so rollbacks can run in reverse (LIFO) order on failure.
  const stepResults: Record<string, any> = {};
  const executedSteps: string[] = [];
  const failedSteps: string[] = [];
  // NOTE(review): `lastError` is assigned in the step-level catch but never
  // read afterwards (the outer catch uses its own `error` binding) — dead
  // state; confirm it can be removed.
  let lastError: Error | null = null;

  try {
    // Execute steps sequentially
    for (const step of steps) {
      this.logger.debug(`Executing step: ${step.id} [${transactionId}]`, {
        description: step.description,
        critical: step.critical
      });

      try {
        const stepStartTime = Date.now();
        // Each step shares the transaction-wide `timeout` as its own budget.
        const result = await this.executeStepWithTimeout(step, timeout);
        const stepDuration = Date.now() - stepStartTime;

        stepResults[step.id] = result;
        executedSteps.push(step.id);

        this.logger.debug(`Step completed: ${step.id} [${transactionId}]`, {
          duration: stepDuration
        });

      } catch (stepError) {
        lastError = stepError as Error;
        failedSteps.push(step.id);

        this.logger.error(`Step failed: ${step.id} [${transactionId}]`, {
          error: getErrorMessage(stepError),
          critical: step.critical,
          retryable: step.retryable
        });

        // If it's a critical step, stop the entire transaction
        if (step.critical) {
          throw stepError;
        }

        // If we're not continuing on non-critical failures, stop
        if (!continueOnNonCriticalFailure) {
          throw stepError;
        }

        // Otherwise, log and continue
        this.logger.warn(`Continuing despite non-critical step failure: ${step.id} [${transactionId}]`);
      }
    }

    const duration = Date.now() - startTime;

    // Note: this path is also taken when non-critical steps failed but
    // `continueOnNonCriticalFailure` was set — `success: true` then coexists
    // with a non-empty `failedSteps` list.
    this.logger.log(`Distributed transaction completed successfully [${transactionId}]`, {
      description,
      duration,
      stepsExecuted: executedSteps.length,
      failedSteps: failedSteps.length
    });

    return {
      success: true,
      data: stepResults,
      duration,
      stepsExecuted: executedSteps.length,
      stepsRolledBack: 0,
      stepResults,
      failedSteps
    };

  } catch (error) {
    const duration = Date.now() - startTime;

    this.logger.error(`Distributed transaction failed [${transactionId}]`, {
      description,
      error: getErrorMessage(error),
      duration,
      stepsExecuted: executedSteps.length,
      failedSteps: failedSteps.length
    });

    // Execute rollbacks for completed steps
    const rollbacksExecuted = await this.executeRollbacks(
      steps,
      executedSteps,
      stepResults,
      transactionId
    );

    return {
      success: false,
      error: getErrorMessage(error),
      duration,
      stepsExecuted: executedSteps.length,
      stepsRolledBack: rollbacksExecuted,
      stepResults,
      failedSteps
    };
  }
}
|
||||
|
||||
/**
 * Execute a hybrid transaction that combines database operations with external system calls.
 *
 * Runs the database work (via TransactionService) and the external steps
 * (via executeDistributedTransaction) in the order selected by
 * `databaseFirst`. When external steps run second and fail, the database
 * side has already committed and cannot be rolled back automatically —
 * see the inline note below. When the database runs second and fails,
 * the already-executed external steps are compensated via their
 * `rollback` callbacks.
 *
 * @param databaseOperation - Callback executed inside the DB transaction.
 * @param externalSteps - External-system steps with optional rollbacks.
 * @param options - Distributed options plus ordering/rollback flags.
 * @returns Result combining the DB payload with external step metadata.
 */
async executeHybridTransaction<T>(
  databaseOperation: (tx: any, context: TransactionContext) => Promise<T>,
  externalSteps: DistributedStep[],
  options: DistributedTransactionOptions & {
    databaseFirst?: boolean;
    rollbackDatabaseOnExternalFailure?: boolean;
  }
): Promise<DistributedTransactionResult<T>> {
  const {
    databaseFirst = true,
    rollbackDatabaseOnExternalFailure = true,
    ...distributedOptions
  } = options;

  const transactionId = this.generateTransactionId();
  const startTime = Date.now();

  this.logger.log(`Starting hybrid transaction [${transactionId}]`, {
    description: options.description,
    databaseFirst,
    externalStepsCount: externalSteps.length
  });

  try {
    let databaseResult: T | null = null;
    let externalResult: DistributedTransactionResult | null = null;

    if (databaseFirst) {
      // Execute database operations first
      this.logger.debug(`Executing database operations [${transactionId}]`);
      const dbTransactionResult = await this.transactionService.executeTransaction(
        databaseOperation,
        {
          description: `${options.description} - Database Operations`,
          timeout: options.timeout
        }
      );

      if (!dbTransactionResult.success) {
        throw new Error(dbTransactionResult.error || 'Database transaction failed');
      }

      databaseResult = dbTransactionResult.data!;

      // Execute external operations
      this.logger.debug(`Executing external operations [${transactionId}]`);
      externalResult = await this.executeDistributedTransaction(externalSteps, {
        ...distributedOptions,
        description: distributedOptions.description || 'External operations'
      });

      if (!externalResult.success && rollbackDatabaseOnExternalFailure) {
        // Note: Database transaction already committed, so we can't rollback automatically
        // This is a limitation of this approach - consider using saga pattern for true rollback
        // NOTE(review): execution still falls through to the success return
        // below in this case — the caller receives success:true with the
        // committed DB result despite the external failure. Confirm this
        // log-and-continue behavior is intended.
        this.logger.error(`External operations failed but database already committed [${transactionId}]`, {
          externalError: externalResult.error
        });
      }

    } else {
      // Execute external operations first
      this.logger.debug(`Executing external operations [${transactionId}]`);
      externalResult = await this.executeDistributedTransaction(externalSteps, {
        ...distributedOptions,
        description: distributedOptions.description || 'External operations'
      });

      if (!externalResult.success) {
        throw new Error(externalResult.error || 'External operations failed');
      }

      // Execute database operations
      this.logger.debug(`Executing database operations [${transactionId}]`);
      const dbTransactionResult = await this.transactionService.executeTransaction(
        databaseOperation,
        {
          description: `${options.description} - Database Operations`,
          timeout: options.timeout
        }
      );

      if (!dbTransactionResult.success) {
        // Rollback external operations (reverse order, best effort)
        await this.executeRollbacks(
          externalSteps,
          Object.keys(externalResult.stepResults),
          externalResult.stepResults,
          transactionId
        );
        throw new Error(dbTransactionResult.error || 'Database transaction failed');
      }

      databaseResult = dbTransactionResult.data!;
    }

    const duration = Date.now() - startTime;

    this.logger.log(`Hybrid transaction completed successfully [${transactionId}]`, {
      description: options.description,
      duration
    });

    return {
      success: true,
      data: databaseResult,
      duration,
      stepsExecuted: externalResult?.stepsExecuted || 0,
      stepsRolledBack: 0,
      stepResults: externalResult?.stepResults || {},
      failedSteps: externalResult?.failedSteps || []
    };

  } catch (error) {
    const duration = Date.now() - startTime;

    this.logger.error(`Hybrid transaction failed [${transactionId}]`, {
      description: options.description,
      error: getErrorMessage(error),
      duration
    });

    // NOTE(review): the failure result reports stepsExecuted/stepsRolledBack
    // as 0 and an empty stepResults even when external steps did run —
    // confirm whether the external metadata should be surfaced here.
    return {
      success: false,
      error: getErrorMessage(error),
      duration,
      stepsExecuted: 0,
      stepsRolledBack: 0,
      stepResults: {},
      failedSteps: []
    };
  }
}
|
||||
|
||||
private async executeStepWithTimeout(step: DistributedStep, timeout: number): Promise<any> {
|
||||
return Promise.race([
|
||||
step.execute(),
|
||||
new Promise((_, reject) => {
|
||||
setTimeout(() => {
|
||||
reject(new Error(`Step ${step.id} timed out after ${timeout}ms`));
|
||||
}, timeout);
|
||||
})
|
||||
]);
|
||||
}
|
||||
|
||||
private async executeRollbacks(
|
||||
steps: DistributedStep[],
|
||||
executedSteps: string[],
|
||||
stepResults: Record<string, any>,
|
||||
transactionId: string
|
||||
): Promise<number> {
|
||||
this.logger.warn(`Executing rollbacks for ${executedSteps.length} steps [${transactionId}]`);
|
||||
|
||||
let rollbacksExecuted = 0;
|
||||
|
||||
// Execute rollbacks in reverse order (LIFO)
|
||||
for (let i = executedSteps.length - 1; i >= 0; i--) {
|
||||
const stepId = executedSteps[i];
|
||||
const step = steps.find(s => s.id === stepId);
|
||||
|
||||
if (step?.rollback) {
|
||||
try {
|
||||
this.logger.debug(`Executing rollback for step: ${stepId} [${transactionId}]`);
|
||||
await step.rollback();
|
||||
rollbacksExecuted++;
|
||||
this.logger.debug(`Rollback completed for step: ${stepId} [${transactionId}]`);
|
||||
} catch (rollbackError) {
|
||||
this.logger.error(`Rollback failed for step: ${stepId} [${transactionId}]`, {
|
||||
error: getErrorMessage(rollbackError)
|
||||
});
|
||||
// Continue with other rollbacks even if one fails
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.log(`Completed ${rollbacksExecuted} rollbacks [${transactionId}]`);
|
||||
return rollbacksExecuted;
|
||||
}
|
||||
|
||||
/**
 * Build a log-correlation id for a distributed transaction.
 * Not cryptographically secure — for tracing only.
 */
private generateTransactionId(): string {
  const entropy = Math.random().toString(36).slice(2, 9);
  return `dtx_${Date.now()}_${entropy}`;
}
|
||||
}
|
||||
332
apps/bff/src/core/database/services/transaction.service.ts
Normal file
332
apps/bff/src/core/database/services/transaction.service.ts
Normal file
@ -0,0 +1,332 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { PrismaService } from "@bff/infra/database/prisma.service";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
|
||||
export interface TransactionContext {
|
||||
id: string;
|
||||
startTime: Date;
|
||||
operations: string[];
|
||||
rollbackActions: (() => Promise<void>)[];
|
||||
}
|
||||
|
||||
/** Tuning knobs for TransactionService.executeTransaction. */
export interface TransactionOptions {
  /**
   * Maximum time to wait for transaction to complete (ms)
   * Default: 30 seconds
   */
  timeout?: number;

  /**
   * Maximum number of retry attempts on serialization failures
   * (retryability is decided by TransactionService.isRetryableError)
   * Default: 3
   */
  maxRetries?: number;

  /**
   * Custom isolation level for the transaction
   * Default: ReadCommitted
   */
  isolationLevel?: 'ReadUncommitted' | 'ReadCommitted' | 'RepeatableRead' | 'Serializable';

  /**
   * Description of the transaction for logging
   */
  description?: string;

  /**
   * Whether to automatically rollback external operations on database rollback
   * (runs the context's registered rollbackActions in LIFO order)
   * Default: true
   */
  autoRollback?: boolean;
}
|
||||
|
||||
/** Outcome envelope returned by TransactionService.executeTransaction. */
export interface TransactionResult<T> {
  // True when the operation committed; false after final failure.
  success: boolean;
  // Operation return value; present only on success.
  data?: T;
  // Human-readable failure message; present only on failure.
  error?: string;
  // Total elapsed wall-clock time in ms, including retries.
  duration: number;
  // Number of audit-trail entries recorded via context.addOperation.
  operationsCount: number;
  // How many registered rollback callbacks ran (0 on success).
  rollbacksExecuted: number;
}
|
||||
|
||||
/**
|
||||
* Service for managing database transactions with external operation coordination
|
||||
* Provides atomic operations across database and external systems
|
||||
*/
|
||||
@Injectable()
|
||||
export class TransactionService {
|
||||
private readonly defaultTimeout = 30000; // 30 seconds
|
||||
private readonly defaultMaxRetries = 3;
|
||||
|
||||
constructor(
|
||||
private readonly prisma: PrismaService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Execute operations within a database transaction with rollback support
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const result = await this.transactionService.executeTransaction(
|
||||
* async (tx, context) => {
|
||||
* // Database operations
|
||||
* const user = await tx.user.create({ data: userData });
|
||||
*
|
||||
* // External operations with rollback
|
||||
* const whmcsClient = await this.whmcsService.createClient(user.email);
|
||||
* context.addRollback(async () => {
|
||||
* await this.whmcsService.deleteClient(whmcsClient.id);
|
||||
* });
|
||||
*
|
||||
* // Salesforce operations with rollback
|
||||
* const sfAccount = await this.salesforceService.createAccount(user);
|
||||
* context.addRollback(async () => {
|
||||
* await this.salesforceService.deleteAccount(sfAccount.Id);
|
||||
* });
|
||||
*
|
||||
* return { user, whmcsClient, sfAccount };
|
||||
* },
|
||||
* {
|
||||
* description: "User signup with external integrations",
|
||||
* timeout: 60000
|
||||
* }
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
async executeTransaction<T>(
|
||||
operation: (tx: any, context: TransactionContext) => Promise<T>,
|
||||
options: TransactionOptions = {}
|
||||
): Promise<TransactionResult<T>> {
|
||||
const {
|
||||
timeout = this.defaultTimeout,
|
||||
maxRetries = this.defaultMaxRetries,
|
||||
isolationLevel = 'ReadCommitted',
|
||||
description = 'Database transaction',
|
||||
autoRollback = true
|
||||
} = options;
|
||||
|
||||
const transactionId = this.generateTransactionId();
|
||||
const startTime = new Date();
|
||||
|
||||
let context: TransactionContext = {
|
||||
id: transactionId,
|
||||
startTime,
|
||||
operations: [],
|
||||
rollbackActions: []
|
||||
};
|
||||
|
||||
this.logger.log(`Starting transaction [${transactionId}]`, {
|
||||
description,
|
||||
timeout,
|
||||
isolationLevel,
|
||||
maxRetries
|
||||
});
|
||||
|
||||
let attempt = 0;
|
||||
let lastError: Error | null = null;
|
||||
|
||||
while (attempt < maxRetries) {
|
||||
attempt++;
|
||||
|
||||
try {
|
||||
// Reset context for retry attempts
|
||||
if (attempt > 1) {
|
||||
context = {
|
||||
id: transactionId,
|
||||
startTime,
|
||||
operations: [],
|
||||
rollbackActions: []
|
||||
};
|
||||
}
|
||||
|
||||
const result = await Promise.race([
|
||||
this.executeTransactionAttempt(operation, context, isolationLevel),
|
||||
this.createTimeoutPromise<T>(timeout, transactionId)
|
||||
]);
|
||||
|
||||
const duration = Date.now() - startTime.getTime();
|
||||
|
||||
this.logger.log(`Transaction completed successfully [${transactionId}]`, {
|
||||
description,
|
||||
duration,
|
||||
attempt,
|
||||
operationsCount: context.operations.length
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
data: result,
|
||||
duration,
|
||||
operationsCount: context.operations.length,
|
||||
rollbacksExecuted: 0
|
||||
};
|
||||
|
||||
} catch (error) {
|
||||
lastError = error as Error;
|
||||
const duration = Date.now() - startTime.getTime();
|
||||
|
||||
this.logger.error(`Transaction attempt ${attempt} failed [${transactionId}]`, {
|
||||
description,
|
||||
error: getErrorMessage(error),
|
||||
duration,
|
||||
operationsCount: context.operations.length,
|
||||
rollbackActionsCount: context.rollbackActions.length
|
||||
});
|
||||
|
||||
// Execute rollbacks if this is the final attempt or not a retryable error
|
||||
if (attempt === maxRetries || !this.isRetryableError(error)) {
|
||||
const rollbacksExecuted = await this.executeRollbacks(context, autoRollback);
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: getErrorMessage(error),
|
||||
duration,
|
||||
operationsCount: context.operations.length,
|
||||
rollbacksExecuted
|
||||
};
|
||||
}
|
||||
|
||||
// Wait before retry (exponential backoff)
|
||||
await this.delay(Math.pow(2, attempt - 1) * 1000);
|
||||
}
|
||||
}
|
||||
|
||||
// This should never be reached, but just in case
|
||||
const duration = Date.now() - startTime.getTime();
|
||||
return {
|
||||
success: false,
|
||||
error: lastError ? getErrorMessage(lastError) : 'Unknown transaction error',
|
||||
duration,
|
||||
operationsCount: context.operations.length,
|
||||
rollbacksExecuted: 0
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a simple database-only transaction (no external operations)
|
||||
*/
|
||||
async executeSimpleTransaction<T>(
|
||||
operation: (tx: any) => Promise<T>,
|
||||
options: Omit<TransactionOptions, 'autoRollback'> = {}
|
||||
): Promise<T> {
|
||||
const result = await this.executeTransaction(
|
||||
async (tx, _context) => operation(tx),
|
||||
{ ...options, autoRollback: false }
|
||||
);
|
||||
|
||||
if (!result.success) {
|
||||
throw new Error(result.error || 'Transaction failed');
|
||||
}
|
||||
|
||||
return result.data!;
|
||||
}
|
||||
|
||||
private async executeTransactionAttempt<T>(
|
||||
operation: (tx: any, context: TransactionContext) => Promise<T>,
|
||||
context: TransactionContext,
|
||||
isolationLevel: string
|
||||
): Promise<T> {
|
||||
return await this.prisma.$transaction(
|
||||
async (tx) => {
|
||||
// Enhance context with helper methods
|
||||
const enhancedContext = this.enhanceContext(context);
|
||||
|
||||
// Execute the operation
|
||||
return await operation(tx, enhancedContext);
|
||||
},
|
||||
{
|
||||
isolationLevel: isolationLevel as any,
|
||||
timeout: 30000 // Prisma transaction timeout
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
private enhanceContext(context: TransactionContext): TransactionContext {
|
||||
return {
|
||||
...context,
|
||||
addOperation: (description: string) => {
|
||||
context.operations.push(`${new Date().toISOString()}: ${description}`);
|
||||
},
|
||||
addRollback: (rollbackFn: () => Promise<void>) => {
|
||||
context.rollbackActions.push(rollbackFn);
|
||||
}
|
||||
} as TransactionContext & {
|
||||
addOperation: (description: string) => void;
|
||||
addRollback: (rollbackFn: () => Promise<void>) => void;
|
||||
};
|
||||
}
|
||||
|
||||
private async executeRollbacks(
|
||||
context: TransactionContext,
|
||||
autoRollback: boolean
|
||||
): Promise<number> {
|
||||
if (!autoRollback || context.rollbackActions.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
this.logger.warn(`Executing ${context.rollbackActions.length} rollback actions [${context.id}]`);
|
||||
|
||||
let rollbacksExecuted = 0;
|
||||
|
||||
// Execute rollbacks in reverse order (LIFO)
|
||||
for (let i = context.rollbackActions.length - 1; i >= 0; i--) {
|
||||
try {
|
||||
await context.rollbackActions[i]();
|
||||
rollbacksExecuted++;
|
||||
this.logger.debug(`Rollback ${i + 1} completed [${context.id}]`);
|
||||
} catch (rollbackError) {
|
||||
this.logger.error(`Rollback ${i + 1} failed [${context.id}]`, {
|
||||
error: getErrorMessage(rollbackError)
|
||||
});
|
||||
// Continue with other rollbacks even if one fails
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.log(`Completed ${rollbacksExecuted}/${context.rollbackActions.length} rollbacks [${context.id}]`);
|
||||
return rollbacksExecuted;
|
||||
}
|
||||
|
||||
private isRetryableError(error: unknown): boolean {
|
||||
const errorMessage = getErrorMessage(error).toLowerCase();
|
||||
|
||||
// Retry on serialization failures, deadlocks, and temporary connection issues
|
||||
return (
|
||||
errorMessage.includes('serialization failure') ||
|
||||
errorMessage.includes('deadlock') ||
|
||||
errorMessage.includes('connection') ||
|
||||
errorMessage.includes('timeout') ||
|
||||
errorMessage.includes('lock wait timeout')
|
||||
);
|
||||
}
|
||||
|
||||
private async createTimeoutPromise<T>(timeout: number, transactionId: string): Promise<T> {
|
||||
return new Promise((_, reject) => {
|
||||
setTimeout(() => {
|
||||
reject(new Error(`Transaction timeout after ${timeout}ms [${transactionId}]`));
|
||||
}, timeout);
|
||||
});
|
||||
}
|
||||
|
||||
private async delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
private generateTransactionId(): string {
|
||||
return `tx_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get transaction statistics for monitoring
|
||||
*/
|
||||
async getTransactionStats() {
|
||||
// This could be enhanced with metrics collection
|
||||
return {
|
||||
activeTransactions: 0, // Would need to track active transactions
|
||||
totalTransactions: 0, // Would need to track total count
|
||||
successRate: 0, // Would need to track success/failure rates
|
||||
averageDuration: 0 // Would need to track durations
|
||||
};
|
||||
}
|
||||
}
|
||||
72
apps/bff/src/core/health/queue-health.controller.ts
Normal file
72
apps/bff/src/core/health/queue-health.controller.ts
Normal file
@ -0,0 +1,72 @@
|
||||
import { Controller, Get } from "@nestjs/common";
|
||||
import { ApiTags, ApiOperation, ApiResponse } from "@nestjs/swagger";
|
||||
import { WhmcsRequestQueueService } from "@bff/core/queue/services/whmcs-request-queue.service";
|
||||
import { SalesforceRequestQueueService } from "@bff/core/queue/services/salesforce-request-queue.service";
|
||||
|
||||
@ApiTags("Health")
|
||||
@Controller("health/queues")
|
||||
export class QueueHealthController {
|
||||
constructor(
|
||||
private readonly whmcsQueue: WhmcsRequestQueueService,
|
||||
private readonly salesforceQueue: SalesforceRequestQueueService
|
||||
) {}
|
||||
|
||||
@Get()
|
||||
@ApiOperation({
|
||||
summary: "Get queue health status",
|
||||
description: "Returns health status and metrics for WHMCS and Salesforce request queues"
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: "Queue health status retrieved successfully"
|
||||
})
|
||||
getQueueHealth() {
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
whmcs: {
|
||||
health: this.whmcsQueue.getHealthStatus(),
|
||||
metrics: this.whmcsQueue.getMetrics(),
|
||||
},
|
||||
salesforce: {
|
||||
health: this.salesforceQueue.getHealthStatus(),
|
||||
metrics: this.salesforceQueue.getMetrics(),
|
||||
dailyUsage: this.salesforceQueue.getDailyUsage(),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@Get("whmcs")
|
||||
@ApiOperation({
|
||||
summary: "Get WHMCS queue metrics",
|
||||
description: "Returns detailed metrics for the WHMCS request queue"
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: "WHMCS queue metrics retrieved successfully"
|
||||
})
|
||||
getWhmcsQueueMetrics() {
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
health: this.whmcsQueue.getHealthStatus(),
|
||||
metrics: this.whmcsQueue.getMetrics(),
|
||||
};
|
||||
}
|
||||
|
||||
@Get("salesforce")
|
||||
@ApiOperation({
|
||||
summary: "Get Salesforce queue metrics",
|
||||
description: "Returns detailed metrics for the Salesforce request queue including daily API usage"
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: "Salesforce queue metrics retrieved successfully"
|
||||
})
|
||||
getSalesforceQueueMetrics() {
|
||||
return {
|
||||
timestamp: new Date().toISOString(),
|
||||
health: this.salesforceQueue.getHealthStatus(),
|
||||
metrics: this.salesforceQueue.getMetrics(),
|
||||
dailyUsage: this.salesforceQueue.getDailyUsage(),
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -10,12 +10,14 @@ import { Request, Response } from "express";
|
||||
import { getClientSafeErrorMessage } from "../utils/error.util";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { SecureErrorMapperService } from "../security/services/secure-error-mapper.service";
|
||||
|
||||
@Catch()
|
||||
export class GlobalExceptionFilter implements ExceptionFilter {
|
||||
constructor(
|
||||
@Inject(Logger) private readonly logger: Logger,
|
||||
private readonly configService: ConfigService
|
||||
private readonly configService: ConfigService,
|
||||
private readonly secureErrorMapper: SecureErrorMapperService
|
||||
) {}
|
||||
|
||||
catch(exception: unknown, host: ArgumentsHost): void {
|
||||
@ -23,67 +25,76 @@ export class GlobalExceptionFilter implements ExceptionFilter {
|
||||
const response = ctx.getResponse<Response>();
|
||||
const request = ctx.getRequest<Request>();
|
||||
|
||||
let status: number;
|
||||
let message: string;
|
||||
let error: string;
|
||||
// Create error context for secure mapping
|
||||
const errorContext = {
|
||||
userId: (request as any).user?.id,
|
||||
requestId: (request as any).requestId || this.generateRequestId(),
|
||||
userAgent: request.get("user-agent"),
|
||||
ip: request.ip,
|
||||
url: request.url,
|
||||
method: request.method,
|
||||
};
|
||||
|
||||
let status: number;
|
||||
let originalError: unknown = exception;
|
||||
|
||||
// Determine HTTP status
|
||||
if (exception instanceof HttpException) {
|
||||
status = exception.getStatus();
|
||||
|
||||
// Extract the actual error from HttpException response
|
||||
const exceptionResponse = exception.getResponse();
|
||||
|
||||
if (typeof exceptionResponse === "object" && exceptionResponse !== null) {
|
||||
const errorResponse = exceptionResponse as { message?: string; error?: string };
|
||||
message = errorResponse.message || exception.message;
|
||||
error = errorResponse.error || exception.constructor.name;
|
||||
originalError = errorResponse.message || exception.message;
|
||||
} else {
|
||||
message = typeof exceptionResponse === "string" ? exceptionResponse : exception.message;
|
||||
error = exception.constructor.name;
|
||||
originalError = typeof exceptionResponse === "string" ? exceptionResponse : exception.message;
|
||||
}
|
||||
} else {
|
||||
status = HttpStatus.INTERNAL_SERVER_ERROR;
|
||||
message = "Internal server error";
|
||||
error = "InternalServerError";
|
||||
|
||||
this.logger.error("Unhandled exception caught", {
|
||||
error: exception instanceof Error ? exception.message : String(exception),
|
||||
stack: exception instanceof Error ? exception.stack : undefined,
|
||||
url: request.url,
|
||||
method: request.method,
|
||||
userAgent: request.get("user-agent"),
|
||||
ip: request.ip,
|
||||
});
|
||||
originalError = exception;
|
||||
}
|
||||
|
||||
const clientSafeMessage =
|
||||
this.configService.get("NODE_ENV") === "production"
|
||||
? getClientSafeErrorMessage(message)
|
||||
: message;
|
||||
|
||||
const code = (error || "InternalServerError")
|
||||
.replace(/([a-z])([A-Z])/g, "$1_$2")
|
||||
.replace(/\s+/g, "_")
|
||||
.toUpperCase();
|
||||
// Use secure error mapper to get safe public message and log securely
|
||||
const errorClassification = this.secureErrorMapper.mapError(originalError, errorContext);
|
||||
const publicMessage = this.secureErrorMapper.getPublicMessage(originalError, errorContext);
|
||||
|
||||
// Log the error securely (this handles sensitive data filtering)
|
||||
this.secureErrorMapper.logSecureError(originalError, errorContext, {
|
||||
httpStatus: status,
|
||||
exceptionType: exception instanceof Error ? exception.constructor.name : 'Unknown'
|
||||
});
|
||||
|
||||
// Create secure error response
|
||||
const errorResponse = {
|
||||
success: false,
|
||||
statusCode: status,
|
||||
code,
|
||||
error,
|
||||
message: clientSafeMessage,
|
||||
code: errorClassification.mapping.code,
|
||||
error: errorClassification.category.toUpperCase(),
|
||||
message: publicMessage,
|
||||
timestamp: new Date().toISOString(),
|
||||
path: request.url,
|
||||
requestId: errorContext.requestId,
|
||||
};
|
||||
|
||||
this.logger.error(`HTTP ${status} Error`, {
|
||||
// Additional logging for monitoring (without sensitive data)
|
||||
this.logger.error(`HTTP ${status} Error [${errorClassification.mapping.code}]`, {
|
||||
statusCode: status,
|
||||
method: request.method,
|
||||
url: request.url,
|
||||
userAgent: request.get("user-agent"),
|
||||
ip: request.ip,
|
||||
error: error,
|
||||
messageLength: message.length,
|
||||
errorCode: errorClassification.mapping.code,
|
||||
category: errorClassification.category,
|
||||
severity: errorClassification.severity,
|
||||
requestId: errorContext.requestId,
|
||||
userId: errorContext.userId,
|
||||
});
|
||||
|
||||
response.status(status).json(errorResponse);
|
||||
}
|
||||
|
||||
private generateRequestId(): string {
|
||||
return `req_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1 +0,0 @@
|
||||
export {};
|
||||
9
apps/bff/src/core/queue/queue.module.ts
Normal file
9
apps/bff/src/core/queue/queue.module.ts
Normal file
@ -0,0 +1,9 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { WhmcsRequestQueueService } from "./services/whmcs-request-queue.service";
|
||||
import { SalesforceRequestQueueService } from "./services/salesforce-request-queue.service";
|
||||
|
||||
/**
 * Bundles the outbound request-queue services (WHMCS and Salesforce) and
 * re-exports them so importing modules can throttle external API calls
 * through the shared queue instances.
 */
@Module({
  providers: [WhmcsRequestQueueService, SalesforceRequestQueueService],
  exports: [WhmcsRequestQueueService, SalesforceRequestQueueService],
})
export class QueueModule {}
|
||||
@ -0,0 +1,469 @@
|
||||
import { Injectable, Inject, OnModuleInit, OnModuleDestroy } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
|
||||
/** Counters and timings exposed by the Salesforce request queue. */
export interface SalesforceQueueMetrics {
  // Total requests ever submitted to the queue.
  totalRequests: number;
  // Requests that completed successfully.
  completedRequests: number;
  // Requests that ultimately failed.
  failedRequests: number;
  // Requests currently waiting in the queue.
  queueSize: number;
  // Requests currently executing.
  pendingRequests: number;
  // Rolling average time (ms) a request waits before starting.
  averageWaitTime: number;
  // Rolling average execution time (ms) once a request starts.
  averageExecutionTime: number;
  // Requests counted against the Salesforce daily API limit.
  dailyApiUsage: number;
  // Timestamp of the most recent successful request, if any.
  lastRequestTime?: Date;
  // Timestamp of the most recent failure, if any.
  lastErrorTime?: Date;
  // Timestamp of the most recent rate-limit error, if any.
  lastRateLimitTime?: Date;
}
|
||||
|
||||
/** Per-request tuning for SalesforceRequestQueueService.execute. */
export interface SalesforceRequestOptions {
  priority?: number; // Higher number = higher priority (0-10)
  timeout?: number; // Request timeout in ms
  retryAttempts?: number; // Number of retry attempts
  retryDelay?: number; // Base delay between retries in ms
  isLongRunning?: boolean; // Mark as long-running request (>20s expected); routed to the separate long-running queue
}
|
||||
|
||||
/**
|
||||
* Salesforce Request Queue Service
|
||||
*
|
||||
* Manages concurrent requests to Salesforce API to prevent:
|
||||
* - Daily API limit exhaustion (100,000 + 1,000 per user)
|
||||
* - Concurrent request limit violations (25 long-running requests)
|
||||
* - Rate limit violations and 429 errors
|
||||
* - Optimal resource utilization
|
||||
*
|
||||
* Based on Salesforce documentation:
|
||||
* - Daily limit: 100,000 + (1,000 × users) per 24h
|
||||
* - Concurrent limit: 25 long-running requests (>20s)
|
||||
* - Timeout: 10 minutes per request
|
||||
* - Rate limiting: Conservative 120 requests per minute (2 RPS)
|
||||
*/
|
||||
@Injectable()
|
||||
export class SalesforceRequestQueueService implements OnModuleInit, OnModuleDestroy {
|
||||
// Typed `any` because p-queue is brought in via dynamic import inside
// initializeQueues(); at runtime these hold PQueue instances.
private standardQueue: any;
private longRunningQueue: any;

// Mutable counters surfaced by the queue-health endpoints.
private readonly metrics: SalesforceQueueMetrics = {
  totalRequests: 0,
  completedRequests: 0,
  failedRequests: 0,
  queueSize: 0,
  pendingRequests: 0,
  averageWaitTime: 0,
  averageExecutionTime: 0,
  dailyApiUsage: 0,
};

// Rolling samples used to derive the average wait/execution times.
private readonly waitTimes: number[] = [];
private readonly executionTimes: number[] = [];
// Cap on how many samples are retained for the rolling averages.
private readonly maxMetricsHistory = 100;
// When dailyApiUsage next resets — presumably the next 24h boundary;
// confirm against getNextDayReset() (defined later in this class).
private dailyUsageResetTime: Date;

constructor(@Inject(Logger) private readonly logger: Logger) {
  this.dailyUsageResetTime = this.getNextDayReset();
}
|
||||
|
||||
/**
 * Lazily construct the two p-queue instances on first use.
 *
 * NOTE(review): the `!this.standardQueue` guard is evaluated before an
 * `await`, so two concurrent first callers can both pass the check and
 * initialize twice (attaching duplicate queue listeners). Consider caching
 * the initialization promise — confirm whether concurrent first calls can
 * occur in practice.
 */
private async initializeQueues() {
  if (!this.standardQueue) {
    // Dynamic import (p-queue is ESM-only — presumably why a static
    // import is not used here; confirm against the build setup).
    const { default: PQueue } = await import("p-queue");

    // Optimized Salesforce requests queue for better user experience
    this.standardQueue = new PQueue({
      concurrency: 15, // Max 15 concurrent standard requests (increased from 10)
      interval: 60000, // Per minute
      intervalCap: 600, // Max 600 requests per minute (10 RPS - increased from 2 RPS)
      timeout: 30000, // 30 second default timeout
      throwOnTimeout: true,
      carryoverConcurrencyCount: true,
    });

    // Long-running requests queue (separate to respect 25 concurrent limit)
    this.longRunningQueue = new PQueue({
      concurrency: 22, // Max 22 concurrent long-running (closer to 25 limit)
      timeout: 600000, // 10 minute timeout for long-running
      throwOnTimeout: true,
      carryoverConcurrencyCount: true,
    });

    // Set up queue event listeners
    this.setupQueueListeners();
  }
}
|
||||
|
||||
/**
 * Nest lifecycle hook: ensure the p-queue instances exist and record the
 * effective queue configuration in the startup log.
 */
async onModuleInit() {
  await this.initializeQueues();

  const startupConfig = {
    standardConcurrency: 15,
    longRunningConcurrency: 22,
    rateLimit: "600 requests/minute (10 RPS)",
    standardTimeout: "30 seconds",
    longRunningTimeout: "10 minutes",
  };
  this.logger.log("Salesforce Request Queue initialized", startupConfig);
}
|
||||
|
||||
/**
 * Nest lifecycle hook: log queue depths and drain both queues before the
 * application shuts down.
 */
async onModuleDestroy() {
  this.logger.log("Shutting down Salesforce Request Queue", {
    standardPending: this.standardQueue.pending,
    standardQueueSize: this.standardQueue.size,
    longRunningPending: this.longRunningQueue.pending,
    longRunningQueueSize: this.longRunningQueue.size,
  });

  // Wait for pending requests to complete.
  // NOTE(review): no timeout is actually enforced here — onIdle() can block
  // shutdown indefinitely if a queued request hangs; confirm whether a
  // bounded wait (e.g. Promise.race with a timer) is required.
  try {
    await Promise.all([
      this.standardQueue.onIdle(),
      this.longRunningQueue.onIdle(),
    ]);
  } catch (error) {
    this.logger.warn("Some Salesforce requests may not have completed during shutdown", {
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
|
||||
|
||||
  /**
   * Execute a Salesforce API request through the appropriate queue
   *
   * Routes the request to the standard or long-running queue (based on
   * `options.isLongRunning`), records wait/execution-time samples and
   * success/failure counters, and delegates transient-failure handling to
   * `executeWithRetry`.
   *
   * @param requestFn - thunk performing the actual Salesforce call
   * @param options   - priority / timeout / retry / queue-routing options
   * @returns the resolved value of `requestFn`
   * @throws rethrows the final error once retries are exhausted
   */
  async execute<T>(
    requestFn: () => Promise<T>,
    options: SalesforceRequestOptions = {}
  ): Promise<T> {
    await this.initializeQueues();
    // Check daily API usage (resets the counter at the day boundary)
    this.checkDailyUsage();

    const startTime = Date.now();
    const requestId = this.generateRequestId();
    const isLongRunning = options.isLongRunning || false;
    const queue = isLongRunning ? this.longRunningQueue : this.standardQueue;

    // Counted at enqueue time (not completion), so daily usage reflects
    // attempted calls even if they later fail or time out in the queue.
    this.metrics.totalRequests++;
    this.metrics.dailyApiUsage++;
    this.updateQueueMetrics();

    this.logger.debug("Queueing Salesforce request", {
      requestId,
      isLongRunning,
      queueSize: queue.size,
      pending: queue.pending,
      priority: options.priority || 0,
      dailyUsage: this.metrics.dailyApiUsage,
    });

    try {
      const result = await queue.add(
        async () => {
          // Time spent waiting in the queue before execution actually began.
          const waitTime = Date.now() - startTime;
          this.recordWaitTime(waitTime);

          const executionStart = Date.now();

          try {
            const response = await this.executeWithRetry(requestFn, options);

            const executionTime = Date.now() - executionStart;
            this.recordExecutionTime(executionTime);
            this.metrics.completedRequests++;
            this.metrics.lastRequestTime = new Date();

            this.logger.debug("Salesforce request completed", {
              requestId,
              isLongRunning,
              waitTime,
              executionTime,
              totalTime: Date.now() - startTime,
            });

            return response;
          } catch (error) {
            const executionTime = Date.now() - executionStart;
            this.recordExecutionTime(executionTime);
            this.metrics.failedRequests++;
            this.metrics.lastErrorTime = new Date();

            // Check if it's a rate limit error
            if (this.isRateLimitError(error)) {
              this.metrics.lastRateLimitTime = new Date();
              this.logger.warn("Salesforce rate limit encountered", {
                requestId,
                dailyUsage: this.metrics.dailyApiUsage,
              });
            }

            this.logger.error("Salesforce request failed", {
              requestId,
              isLongRunning,
              waitTime,
              executionTime,
              error: error instanceof Error ? error.message : String(error),
            });

            throw error;
          }
        },
        {
          priority: options.priority || 0,
        }
      );

      return result;
    } catch (error) {
      // NOTE(review): errors thrown inside the queued task already increment
      // `failedRequests` in the inner catch and are rethrown, so a single
      // failed request is counted twice here — confirm whether the
      // double-count is intentional (it also covers queue-level timeouts,
      // which never enter the inner catch).
      this.metrics.failedRequests++;
      this.metrics.lastErrorTime = new Date();
      throw error;
    } finally {
      this.updateQueueMetrics();
    }
  }
|
||||
|
||||
/**
|
||||
* Execute high-priority Salesforce request (jumps queue)
|
||||
*/
|
||||
async executeHighPriority<T>(
|
||||
requestFn: () => Promise<T>,
|
||||
isLongRunning = false
|
||||
): Promise<T> {
|
||||
return this.execute(requestFn, { priority: 10, isLongRunning });
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute long-running Salesforce request (uses separate queue)
|
||||
*/
|
||||
async executeLongRunning<T>(requestFn: () => Promise<T>): Promise<T> {
|
||||
return this.execute(requestFn, { isLongRunning: true, timeout: 600000 });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current queue metrics
|
||||
*/
|
||||
getMetrics(): SalesforceQueueMetrics {
|
||||
this.updateQueueMetrics();
|
||||
return { ...this.metrics };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue health status
|
||||
*/
|
||||
getHealthStatus(): {
|
||||
status: "healthy" | "degraded" | "unhealthy";
|
||||
queueSize: number;
|
||||
pendingRequests: number;
|
||||
errorRate: number;
|
||||
dailyUsagePercent: number;
|
||||
averageWaitTime: number;
|
||||
} {
|
||||
this.updateQueueMetrics();
|
||||
|
||||
const errorRate = this.metrics.totalRequests > 0
|
||||
? this.metrics.failedRequests / this.metrics.totalRequests
|
||||
: 0;
|
||||
|
||||
// Estimate daily limit (conservative: 150,000 for ~50 users)
|
||||
const estimatedDailyLimit = 150000;
|
||||
const dailyUsagePercent = this.metrics.dailyApiUsage / estimatedDailyLimit;
|
||||
|
||||
let status: "healthy" | "degraded" | "unhealthy" = "healthy";
|
||||
|
||||
// Adjusted thresholds for higher throughput (15 concurrent, 10 RPS)
|
||||
if (
|
||||
this.metrics.queueSize > 200 ||
|
||||
errorRate > 0.1 ||
|
||||
dailyUsagePercent > 0.9
|
||||
) {
|
||||
status = "unhealthy";
|
||||
} else if (
|
||||
this.metrics.queueSize > 80 ||
|
||||
errorRate > 0.05 ||
|
||||
dailyUsagePercent > 0.7
|
||||
) {
|
||||
status = "degraded";
|
||||
}
|
||||
|
||||
return {
|
||||
status,
|
||||
queueSize: this.metrics.queueSize,
|
||||
pendingRequests: this.metrics.pendingRequests,
|
||||
errorRate,
|
||||
dailyUsagePercent,
|
||||
averageWaitTime: this.metrics.averageWaitTime,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get daily API usage information
|
||||
*/
|
||||
getDailyUsage(): {
|
||||
usage: number;
|
||||
resetTime: Date;
|
||||
hoursUntilReset: number;
|
||||
} {
|
||||
return {
|
||||
usage: this.metrics.dailyApiUsage,
|
||||
resetTime: this.dailyUsageResetTime,
|
||||
hoursUntilReset: Math.ceil(
|
||||
(this.dailyUsageResetTime.getTime() - Date.now()) / (1000 * 60 * 60)
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the queues (emergency use only)
|
||||
*/
|
||||
async clearQueues(): Promise<void> {
|
||||
this.logger.warn("Clearing Salesforce request queues", {
|
||||
standardQueueSize: this.standardQueue.size,
|
||||
standardPending: this.standardQueue.pending,
|
||||
longRunningQueueSize: this.longRunningQueue.size,
|
||||
longRunningPending: this.longRunningQueue.pending,
|
||||
});
|
||||
|
||||
this.standardQueue.clear();
|
||||
this.longRunningQueue.clear();
|
||||
|
||||
await Promise.all([
|
||||
this.standardQueue.onIdle(),
|
||||
this.longRunningQueue.onIdle(),
|
||||
]);
|
||||
}
|
||||
|
||||
private async executeWithRetry<T>(
|
||||
requestFn: () => Promise<T>,
|
||||
options: SalesforceRequestOptions
|
||||
): Promise<T> {
|
||||
const maxAttempts = options.retryAttempts || 3;
|
||||
const baseDelay = options.retryDelay || 1000;
|
||||
|
||||
let lastError: Error | undefined;
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
return await requestFn();
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
|
||||
if (attempt === maxAttempts) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Special handling for rate limit errors
|
||||
let delay = baseDelay * Math.pow(2, attempt - 1);
|
||||
|
||||
if (this.isRateLimitError(error)) {
|
||||
// Longer delay for rate limit errors
|
||||
delay = Math.max(delay, 30000); // At least 30 seconds
|
||||
}
|
||||
|
||||
// Add jitter
|
||||
delay += Math.random() * 1000;
|
||||
|
||||
this.logger.debug("Salesforce request failed, retrying", {
|
||||
attempt,
|
||||
maxAttempts,
|
||||
delay,
|
||||
isRateLimit: this.isRateLimitError(error),
|
||||
error: lastError.message,
|
||||
});
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
private isRateLimitError(error: unknown): boolean {
|
||||
if (error instanceof Error) {
|
||||
const message = error.message.toLowerCase();
|
||||
return (
|
||||
message.includes("rate limit") ||
|
||||
message.includes("too many requests") ||
|
||||
message.includes("429") ||
|
||||
message.includes("request limit exceeded")
|
||||
);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private checkDailyUsage(): void {
|
||||
const now = new Date();
|
||||
|
||||
// Reset daily usage if we've passed the reset time
|
||||
if (now >= this.dailyUsageResetTime) {
|
||||
this.metrics.dailyApiUsage = 0;
|
||||
this.dailyUsageResetTime = this.getNextDayReset();
|
||||
|
||||
this.logger.log("Daily Salesforce API usage reset", {
|
||||
resetTime: this.dailyUsageResetTime,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private getNextDayReset(): Date {
|
||||
const tomorrow = new Date();
|
||||
tomorrow.setDate(tomorrow.getDate() + 1);
|
||||
tomorrow.setHours(0, 0, 0, 0);
|
||||
return tomorrow;
|
||||
}
|
||||
|
||||
private setupQueueListeners(): void {
|
||||
// Standard queue listeners
|
||||
this.standardQueue.on("add", () => this.updateQueueMetrics());
|
||||
this.standardQueue.on("next", () => this.updateQueueMetrics());
|
||||
this.standardQueue.on("idle", () => {
|
||||
this.logger.debug("Salesforce standard queue is idle");
|
||||
this.updateQueueMetrics();
|
||||
});
|
||||
this.standardQueue.on("error", (error: Error) => {
|
||||
this.logger.error("Salesforce standard queue error", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
});
|
||||
|
||||
// Long-running queue listeners
|
||||
this.longRunningQueue.on("add", () => this.updateQueueMetrics());
|
||||
this.longRunningQueue.on("next", () => this.updateQueueMetrics());
|
||||
this.longRunningQueue.on("idle", () => {
|
||||
this.logger.debug("Salesforce long-running queue is idle");
|
||||
this.updateQueueMetrics();
|
||||
});
|
||||
this.longRunningQueue.on("error", (error: Error) => {
|
||||
this.logger.error("Salesforce long-running queue error", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private updateQueueMetrics(): void {
|
||||
this.metrics.queueSize = this.standardQueue.size + this.longRunningQueue.size;
|
||||
this.metrics.pendingRequests = this.standardQueue.pending + this.longRunningQueue.pending;
|
||||
|
||||
// Calculate averages
|
||||
if (this.waitTimes.length > 0) {
|
||||
this.metrics.averageWaitTime =
|
||||
this.waitTimes.reduce((sum, time) => sum + time, 0) / this.waitTimes.length;
|
||||
}
|
||||
|
||||
if (this.executionTimes.length > 0) {
|
||||
this.metrics.averageExecutionTime =
|
||||
this.executionTimes.reduce((sum, time) => sum + time, 0) / this.executionTimes.length;
|
||||
}
|
||||
}
|
||||
|
||||
private recordWaitTime(waitTime: number): void {
|
||||
this.waitTimes.push(waitTime);
|
||||
if (this.waitTimes.length > this.maxMetricsHistory) {
|
||||
this.waitTimes.shift();
|
||||
}
|
||||
}
|
||||
|
||||
private recordExecutionTime(executionTime: number): void {
|
||||
this.executionTimes.push(executionTime);
|
||||
if (this.executionTimes.length > this.maxMetricsHistory) {
|
||||
this.executionTimes.shift();
|
||||
}
|
||||
}
|
||||
|
||||
private generateRequestId(): string {
|
||||
return `sf_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
}
|
||||
328
apps/bff/src/core/queue/services/whmcs-request-queue.service.ts
Normal file
328
apps/bff/src/core/queue/services/whmcs-request-queue.service.ts
Normal file
@ -0,0 +1,328 @@
|
||||
import { Injectable, Inject, OnModuleInit, OnModuleDestroy } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
|
||||
/**
 * Point-in-time metrics snapshot for the WHMCS request queue.
 */
export interface WhmcsQueueMetrics {
  totalRequests: number; // requests ever enqueued
  completedRequests: number; // requests that resolved successfully
  failedRequests: number; // requests that ultimately failed (after retries)
  queueSize: number; // requests waiting in the queue, not yet started
  pendingRequests: number; // requests currently executing
  averageWaitTime: number; // mean queue-wait time in ms (rolling window)
  averageExecutionTime: number; // mean execution time in ms (rolling window)
  lastRequestTime?: Date; // when the most recent request completed
  lastErrorTime?: Date; // when the most recent failure occurred
}
|
||||
|
||||
/**
 * Per-request tuning options accepted by `WhmcsRequestQueueService.execute`.
 */
export interface WhmcsRequestOptions {
  priority?: number; // Higher number = higher priority (0-10)
  timeout?: number; // Request timeout in ms
  retryAttempts?: number; // Number of retry attempts
  retryDelay?: number; // Base delay between retries in ms
}
|
||||
|
||||
/**
|
||||
* WHMCS Request Queue Service
|
||||
*
|
||||
* Manages concurrent requests to WHMCS API to prevent:
|
||||
* - Database connection pool exhaustion
|
||||
* - Server overload from parallel requests
|
||||
* - Rate limit violations (conservative approach)
|
||||
* - Resource contention issues
|
||||
*
|
||||
* Based on research:
|
||||
* - WHMCS has no official rate limits but performance degrades with high concurrency
|
||||
* - Conservative approach: max 3 concurrent requests
|
||||
* - Rate limiting: max 30 requests per minute (0.5 RPS)
|
||||
*/
|
||||
@Injectable()
|
||||
export class WhmcsRequestQueueService implements OnModuleInit, OnModuleDestroy {
|
||||
private queue: any;
|
||||
private readonly metrics: WhmcsQueueMetrics = {
|
||||
totalRequests: 0,
|
||||
completedRequests: 0,
|
||||
failedRequests: 0,
|
||||
queueSize: 0,
|
||||
pendingRequests: 0,
|
||||
averageWaitTime: 0,
|
||||
averageExecutionTime: 0,
|
||||
};
|
||||
|
||||
private readonly waitTimes: number[] = [];
|
||||
private readonly executionTimes: number[] = [];
|
||||
private readonly maxMetricsHistory = 100;
|
||||
|
||||
constructor(@Inject(Logger) private readonly logger: Logger) {}
|
||||
|
||||
private async initializeQueue() {
|
||||
if (!this.queue) {
|
||||
const { default: PQueue } = await import("p-queue");
|
||||
|
||||
// Optimized WHMCS queue configuration for better user experience
|
||||
this.queue = new PQueue({
|
||||
concurrency: 15, // Max 15 concurrent WHMCS requests (matches Salesforce)
|
||||
interval: 60000, // Per minute
|
||||
intervalCap: 300, // Max 300 requests per minute (5 RPS - increased from 0.5 RPS)
|
||||
timeout: 30000, // 30 second default timeout
|
||||
throwOnTimeout: true,
|
||||
carryoverConcurrencyCount: true,
|
||||
});
|
||||
|
||||
// Set up queue event listeners
|
||||
this.setupQueueListeners();
|
||||
}
|
||||
}
|
||||
|
||||
async onModuleInit() {
|
||||
await this.initializeQueue();
|
||||
this.logger.log("WHMCS Request Queue initialized", {
|
||||
concurrency: 15,
|
||||
rateLimit: "300 requests/minute (5 RPS)",
|
||||
timeout: "30 seconds",
|
||||
});
|
||||
}
|
||||
|
||||
async onModuleDestroy() {
|
||||
this.logger.log("Shutting down WHMCS Request Queue", {
|
||||
pendingRequests: this.queue.pending,
|
||||
queueSize: this.queue.size,
|
||||
});
|
||||
|
||||
// Wait for pending requests to complete (with timeout)
|
||||
try {
|
||||
await this.queue.onIdle();
|
||||
} catch (error) {
|
||||
this.logger.warn("Some WHMCS requests may not have completed during shutdown", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a WHMCS API request through the queue
|
||||
*/
|
||||
async execute<T>(
|
||||
requestFn: () => Promise<T>,
|
||||
options: WhmcsRequestOptions = {}
|
||||
): Promise<T> {
|
||||
await this.initializeQueue();
|
||||
const startTime = Date.now();
|
||||
const requestId = this.generateRequestId();
|
||||
|
||||
this.metrics.totalRequests++;
|
||||
this.updateQueueMetrics();
|
||||
|
||||
this.logger.debug("Queueing WHMCS request", {
|
||||
requestId,
|
||||
queueSize: this.queue.size,
|
||||
pending: this.queue.pending,
|
||||
priority: options.priority || 0,
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await this.queue.add(
|
||||
async () => {
|
||||
const waitTime = Date.now() - startTime;
|
||||
this.recordWaitTime(waitTime);
|
||||
|
||||
const executionStart = Date.now();
|
||||
|
||||
try {
|
||||
const response = await this.executeWithRetry(requestFn, options);
|
||||
|
||||
const executionTime = Date.now() - executionStart;
|
||||
this.recordExecutionTime(executionTime);
|
||||
this.metrics.completedRequests++;
|
||||
this.metrics.lastRequestTime = new Date();
|
||||
|
||||
this.logger.debug("WHMCS request completed", {
|
||||
requestId,
|
||||
waitTime,
|
||||
executionTime,
|
||||
totalTime: Date.now() - startTime,
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
const executionTime = Date.now() - executionStart;
|
||||
this.recordExecutionTime(executionTime);
|
||||
this.metrics.failedRequests++;
|
||||
this.metrics.lastErrorTime = new Date();
|
||||
|
||||
this.logger.error("WHMCS request failed", {
|
||||
requestId,
|
||||
waitTime,
|
||||
executionTime,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
{
|
||||
priority: options.priority || 0,
|
||||
}
|
||||
);
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
this.metrics.failedRequests++;
|
||||
this.metrics.lastErrorTime = new Date();
|
||||
throw error;
|
||||
} finally {
|
||||
this.updateQueueMetrics();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute high-priority WHMCS request (jumps queue)
|
||||
*/
|
||||
async executeHighPriority<T>(requestFn: () => Promise<T>): Promise<T> {
|
||||
return this.execute(requestFn, { priority: 10 });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current queue metrics
|
||||
*/
|
||||
getMetrics(): WhmcsQueueMetrics {
|
||||
this.updateQueueMetrics();
|
||||
return { ...this.metrics };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue health status
|
||||
*/
|
||||
getHealthStatus(): {
|
||||
status: "healthy" | "degraded" | "unhealthy";
|
||||
queueSize: number;
|
||||
pendingRequests: number;
|
||||
errorRate: number;
|
||||
averageWaitTime: number;
|
||||
} {
|
||||
this.updateQueueMetrics();
|
||||
|
||||
const errorRate = this.metrics.totalRequests > 0
|
||||
? this.metrics.failedRequests / this.metrics.totalRequests
|
||||
: 0;
|
||||
|
||||
let status: "healthy" | "degraded" | "unhealthy" = "healthy";
|
||||
|
||||
// Adjusted thresholds for higher throughput (15 concurrent, 5 RPS)
|
||||
if (this.metrics.queueSize > 120 || errorRate > 0.1) {
|
||||
status = "unhealthy";
|
||||
} else if (this.metrics.queueSize > 50 || errorRate > 0.05) {
|
||||
status = "degraded";
|
||||
}
|
||||
|
||||
return {
|
||||
status,
|
||||
queueSize: this.metrics.queueSize,
|
||||
pendingRequests: this.metrics.pendingRequests,
|
||||
errorRate,
|
||||
averageWaitTime: this.metrics.averageWaitTime,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the queue (emergency use only)
|
||||
*/
|
||||
async clearQueue(): Promise<void> {
|
||||
this.logger.warn("Clearing WHMCS request queue", {
|
||||
queueSize: this.queue.size,
|
||||
pendingRequests: this.queue.pending,
|
||||
});
|
||||
|
||||
this.queue.clear();
|
||||
await this.queue.onIdle();
|
||||
}
|
||||
|
||||
private async executeWithRetry<T>(
|
||||
requestFn: () => Promise<T>,
|
||||
options: WhmcsRequestOptions
|
||||
): Promise<T> {
|
||||
const maxAttempts = options.retryAttempts || 3;
|
||||
const baseDelay = options.retryDelay || 1000;
|
||||
|
||||
let lastError: Error | undefined;
|
||||
|
||||
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
||||
try {
|
||||
return await requestFn();
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
|
||||
if (attempt === maxAttempts) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Exponential backoff with jitter
|
||||
const delay = baseDelay * Math.pow(2, attempt - 1) + Math.random() * 1000;
|
||||
|
||||
this.logger.debug("WHMCS request failed, retrying", {
|
||||
attempt,
|
||||
maxAttempts,
|
||||
delay,
|
||||
error: lastError.message,
|
||||
});
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
}
|
||||
}
|
||||
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
private setupQueueListeners(): void {
|
||||
this.queue.on("add", () => {
|
||||
this.updateQueueMetrics();
|
||||
});
|
||||
|
||||
this.queue.on("next", () => {
|
||||
this.updateQueueMetrics();
|
||||
});
|
||||
|
||||
this.queue.on("idle", () => {
|
||||
this.logger.debug("WHMCS queue is idle");
|
||||
this.updateQueueMetrics();
|
||||
});
|
||||
|
||||
this.queue.on("error", (error: Error) => {
|
||||
this.logger.error("WHMCS queue error", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private updateQueueMetrics(): void {
|
||||
this.metrics.queueSize = this.queue.size;
|
||||
this.metrics.pendingRequests = this.queue.pending;
|
||||
|
||||
// Calculate averages
|
||||
if (this.waitTimes.length > 0) {
|
||||
this.metrics.averageWaitTime =
|
||||
this.waitTimes.reduce((sum, time) => sum + time, 0) / this.waitTimes.length;
|
||||
}
|
||||
|
||||
if (this.executionTimes.length > 0) {
|
||||
this.metrics.averageExecutionTime =
|
||||
this.executionTimes.reduce((sum, time) => sum + time, 0) / this.executionTimes.length;
|
||||
}
|
||||
}
|
||||
|
||||
private recordWaitTime(waitTime: number): void {
|
||||
this.waitTimes.push(waitTime);
|
||||
if (this.waitTimes.length > this.maxMetricsHistory) {
|
||||
this.waitTimes.shift();
|
||||
}
|
||||
}
|
||||
|
||||
private recordExecutionTime(executionTime: number): void {
|
||||
this.executionTimes.push(executionTime);
|
||||
if (this.executionTimes.length > this.maxMetricsHistory) {
|
||||
this.executionTimes.shift();
|
||||
}
|
||||
}
|
||||
|
||||
private generateRequestId(): string {
|
||||
return `whmcs_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
|
||||
}
|
||||
167
apps/bff/src/core/security/controllers/csrf.controller.ts
Normal file
167
apps/bff/src/core/security/controllers/csrf.controller.ts
Normal file
@ -0,0 +1,167 @@
|
||||
import { Controller, Get, Post, Req, Res, UseGuards, Inject } from "@nestjs/common";
|
||||
import { ApiTags, ApiOperation, ApiResponse, ApiBearerAuth } from "@nestjs/swagger";
|
||||
import type { Request, Response } from "express";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { CsrfService } from "../services/csrf.service";
|
||||
|
||||
// Express request optionally augmented with the authenticated user;
// these endpoints also serve unauthenticated callers, hence the `?`.
interface AuthenticatedRequest extends Request {
  user?: { id: string; sessionId?: string };
}
|
||||
|
||||
@ApiTags('Security')
|
||||
@Controller('security/csrf')
|
||||
export class CsrfController {
|
||||
constructor(
|
||||
private readonly csrfService: CsrfService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
@Get('token')
|
||||
@ApiOperation({
|
||||
summary: 'Get CSRF token',
|
||||
description: 'Generates and returns a new CSRF token for the current session'
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'CSRF token generated successfully',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
success: { type: 'boolean', example: true },
|
||||
token: { type: 'string', example: 'abc123...' },
|
||||
expiresAt: { type: 'string', format: 'date-time' }
|
||||
}
|
||||
}
|
||||
})
|
||||
getCsrfToken(@Req() req: AuthenticatedRequest, @Res() res: Response) {
|
||||
const sessionId = req.user?.sessionId || this.extractSessionId(req) || undefined;
|
||||
const userId = req.user?.id;
|
||||
|
||||
// Generate new CSRF token
|
||||
const tokenData = this.csrfService.generateToken(sessionId, userId);
|
||||
|
||||
// Set CSRF secret in secure cookie
|
||||
res.cookie('csrf-secret', tokenData.secret, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'strict',
|
||||
maxAge: 3600000, // 1 hour
|
||||
path: '/',
|
||||
});
|
||||
|
||||
this.logger.debug("CSRF token requested", {
|
||||
userId,
|
||||
sessionId,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip
|
||||
});
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
token: tokenData.token,
|
||||
expiresAt: tokenData.expiresAt.toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
@Post('refresh')
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({
|
||||
summary: 'Refresh CSRF token',
|
||||
description: 'Invalidates current token and generates a new one for authenticated users'
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'CSRF token refreshed successfully',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
success: { type: 'boolean', example: true },
|
||||
token: { type: 'string', example: 'xyz789...' },
|
||||
expiresAt: { type: 'string', format: 'date-time' }
|
||||
}
|
||||
}
|
||||
})
|
||||
refreshCsrfToken(@Req() req: AuthenticatedRequest, @Res() res: Response) {
|
||||
const sessionId = req.user?.sessionId || this.extractSessionId(req) || undefined;
|
||||
const userId = req.user?.id || 'anonymous'; // Default for unauthenticated users
|
||||
|
||||
// Invalidate existing tokens for this user
|
||||
this.csrfService.invalidateUserTokens(userId);
|
||||
|
||||
// Generate new CSRF token
|
||||
const tokenData = this.csrfService.generateToken(sessionId, userId);
|
||||
|
||||
// Set CSRF secret in secure cookie
|
||||
res.cookie('csrf-secret', tokenData.secret, {
|
||||
httpOnly: true,
|
||||
secure: process.env.NODE_ENV === 'production',
|
||||
sameSite: 'strict',
|
||||
maxAge: 3600000, // 1 hour
|
||||
path: '/',
|
||||
});
|
||||
|
||||
this.logger.debug("CSRF token refreshed", {
|
||||
userId,
|
||||
sessionId,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip
|
||||
});
|
||||
|
||||
return res.json({
|
||||
success: true,
|
||||
token: tokenData.token,
|
||||
expiresAt: tokenData.expiresAt.toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
@Get('stats')
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({
|
||||
summary: 'Get CSRF token statistics',
|
||||
description: 'Returns statistics about CSRF tokens (admin/monitoring endpoint)'
|
||||
})
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'CSRF token statistics',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
success: { type: 'boolean', example: true },
|
||||
stats: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
totalTokens: { type: 'number', example: 150 },
|
||||
activeTokens: { type: 'number', example: 120 },
|
||||
expiredTokens: { type: 'number', example: 30 },
|
||||
cacheSize: { type: 'number', example: 150 },
|
||||
maxCacheSize: { type: 'number', example: 10000 }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
getCsrfStats(@Req() req: AuthenticatedRequest) {
|
||||
const userId = req.user?.id || 'anonymous';
|
||||
|
||||
// Only allow admin users to see stats (you might want to add role checking)
|
||||
this.logger.debug("CSRF stats requested", {
|
||||
userId,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip
|
||||
});
|
||||
|
||||
const stats = this.csrfService.getTokenStats();
|
||||
|
||||
return {
|
||||
success: true,
|
||||
stats
|
||||
};
|
||||
}
|
||||
|
||||
private extractSessionId(req: AuthenticatedRequest): string | null {
|
||||
return req.cookies?.['session-id'] ||
|
||||
req.cookies?.['connect.sid'] ||
|
||||
(req as any).sessionID ||
|
||||
null;
|
||||
}
|
||||
}
|
||||
221
apps/bff/src/core/security/middleware/csrf.middleware.ts
Normal file
221
apps/bff/src/core/security/middleware/csrf.middleware.ts
Normal file
@ -0,0 +1,221 @@
|
||||
import { Injectable, NestMiddleware, ForbiddenException, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import type { Request, Response, NextFunction } from "express";
|
||||
import { CsrfService } from "../services/csrf.service";
|
||||
|
||||
// Express request augmented with the fields this middleware reads/writes:
// the validated CSRF token (set on success), the optional authenticated
// user, and express-session's id when present.
interface CsrfRequest extends Request {
  csrfToken?: string;
  user?: { id: string; sessionId?: string };
  sessionID?: string;
}
|
||||
|
||||
/**
|
||||
* CSRF Protection Middleware
|
||||
* Implements double-submit cookie pattern with additional security measures
|
||||
*/
|
||||
@Injectable()
|
||||
export class CsrfMiddleware implements NestMiddleware {
|
||||
private readonly isProduction: boolean;
|
||||
private readonly exemptPaths: Set<string>;
|
||||
private readonly exemptMethods: Set<string>;
|
||||
|
||||
constructor(
|
||||
private readonly csrfService: CsrfService,
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {
|
||||
this.isProduction = this.configService.get("NODE_ENV") === "production";
|
||||
|
||||
// Paths that don't require CSRF protection
|
||||
this.exemptPaths = new Set([
|
||||
'/api/auth/login',
|
||||
'/api/auth/signup',
|
||||
'/api/auth/refresh',
|
||||
'/api/health',
|
||||
'/docs',
|
||||
'/api/webhooks', // Webhooks typically don't use CSRF
|
||||
]);
|
||||
|
||||
// Methods that don't require CSRF protection (safe methods)
|
||||
this.exemptMethods = new Set(['GET', 'HEAD', 'OPTIONS']);
|
||||
}
|
||||
|
||||
use(req: CsrfRequest, res: Response, next: NextFunction): void {
|
||||
// Skip CSRF protection for exempt paths and methods
|
||||
if (this.isExempt(req)) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// For state-changing requests, validate CSRF token
|
||||
if (this.requiresCsrfProtection(req)) {
|
||||
this.validateCsrfToken(req, res, next);
|
||||
} else {
|
||||
// For safe requests, generate and set CSRF token if needed
|
||||
this.ensureCsrfToken(req, res, next);
|
||||
}
|
||||
}
|
||||
|
||||
private isExempt(req: CsrfRequest): boolean {
|
||||
// Check if path is exempt
|
||||
if (this.exemptPaths.has(req.path)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if method is exempt (safe methods)
|
||||
if (this.exemptMethods.has(req.method)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for API endpoints that might be exempt
|
||||
if (req.path.startsWith('/api/webhooks/')) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private requiresCsrfProtection(req: CsrfRequest): boolean {
|
||||
// State-changing methods require CSRF protection
|
||||
return ['POST', 'PUT', 'PATCH', 'DELETE'].includes(req.method);
|
||||
}
|
||||
|
||||
private validateCsrfToken(req: CsrfRequest, res: Response, next: NextFunction): void {
|
||||
const token = this.extractTokenFromRequest(req);
|
||||
const secret = this.extractSecretFromCookie(req);
|
||||
const sessionId = req.user?.sessionId || this.extractSessionId(req);
|
||||
const userId = req.user?.id;
|
||||
|
||||
if (!token) {
|
||||
this.logger.warn("CSRF validation failed - missing token", {
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip
|
||||
});
|
||||
throw new ForbiddenException("CSRF token required");
|
||||
}
|
||||
|
||||
if (!secret) {
|
||||
this.logger.warn("CSRF validation failed - missing secret cookie", {
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip
|
||||
});
|
||||
throw new ForbiddenException("CSRF secret required");
|
||||
}
|
||||
|
||||
const validationResult = this.csrfService.validateToken(token, secret, sessionId || undefined, userId);
|
||||
|
||||
if (!validationResult.isValid) {
|
||||
this.logger.warn("CSRF validation failed", {
|
||||
reason: validationResult.reason,
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userAgent: req.get('user-agent'),
|
||||
ip: req.ip,
|
||||
userId,
|
||||
sessionId
|
||||
});
|
||||
throw new ForbiddenException(`CSRF validation failed: ${validationResult.reason}`);
|
||||
}
|
||||
|
||||
// Store validated token in request for potential use by controllers
|
||||
req.csrfToken = token;
|
||||
|
||||
this.logger.debug("CSRF validation successful", {
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userId,
|
||||
sessionId
|
||||
});
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
private ensureCsrfToken(req: CsrfRequest, res: Response, next: NextFunction): void {
|
||||
const existingSecret = this.extractSecretFromCookie(req);
|
||||
const sessionId = req.user?.sessionId || this.extractSessionId(req);
|
||||
const userId = req.user?.id;
|
||||
|
||||
// If we already have a valid secret, we don't need to generate a new token
|
||||
if (existingSecret) {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Generate new CSRF token
|
||||
const tokenData = this.csrfService.generateToken(sessionId || undefined, userId);
|
||||
|
||||
// Set CSRF secret in secure, SameSite cookie
|
||||
this.setCsrfSecretCookie(res, tokenData.secret);
|
||||
|
||||
// Set CSRF token in response header for client to use
|
||||
res.setHeader('X-CSRF-Token', tokenData.token);
|
||||
|
||||
this.logger.debug("CSRF token generated and set", {
|
||||
method: req.method,
|
||||
path: req.path,
|
||||
userId,
|
||||
sessionId
|
||||
});
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
private extractTokenFromRequest(req: CsrfRequest): string | null {
  // The token may arrive in several places; probe them in order of preference.

  // 1. Dedicated header (most common).
  const headerToken = req.get('X-CSRF-Token');
  if (headerToken) {
    return headerToken;
  }

  // 2. Alternative header — the literal "XMLHttpRequest" marker is not a token.
  const requestedWith = req.get('X-Requested-With');
  if (requestedWith && requestedWith !== 'XMLHttpRequest') {
    return requestedWith;
  }

  // 3. "CSRF <token>" authorization scheme.
  const authHeader = req.get('Authorization');
  if (authHeader && authHeader.startsWith('CSRF ')) {
    return authHeader.substring(5);
  }

  // 4. Form-submission style body fields.
  if (req.body && typeof req.body === 'object') {
    const bodyToken = req.body._csrf || req.body.csrfToken;
    if (bodyToken) {
      return bodyToken;
    }
  }

  // 5. Query parameter (least secure; accepted for GET requests only).
  if (req.method === 'GET') {
    const queryToken = (req.query._csrf as string) || (req.query.csrfToken as string);
    if (queryToken) {
      return queryToken;
    }
  }

  return null;
}
|
||||
|
||||
private extractSecretFromCookie(req: CsrfRequest): string | null {
  // The secret only ever travels in the HttpOnly double-submit cookie.
  const secret = req.cookies?.['csrf-secret'];
  return secret || null;
}
|
||||
|
||||
private extractSessionId(req: CsrfRequest): string | null {
  // The session id may come from our own cookie, the express-session default
  // cookie, or the session middleware's computed id — first non-empty wins.
  const candidates = [
    req.cookies?.['session-id'],
    req.cookies?.['connect.sid'],
    req.sessionID,
  ];
  for (const candidate of candidates) {
    if (candidate) {
      return candidate;
    }
  }
  return null;
}
|
||||
|
||||
private setCsrfSecretCookie(res: Response, secret: string): void {
  // HttpOnly + SameSite=strict keeps the secret out of reach of page scripts
  // and cross-site requests; `secure` is enforced outside local development.
  // NOTE(review): the 1h lifetime mirrors the CSRF_TOKEN_EXPIRY default —
  // confirm the two stay in sync if the expiry is reconfigured.
  res.cookie('csrf-secret', secret, {
    httpOnly: true,
    secure: this.isProduction,
    sameSite: 'strict' as const,
    maxAge: 3600000, // 1 hour
    path: '/',
  });
}
|
||||
}
|
||||
21
apps/bff/src/core/security/security.module.ts
Normal file
21
apps/bff/src/core/security/security.module.ts
Normal file
@ -0,0 +1,21 @@
|
||||
import { Module, MiddlewareConsumer, NestModule } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { SecureErrorMapperService } from "./services/secure-error-mapper.service";
|
||||
import { CsrfService } from "./services/csrf.service";
|
||||
import { CsrfMiddleware } from "./middleware/csrf.middleware";
|
||||
import { CsrfController } from "./controllers/csrf.controller";
|
||||
|
||||
@Module({
  imports: [ConfigModule],
  controllers: [CsrfController],
  // CsrfMiddleware is a provider so Nest can inject its dependencies when it
  // is applied below; the two services are also exported for use by other
  // modules (e.g. the global exception filter).
  providers: [SecureErrorMapperService, CsrfService, CsrfMiddleware],
  exports: [SecureErrorMapperService, CsrfService],
})
// Security module: wires up CSRF protection (service + middleware + token
// endpoint) and the secure error-message mapper.
export class SecurityModule implements NestModule {
  configure(consumer: MiddlewareConsumer) {
    // Apply CSRF middleware to all routes except those handled by the middleware itself
    consumer
      .apply(CsrfMiddleware)
      .forRoutes('*');
  }
}
|
||||
309
apps/bff/src/core/security/services/csrf.service.ts
Normal file
309
apps/bff/src/core/security/services/csrf.service.ts
Normal file
@ -0,0 +1,309 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import * as crypto from "crypto";
|
||||
|
||||
// A generated token/secret pair plus the bindings checked at validation time.
export interface CsrfTokenData {
  token: string;      // HMAC-derived public token handed to the client
  secret: string;     // random secret (stored in the HttpOnly cookie)
  expiresAt: Date;    // absolute expiry; entries past this are rejected
  sessionId?: string; // optional session binding — enforced when present
  userId?: string;    // optional user binding — enforced when present
}

// Outcome of a single token validation attempt.
export interface CsrfValidationResult {
  isValid: boolean;
  reason?: string;           // human-readable failure cause (set when invalid)
  tokenData?: CsrfTokenData; // the matched cache entry (set when valid)
}
|
||||
|
||||
/**
|
||||
* Service for CSRF token generation and validation
|
||||
* Implements double-submit cookie pattern with additional security measures
|
||||
*/
|
||||
@Injectable()
|
||||
export class CsrfService {
|
||||
private readonly tokenExpiry: number; // Token expiry in milliseconds
|
||||
private readonly secretKey: string;
|
||||
private readonly tokenCache = new Map<string, CsrfTokenData>();
|
||||
private readonly maxCacheSize = 10000; // Prevent memory leaks
|
||||
|
||||
constructor(
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {
|
||||
this.tokenExpiry = Number(this.configService.get("CSRF_TOKEN_EXPIRY", "3600000")); // 1 hour default
|
||||
this.secretKey = this.configService.get("CSRF_SECRET_KEY") || this.generateSecretKey();
|
||||
|
||||
if (!this.configService.get("CSRF_SECRET_KEY")) {
|
||||
this.logger.warn("CSRF_SECRET_KEY not configured, using generated key (not suitable for production)");
|
||||
}
|
||||
|
||||
// Clean up expired tokens periodically
|
||||
setInterval(() => this.cleanupExpiredTokens(), 300000); // Every 5 minutes
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a new CSRF token for a user session
|
||||
*/
|
||||
generateToken(sessionId?: string, userId?: string): CsrfTokenData {
|
||||
const secret = this.generateSecret();
|
||||
const token = this.generateTokenFromSecret(secret, sessionId, userId);
|
||||
const expiresAt = new Date(Date.now() + this.tokenExpiry);
|
||||
|
||||
const tokenData: CsrfTokenData = {
|
||||
token,
|
||||
secret,
|
||||
expiresAt,
|
||||
sessionId,
|
||||
userId
|
||||
};
|
||||
|
||||
// Store in cache for validation
|
||||
this.tokenCache.set(token, tokenData);
|
||||
|
||||
// Prevent memory leaks
|
||||
if (this.tokenCache.size > this.maxCacheSize) {
|
||||
this.cleanupExpiredTokens();
|
||||
}
|
||||
|
||||
this.logger.debug("CSRF token generated", {
|
||||
tokenHash: this.hashToken(token),
|
||||
sessionId,
|
||||
userId,
|
||||
expiresAt: expiresAt.toISOString()
|
||||
});
|
||||
|
||||
return tokenData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a CSRF token against the provided secret
|
||||
*/
|
||||
validateToken(
|
||||
token: string,
|
||||
secret: string,
|
||||
sessionId?: string,
|
||||
userId?: string
|
||||
): CsrfValidationResult {
|
||||
if (!token || !secret) {
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Missing token or secret"
|
||||
};
|
||||
}
|
||||
|
||||
// Check if token exists in cache
|
||||
const cachedTokenData = this.tokenCache.get(token);
|
||||
if (!cachedTokenData) {
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Token not found or expired"
|
||||
};
|
||||
}
|
||||
|
||||
// Check expiry
|
||||
if (cachedTokenData.expiresAt < new Date()) {
|
||||
this.tokenCache.delete(token);
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Token expired"
|
||||
};
|
||||
}
|
||||
|
||||
// Validate secret matches
|
||||
if (cachedTokenData.secret !== secret) {
|
||||
this.logger.warn("CSRF token validation failed - secret mismatch", {
|
||||
tokenHash: this.hashToken(token),
|
||||
sessionId,
|
||||
userId
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Invalid secret"
|
||||
};
|
||||
}
|
||||
|
||||
// Validate session binding (if provided)
|
||||
if (sessionId && cachedTokenData.sessionId && cachedTokenData.sessionId !== sessionId) {
|
||||
this.logger.warn("CSRF token validation failed - session mismatch", {
|
||||
tokenHash: this.hashToken(token),
|
||||
expectedSession: cachedTokenData.sessionId,
|
||||
providedSession: sessionId
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Session mismatch"
|
||||
};
|
||||
}
|
||||
|
||||
// Validate user binding (if provided)
|
||||
if (userId && cachedTokenData.userId && cachedTokenData.userId !== userId) {
|
||||
this.logger.warn("CSRF token validation failed - user mismatch", {
|
||||
tokenHash: this.hashToken(token),
|
||||
expectedUser: cachedTokenData.userId,
|
||||
providedUser: userId
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "User mismatch"
|
||||
};
|
||||
}
|
||||
|
||||
// Regenerate expected token to prevent timing attacks
|
||||
const expectedToken = this.generateTokenFromSecret(
|
||||
cachedTokenData.secret,
|
||||
cachedTokenData.sessionId,
|
||||
cachedTokenData.userId
|
||||
);
|
||||
|
||||
// Constant-time comparison
|
||||
if (!this.constantTimeEquals(token, expectedToken)) {
|
||||
this.logger.warn("CSRF token validation failed - token mismatch", {
|
||||
tokenHash: this.hashToken(token),
|
||||
sessionId,
|
||||
userId
|
||||
});
|
||||
return {
|
||||
isValid: false,
|
||||
reason: "Invalid token"
|
||||
};
|
||||
}
|
||||
|
||||
this.logger.debug("CSRF token validated successfully", {
|
||||
tokenHash: this.hashToken(token),
|
||||
sessionId,
|
||||
userId
|
||||
});
|
||||
|
||||
return {
|
||||
isValid: true,
|
||||
tokenData: cachedTokenData
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate a specific token
|
||||
*/
|
||||
invalidateToken(token: string): void {
|
||||
this.tokenCache.delete(token);
|
||||
this.logger.debug("CSRF token invalidated", {
|
||||
tokenHash: this.hashToken(token)
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate all tokens for a specific session
|
||||
*/
|
||||
invalidateSessionTokens(sessionId: string): void {
|
||||
let invalidatedCount = 0;
|
||||
for (const [token, tokenData] of this.tokenCache.entries()) {
|
||||
if (tokenData.sessionId === sessionId) {
|
||||
this.tokenCache.delete(token);
|
||||
invalidatedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug("CSRF tokens invalidated for session", {
|
||||
sessionId,
|
||||
invalidatedCount
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate all tokens for a specific user
|
||||
*/
|
||||
invalidateUserTokens(userId: string): void {
|
||||
let invalidatedCount = 0;
|
||||
for (const [token, tokenData] of this.tokenCache.entries()) {
|
||||
if (tokenData.userId === userId) {
|
||||
this.tokenCache.delete(token);
|
||||
invalidatedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.debug("CSRF tokens invalidated for user", {
|
||||
userId,
|
||||
invalidatedCount
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get token statistics for monitoring
|
||||
*/
|
||||
getTokenStats() {
|
||||
const now = new Date();
|
||||
let activeTokens = 0;
|
||||
let expiredTokens = 0;
|
||||
|
||||
for (const tokenData of this.tokenCache.values()) {
|
||||
if (tokenData.expiresAt > now) {
|
||||
activeTokens++;
|
||||
} else {
|
||||
expiredTokens++;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalTokens: this.tokenCache.size,
|
||||
activeTokens,
|
||||
expiredTokens,
|
||||
cacheSize: this.tokenCache.size,
|
||||
maxCacheSize: this.maxCacheSize
|
||||
};
|
||||
}
|
||||
|
||||
private generateSecret(): string {
|
||||
return crypto.randomBytes(32).toString('base64url');
|
||||
}
|
||||
|
||||
private generateTokenFromSecret(secret: string, sessionId?: string, userId?: string): string {
|
||||
const data = [secret, sessionId || '', userId || ''].join('|');
|
||||
const hmac = crypto.createHmac('sha256', this.secretKey);
|
||||
hmac.update(data);
|
||||
return hmac.digest('base64url');
|
||||
}
|
||||
|
||||
private generateSecretKey(): string {
|
||||
const key = crypto.randomBytes(64).toString('base64url');
|
||||
this.logger.warn("Generated CSRF secret key - set CSRF_SECRET_KEY environment variable for production");
|
||||
return key;
|
||||
}
|
||||
|
||||
private hashToken(token: string): string {
|
||||
// Create a hash of the token for logging (never log the actual token)
|
||||
return crypto.createHash('sha256').update(token).digest('hex').substring(0, 16);
|
||||
}
|
||||
|
||||
private constantTimeEquals(a: string, b: string): boolean {
|
||||
if (a.length !== b.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let result = 0;
|
||||
for (let i = 0; i < a.length; i++) {
|
||||
result |= a.charCodeAt(i) ^ b.charCodeAt(i);
|
||||
}
|
||||
|
||||
return result === 0;
|
||||
}
|
||||
|
||||
private cleanupExpiredTokens(): void {
|
||||
const now = new Date();
|
||||
let cleanedCount = 0;
|
||||
|
||||
for (const [token, tokenData] of this.tokenCache.entries()) {
|
||||
if (tokenData.expiresAt < now) {
|
||||
this.tokenCache.delete(token);
|
||||
cleanedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (cleanedCount > 0) {
|
||||
this.logger.debug("Cleaned up expired CSRF tokens", {
|
||||
cleanedCount,
|
||||
remainingTokens: this.tokenCache.size
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,454 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
|
||||
// Request-scoped metadata attached to error logs and alerts.
export interface ErrorContext {
  userId?: string;
  requestId?: string;
  userAgent?: string;
  ip?: string;
  url?: string;
  method?: string;
}

// How one internal error is presented publicly and logged internally.
export interface SecureErrorMapping {
  code: string;          // stable public error code (e.g. "AUTH_001")
  publicMessage: string; // safe message returned to clients
  logLevel: 'error' | 'warn' | 'info' | 'debug';
  shouldAlert?: boolean; // Whether to send alerts to monitoring
}

// A mapped error: its category, severity, and the public mapping chosen.
export interface ErrorClassification {
  category: 'authentication' | 'authorization' | 'validation' | 'business' | 'system' | 'external';
  severity: 'low' | 'medium' | 'high' | 'critical';
  mapping: SecureErrorMapping;
}
|
||||
|
||||
/**
 * Service for secure error message mapping to prevent information leakage
 * Maps internal errors to safe public messages while preserving security
 *
 * Resolution order: exact error-code lookup, then regex pattern matching
 * against the error message, then a sensitivity-aware default mapping.
 */
@Injectable()
export class SecureErrorMapperService {
  private readonly isDevelopment: boolean;
  // exact error-code -> mapping
  private readonly errorMappings: Map<string, SecureErrorMapping>;
  // ordered message-pattern fallbacks (first match wins)
  private readonly patternMappings: Array<{ pattern: RegExp; mapping: SecureErrorMapping }>;

  constructor(
    private readonly configService: ConfigService,
    @Inject(Logger) private readonly logger: Logger
  ) {
    this.isDevelopment = this.configService.get("NODE_ENV") !== "production";
    this.errorMappings = this.initializeErrorMappings();
    this.patternMappings = this.initializePatternMappings();
  }

  /**
   * Map an error to a secure public message
   *
   * @param error   - anything thrown or rejected; message/code are extracted
   * @param context - optional request metadata (currently unused here; kept
   *                  for interface symmetry with logSecureError)
   */
  mapError(
    error: unknown,
    context?: ErrorContext
  ): ErrorClassification {
    const errorMessage = this.extractErrorMessage(error);
    const errorCode = this.extractErrorCode(error);

    // Try exact code mapping first
    if (errorCode && this.errorMappings.has(errorCode)) {
      const mapping = this.errorMappings.get(errorCode)!;
      return this.createClassification(errorMessage, mapping, context);
    }

    // Try pattern matching
    for (const { pattern, mapping } of this.patternMappings) {
      if (pattern.test(errorMessage)) {
        return this.createClassification(errorMessage, mapping, context);
      }
    }

    // Default fallback
    const defaultMapping = this.getDefaultMapping(errorMessage);
    return this.createClassification(errorMessage, defaultMapping, context);
  }

  /**
   * Get a safe error message for client consumption
   *
   * In development the sanitized original message is appended for debugging;
   * in production only the mapped public message is returned.
   */
  getPublicMessage(error: unknown, context?: ErrorContext): string {
    const classification = this.mapError(error, context);

    // In development, show more details
    if (this.isDevelopment) {
      const originalMessage = this.extractErrorMessage(error);
      return `${classification.mapping.publicMessage} (Dev: ${this.sanitizeForDevelopment(originalMessage)})`;
    }

    return classification.mapping.publicMessage;
  }

  /**
   * Log error with appropriate security level
   *
   * The original message is sanitized before logging; critical mappings with
   * shouldAlert set additionally trigger a security alert.
   */
  logSecureError(
    error: unknown,
    context?: ErrorContext,
    additionalData?: Record<string, unknown>
  ): void {
    const classification = this.mapError(error, context);
    const originalMessage = this.extractErrorMessage(error);

    const logData = {
      errorCode: classification.mapping.code,
      category: classification.category,
      severity: classification.severity,
      publicMessage: classification.mapping.publicMessage,
      originalMessage: this.sanitizeForLogging(originalMessage),
      context,
      ...additionalData
    };

    // Log based on severity and log level
    switch (classification.mapping.logLevel) {
      case 'error':
        this.logger.error(`Security Error: ${classification.mapping.code}`, logData);
        break;
      case 'warn':
        this.logger.warn(`Security Warning: ${classification.mapping.code}`, logData);
        break;
      case 'info':
        this.logger.log(`Security Info: ${classification.mapping.code}`, logData);
        break;
      case 'debug':
        this.logger.debug(`Security Debug: ${classification.mapping.code}`, logData);
        break;
    }

    // Send alerts for critical errors
    if (classification.mapping.shouldAlert && classification.severity === 'critical') {
      this.sendSecurityAlert(classification, context, logData);
    }
  }

  // Static table of known internal error codes -> public mappings.
  private initializeErrorMappings(): Map<string, SecureErrorMapping> {
    return new Map([
      // Authentication Errors
      ['INVALID_CREDENTIALS', {
        code: 'AUTH_001',
        publicMessage: 'Invalid email or password',
        logLevel: 'warn'
      }],
      ['ACCOUNT_LOCKED', {
        code: 'AUTH_002',
        publicMessage: 'Account temporarily locked. Please try again later',
        logLevel: 'warn'
      }],
      ['TOKEN_EXPIRED', {
        code: 'AUTH_003',
        publicMessage: 'Session expired. Please log in again',
        logLevel: 'info'
      }],
      ['TOKEN_INVALID', {
        code: 'AUTH_004',
        publicMessage: 'Invalid session. Please log in again',
        logLevel: 'warn'
      }],

      // Authorization Errors
      ['INSUFFICIENT_PERMISSIONS', {
        code: 'AUTHZ_001',
        publicMessage: 'You do not have permission to perform this action',
        logLevel: 'warn'
      }],
      ['RESOURCE_NOT_FOUND', {
        code: 'AUTHZ_002',
        publicMessage: 'The requested resource was not found',
        logLevel: 'info'
      }],

      // Validation Errors
      ['VALIDATION_FAILED', {
        code: 'VAL_001',
        publicMessage: 'The provided data is invalid',
        logLevel: 'info'
      }],
      ['REQUIRED_FIELD_MISSING', {
        code: 'VAL_002',
        publicMessage: 'Required information is missing',
        logLevel: 'info'
      }],

      // Business Logic Errors
      ['ORDER_ALREADY_PROCESSED', {
        code: 'BIZ_001',
        publicMessage: 'This order has already been processed',
        logLevel: 'info'
      }],
      ['INSUFFICIENT_BALANCE', {
        code: 'BIZ_002',
        publicMessage: 'Insufficient account balance',
        logLevel: 'info'
      }],
      ['SERVICE_UNAVAILABLE', {
        code: 'BIZ_003',
        publicMessage: 'Service is temporarily unavailable',
        logLevel: 'warn'
      }],

      // System Errors (High Security)
      ['DATABASE_ERROR', {
        code: 'SYS_001',
        publicMessage: 'A system error occurred. Please try again later',
        logLevel: 'error',
        shouldAlert: true
      }],
      ['EXTERNAL_SERVICE_ERROR', {
        code: 'SYS_002',
        publicMessage: 'External service temporarily unavailable',
        logLevel: 'error'
      }],
      ['CONFIGURATION_ERROR', {
        code: 'SYS_003',
        publicMessage: 'System configuration error',
        logLevel: 'error',
        shouldAlert: true
      }],

      // Rate Limiting
      ['RATE_LIMIT_EXCEEDED', {
        code: 'RATE_001',
        publicMessage: 'Too many requests. Please try again later',
        logLevel: 'warn'
      }],

      // Generic Fallbacks
      ['UNKNOWN_ERROR', {
        code: 'GEN_001',
        publicMessage: 'An unexpected error occurred',
        logLevel: 'error',
        shouldAlert: true
      }]
    ]);
  }

  // Ordered regex fallbacks applied to the raw error message when no exact
  // code matched. Order matters: earlier (more security-sensitive) patterns
  // win over later generic ones.
  private initializePatternMappings(): Array<{ pattern: RegExp; mapping: SecureErrorMapping }> {
    return [
      // Database-related patterns
      {
        pattern: /database|connection|sql|prisma|postgres/i,
        mapping: {
          code: 'SYS_001',
          publicMessage: 'A system error occurred. Please try again later',
          logLevel: 'error',
          shouldAlert: true
        }
      },

      // Authentication patterns
      {
        pattern: /password|credential|token|secret|key|auth/i,
        mapping: {
          code: 'AUTH_001',
          publicMessage: 'Authentication failed',
          logLevel: 'warn'
        }
      },

      // File system patterns
      {
        pattern: /file|path|directory|permission denied|enoent|eacces/i,
        mapping: {
          code: 'SYS_002',
          publicMessage: 'System resource error',
          logLevel: 'error',
          shouldAlert: true
        }
      },

      // Network/External service patterns
      {
        pattern: /network|timeout|connection refused|econnrefused|whmcs|salesforce/i,
        mapping: {
          code: 'SYS_002',
          publicMessage: 'External service temporarily unavailable',
          logLevel: 'error'
        }
      },

      // Stack trace patterns
      {
        pattern: /\s+at\s+|\.js:\d+|\.ts:\d+|stack trace/i,
        mapping: {
          code: 'SYS_001',
          publicMessage: 'A system error occurred. Please try again later',
          logLevel: 'error',
          shouldAlert: true
        }
      },

      // Memory/Resource patterns
      {
        pattern: /memory|heap|out of memory|resource|limit exceeded/i,
        mapping: {
          code: 'SYS_003',
          publicMessage: 'System resources temporarily unavailable',
          logLevel: 'error',
          shouldAlert: true
        }
      },

      // Validation patterns
      {
        pattern: /invalid|required|missing|validation|format/i,
        mapping: {
          code: 'VAL_001',
          publicMessage: 'The provided data is invalid',
          logLevel: 'info'
        }
      }
    ];
  }

  // Combine a chosen mapping with derived category/severity.
  // NOTE(review): `context` is accepted but not currently used here.
  private createClassification(
    originalMessage: string,
    mapping: SecureErrorMapping,
    context?: ErrorContext
  ): ErrorClassification {
    // Determine category and severity based on error code
    const category = this.determineCategory(mapping.code);
    const severity = this.determineSeverity(mapping.code, originalMessage);

    return {
      category,
      severity,
      mapping
    };
  }

  // Category follows the code prefix; anything unrecognized is 'system'.
  private determineCategory(code: string): ErrorClassification['category'] {
    if (code.startsWith('AUTH_')) return 'authentication';
    if (code.startsWith('AUTHZ_')) return 'authorization';
    if (code.startsWith('VAL_')) return 'validation';
    if (code.startsWith('BIZ_')) return 'business';
    if (code.startsWith('SYS_')) return 'system';
    return 'system';
  }

  // Severity heuristics keyed on the public code plus message content.
  private determineSeverity(code: string, message: string): ErrorClassification['severity'] {
    // Critical system errors
    if (code === 'SYS_001' || code === 'SYS_003') return 'critical';

    // High severity for authentication issues
    if (code.startsWith('AUTH_') && message.toLowerCase().includes('breach')) return 'high';

    // Medium for external service issues
    if (code === 'SYS_002') return 'medium';

    // Low for validation and business logic
    if (code.startsWith('VAL_') || code.startsWith('BIZ_')) return 'low';

    return 'medium';
  }

  // Fallback mapping: sensitive-looking messages become alerting system
  // errors; everything else becomes a generic unexpected error.
  private getDefaultMapping(message: string): SecureErrorMapping {
    // Analyze message for sensitivity
    if (this.containsSensitiveInfo(message)) {
      return {
        code: 'SYS_001',
        publicMessage: 'A system error occurred. Please try again later',
        logLevel: 'error',
        shouldAlert: true
      };
    }

    return {
      code: 'GEN_001',
      publicMessage: 'An unexpected error occurred',
      logLevel: 'error'
    };
  }

  // Heuristic scan for material that must never reach clients.
  private containsSensitiveInfo(message: string): boolean {
    const sensitivePatterns = [
      /password|secret|key|token|credential/i,
      /database|sql|connection/i,
      /file|path|directory/i,
      /\s+at\s+.*\.js:\d+/i, // Stack traces
      /[a-zA-Z]:[\\\/]/, // Windows paths
      /\/[a-zA-Z0-9._\-/]+\.(js|ts|py|php)/i, // Unix paths
      /\b(?:\d{1,3}\.){3}\d{1,3}\b/, // IP addresses
      /[A-Za-z0-9]{32,}/ // Long tokens/hashes
    ];

    return sensitivePatterns.some(pattern => pattern.test(message));
  }

  // Best-effort message extraction from Error instances, strings, or
  // message-bearing objects; never throws.
  private extractErrorMessage(error: unknown): string {
    if (error instanceof Error) {
      return error.message;
    }
    if (typeof error === 'string') {
      return error;
    }
    if (typeof error === 'object' && error !== null) {
      const obj = error as Record<string, unknown>;
      if (typeof obj.message === 'string') {
        return obj.message;
      }
    }
    return 'Unknown error';
  }

  // Pull a string `code` property off the error object, if any.
  private extractErrorCode(error: unknown): string | null {
    if (typeof error === 'object' && error !== null) {
      const obj = error as Record<string, unknown>;
      if (typeof obj.code === 'string') {
        return obj.code;
      }
    }
    return null;
  }

  // Strip paths, stack frames, IPs, credentialed URLs and long token-like
  // strings before the message is written to logs.
  private sanitizeForLogging(message: string): string {
    return message
      // Remove file paths
      .replace(/\/[a-zA-Z0-9._\-/]+\.(js|ts|py|php)/g, '[file]')
      // Remove stack traces
      .replace(/\s+at\s+.*/g, '')
      // Remove absolute paths
      .replace(/[a-zA-Z]:[\\\/][^:]+/g, '[path]')
      // Remove IP addresses
      .replace(/\b(?:\d{1,3}\.){3}\d{1,3}\b/g, '[ip]')
      // Remove URLs with credentials
      .replace(/https?:\/\/[^:]+:[^@]+@[^\s]+/g, '[url]')
      // Remove potential secrets
      .replace(/\b[A-Za-z0-9]{32,}\b/g, '[token]')
      .trim();
  }

  // Lighter redaction for dev-mode messages: hide obvious key=value secrets
  // but otherwise keep the original text intact.
  private sanitizeForDevelopment(message: string): string {
    // In development, show more but still remove the most sensitive parts
    return message
      .replace(/password[=:]\s*[^\s]+/gi, 'password=[HIDDEN]')
      .replace(/secret[=:]\s*[^\s]+/gi, 'secret=[HIDDEN]')
      .replace(/token[=:]\s*[^\s]+/gi, 'token=[HIDDEN]')
      .replace(/key[=:]\s*[^\s]+/gi, 'key=[HIDDEN]');
  }

  // Emit an alert record for critical, shouldAlert-flagged errors.
  private sendSecurityAlert(
    classification: ErrorClassification,
    context?: ErrorContext,
    logData?: Record<string, unknown>
  ): void {
    // In a real implementation, this would send alerts to monitoring systems
    // like Slack, PagerDuty, or custom alerting systems
    this.logger.error('SECURITY ALERT TRIGGERED', {
      alertType: 'CRITICAL_ERROR',
      errorCode: classification.mapping.code,
      category: classification.category,
      severity: classification.severity,
      context,
      timestamp: new Date().toISOString(),
      ...logData
    });
  }
}
|
||||
@ -52,9 +52,12 @@ type PubSubCtor = new (opts: {
|
||||
@Injectable()
|
||||
export class SalesforcePubSubSubscriber implements OnModuleInit, OnModuleDestroy {
|
||||
private client: PubSubClient | null = null;
|
||||
private clientAccessToken: string | null = null;
|
||||
private channel!: string;
|
||||
private replayCorruptionRecovered = false;
|
||||
private subscribeCallback!: SubscribeCallback;
|
||||
private pubSubCtor: PubSubCtor | null = null;
|
||||
private clientBuildInFlight: Promise<PubSubClient> | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly config: ConfigService,
|
||||
@ -77,171 +80,8 @@ export class SalesforcePubSubSubscriber implements OnModuleInit, OnModuleDestroy
|
||||
);
|
||||
|
||||
try {
|
||||
await this.sfConn.connect();
|
||||
const accessToken = this.sfConn.getAccessToken();
|
||||
const instanceUrl = this.sfConn.getInstanceUrl();
|
||||
if (!accessToken || !instanceUrl) {
|
||||
throw new Error("Salesforce access token || instance URL unavailable");
|
||||
}
|
||||
|
||||
const endpoint = this.config.get<string>(
|
||||
"SF_PUBSUB_ENDPOINT",
|
||||
"api.pubsub.salesforce.com:7443"
|
||||
);
|
||||
|
||||
const maybeCtor: unknown =
|
||||
(PubSubApiClientPkg as { default?: unknown })?.default ?? (PubSubApiClientPkg as unknown);
|
||||
const Ctor = maybeCtor as PubSubCtor;
|
||||
this.client = new Ctor({
|
||||
authType: "user-supplied",
|
||||
accessToken,
|
||||
instanceUrl,
|
||||
pubSubEndpoint: endpoint,
|
||||
});
|
||||
await this.client.connect();
|
||||
if (!this.client) throw new Error("Pub/Sub client not initialized after connect");
|
||||
const client = this.client;
|
||||
|
||||
const _replayKey = sfReplayKey(this.channel);
|
||||
const _replayMode = this.config.get<string>("SF_EVENTS_REPLAY", "LATEST");
|
||||
const numRequested = Number(this.config.get("SF_PUBSUB_NUM_REQUESTED", "50")) || 50;
|
||||
const maxQueue = Number(this.config.get("SF_PUBSUB_QUEUE_MAX", "100")) || 100;
|
||||
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
status: "connecting",
|
||||
since: Date.now(),
|
||||
});
|
||||
|
||||
this.subscribeCallback = async (subscription, callbackType, data) => {
|
||||
try {
|
||||
const argTypes = [typeof subscription, typeof callbackType, typeof data];
|
||||
const type = callbackType;
|
||||
const typeNorm = String(type || "").toLowerCase();
|
||||
const topic = subscription.topicName || this.channel;
|
||||
|
||||
if (typeNorm === "data" || typeNorm === "event") {
|
||||
const event = data as SalesforcePubSubEvent;
|
||||
// Basic breadcrumb to confirm we are handling data callbacks
|
||||
this.logger.debug("SF Pub/Sub data callback received", {
|
||||
topic,
|
||||
argTypes,
|
||||
hasPayload: Boolean(event?.payload),
|
||||
});
|
||||
const payload = event?.payload;
|
||||
|
||||
// Only check parsed payload
|
||||
const orderIdVal = payload?.["OrderId__c"] ?? payload?.["OrderId"];
|
||||
const orderId = typeof orderIdVal === "string" ? orderIdVal : undefined;
|
||||
if (!orderId) {
|
||||
this.logger.warn("Pub/Sub event missing OrderId__c; skipping", {
|
||||
argTypes,
|
||||
topic,
|
||||
payloadKeys: payload ? Object.keys(payload) : [],
|
||||
});
|
||||
const depth = await this.provisioningQueue.depth();
|
||||
if (depth < maxQueue) {
|
||||
await client.requestAdditionalEvents(topic, 1);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const replayVal = (event as { replayId?: unknown })?.replayId;
|
||||
const idempotencyKey =
|
||||
typeof replayVal === "number" || typeof replayVal === "string"
|
||||
? String(replayVal)
|
||||
: String(Date.now());
|
||||
const pubsubReplayId = typeof replayVal === "number" ? replayVal : undefined;
|
||||
|
||||
await this.provisioningQueue.enqueue({
|
||||
sfOrderId: orderId,
|
||||
idempotencyKey,
|
||||
pubsubReplayId,
|
||||
});
|
||||
this.logger.log("Enqueued provisioning job from SF event", {
|
||||
sfOrderId: orderId,
|
||||
replayId: pubsubReplayId,
|
||||
topic,
|
||||
});
|
||||
|
||||
// Do not request more here; rely on 'lastevent' to top-up
|
||||
} else if (typeNorm === "lastevent") {
|
||||
const depth = await this.provisioningQueue.depth();
|
||||
const available = Math.max(0, maxQueue - depth);
|
||||
const desired = Math.max(0, Math.min(numRequested, available));
|
||||
if (desired > 0) {
|
||||
await client.requestAdditionalEvents(topic, desired);
|
||||
}
|
||||
} else if (typeNorm === "grpckeepalive") {
|
||||
const latestVal = (data as { latestReplayId?: unknown })?.latestReplayId;
|
||||
const latest = typeof latestVal === "number" ? latestVal : undefined;
|
||||
if (typeof latest === "number") {
|
||||
await this.cache.set(sfLatestSeenKey(this.channel), {
|
||||
id: String(latest),
|
||||
at: Date.now(),
|
||||
});
|
||||
}
|
||||
} else if (typeNorm === "grpcstatus" || typeNorm === "end") {
|
||||
// No-op; informational
|
||||
} else if (typeNorm === "error") {
|
||||
this.logger.warn("SF Pub/Sub stream error", { topic, data });
|
||||
try {
|
||||
// Detect replay id corruption and auto-recover once by clearing the cursor and resubscribing
|
||||
const errorData = data as SalesforcePubSubError;
|
||||
const details = errorData.details || "";
|
||||
const metadata = errorData.metadata || {};
|
||||
const errorCodes = Array.isArray(metadata["error-code"])
|
||||
? metadata["error-code"]
|
||||
: [];
|
||||
const hasCorruptionCode = errorCodes.some(code =>
|
||||
String(code).includes("replayid.corrupted")
|
||||
);
|
||||
const mentionsReplayValidation = /Replay ID validation failed/i.test(details);
|
||||
|
||||
if (
|
||||
(hasCorruptionCode || mentionsReplayValidation) &&
|
||||
!this.replayCorruptionRecovered
|
||||
) {
|
||||
this.replayCorruptionRecovered = true;
|
||||
const key = sfReplayKey(this.channel);
|
||||
await this.cache.del(key);
|
||||
this.logger.warn(
|
||||
"Cleared invalid Salesforce Pub/Sub replay cursor; retrying subscription",
|
||||
{
|
||||
channel: this.channel,
|
||||
key,
|
||||
}
|
||||
);
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
status: "reconnecting",
|
||||
since: Date.now(),
|
||||
});
|
||||
// Try re-subscribing without the invalid cursor
|
||||
await this.subscribeWithPolicy();
|
||||
}
|
||||
} catch (recoveryErr) {
|
||||
this.logger.warn("SF Pub/Sub replay corruption auto-recovery failed", {
|
||||
error: recoveryErr instanceof Error ? recoveryErr.message : String(recoveryErr),
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Unknown callback type: log once with minimal context
|
||||
const maybeEvent = data as SalesforcePubSubEvent | undefined;
|
||||
const hasPayload = Boolean(maybeEvent?.payload);
|
||||
this.logger.debug("SF Pub/Sub callback ignored (unknown type)", {
|
||||
type,
|
||||
topic,
|
||||
argTypes,
|
||||
hasPayload,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error("Pub/Sub subscribe callback failed", {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
await this.subscribeWithPolicy();
|
||||
this.subscribeCallback = this.buildSubscribeCallback();
|
||||
await this.subscribeWithPolicy(true);
|
||||
} catch (error) {
|
||||
this.logger.error("Salesforce Pub/Sub subscription failed", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
@ -261,10 +101,7 @@ export class SalesforcePubSubSubscriber implements OnModuleInit, OnModuleDestroy
|
||||
|
||||
async onModuleDestroy(): Promise<void> {
|
||||
try {
|
||||
if (this.client) {
|
||||
await this.client.close();
|
||||
this.client = null;
|
||||
}
|
||||
await this.safeCloseClient();
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
status: "disconnected",
|
||||
since: Date.now(),
|
||||
@ -276,30 +113,226 @@ export class SalesforcePubSubSubscriber implements OnModuleInit, OnModuleDestroy
|
||||
}
|
||||
}
|
||||
|
||||
private async subscribeWithPolicy(): Promise<void> {
|
||||
if (!this.client) throw new Error("Pub/Sub client not initialized");
|
||||
if (!this.subscribeCallback) throw new Error("Subscribe callback not initialized");
|
||||
private buildSubscribeCallback(): SubscribeCallback {
|
||||
return async (subscription, callbackType, data) => {
|
||||
try {
|
||||
const argTypes = [typeof subscription, typeof callbackType, typeof data];
|
||||
const type = callbackType;
|
||||
const typeNorm = String(type || "").toLowerCase();
|
||||
const topic = subscription.topicName || this.channel;
|
||||
|
||||
const _replayMode = this.config.get<string>("SF_EVENTS_REPLAY", "LATEST");
|
||||
const numRequested = Number(this.config.get("SF_PUBSUB_NUM_REQUESTED", "50")) || 50;
|
||||
const _replayKey = sfReplayKey(this.channel);
|
||||
const storedReplay = _replayMode !== "ALL" ? await this.cache.get<string>(_replayKey) : null;
|
||||
if (typeNorm === "data" || typeNorm === "event") {
|
||||
const event = data as SalesforcePubSubEvent;
|
||||
this.logger.debug("SF Pub/Sub data callback received", {
|
||||
topic,
|
||||
argTypes,
|
||||
hasPayload: Boolean(event?.payload),
|
||||
});
|
||||
const payload = event?.payload;
|
||||
|
||||
if (storedReplay && _replayMode !== "ALL") {
|
||||
await this.client.subscribeFromReplayId(
|
||||
const orderIdVal = payload?.["OrderId__c"] ?? payload?.["OrderId"];
|
||||
const orderId = typeof orderIdVal === "string" ? orderIdVal : undefined;
|
||||
if (!orderId) {
|
||||
this.logger.warn("Pub/Sub event missing OrderId__c; skipping", {
|
||||
argTypes,
|
||||
topic,
|
||||
payloadKeys: payload ? Object.keys(payload) : [],
|
||||
});
|
||||
const depth = await this.provisioningQueue.depth();
|
||||
if (depth < this.getMaxQueueSize()) {
|
||||
const activeClient = this.client;
|
||||
if (activeClient) {
|
||||
await activeClient.requestAdditionalEvents(topic, 1);
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const replayVal = (event as { replayId?: unknown })?.replayId;
|
||||
const idempotencyKey =
|
||||
typeof replayVal === "number" || typeof replayVal === "string"
|
||||
? String(replayVal)
|
||||
: String(Date.now());
|
||||
const pubsubReplayId = typeof replayVal === "number" ? replayVal : undefined;
|
||||
|
||||
await this.provisioningQueue.enqueue({
|
||||
sfOrderId: orderId,
|
||||
idempotencyKey,
|
||||
pubsubReplayId,
|
||||
});
|
||||
this.logger.log("Enqueued provisioning job from SF event", {
|
||||
sfOrderId: orderId,
|
||||
replayId: pubsubReplayId,
|
||||
topic,
|
||||
});
|
||||
} else if (typeNorm === "lastevent") {
|
||||
const depth = await this.provisioningQueue.depth();
|
||||
const available = Math.max(0, this.getMaxQueueSize() - depth);
|
||||
const desired = Math.max(0, Math.min(this.getNumRequested(), available));
|
||||
if (desired > 0) {
|
||||
const activeClient = this.client;
|
||||
if (activeClient) {
|
||||
await activeClient.requestAdditionalEvents(topic, desired);
|
||||
}
|
||||
}
|
||||
} else if (typeNorm === "grpckeepalive") {
|
||||
const latestVal = (data as { latestReplayId?: unknown })?.latestReplayId;
|
||||
const latest = typeof latestVal === "number" ? latestVal : undefined;
|
||||
if (typeof latest === "number") {
|
||||
await this.cache.set(sfLatestSeenKey(this.channel), {
|
||||
id: String(latest),
|
||||
at: Date.now(),
|
||||
});
|
||||
}
|
||||
} else if (typeNorm === "grpcstatus" || typeNorm === "end") {
|
||||
// Informational – no action required
|
||||
} else if (typeNorm === "error") {
|
||||
this.logger.warn("SF Pub/Sub stream error", { topic, data });
|
||||
try {
|
||||
const errorData = data as SalesforcePubSubError;
|
||||
const details = errorData.details || "";
|
||||
const metadata = errorData.metadata || {};
|
||||
const errorCodes = Array.isArray(metadata["error-code"])
|
||||
? metadata["error-code"]
|
||||
: [];
|
||||
const hasCorruptionCode = errorCodes.some(code =>
|
||||
String(code).includes("replayid.corrupted")
|
||||
);
|
||||
const mentionsReplayValidation = /Replay ID validation failed/i.test(details);
|
||||
|
||||
if ((hasCorruptionCode || mentionsReplayValidation) && !this.replayCorruptionRecovered) {
|
||||
this.replayCorruptionRecovered = true;
|
||||
const key = sfReplayKey(this.channel);
|
||||
await this.cache.del(key);
|
||||
this.logger.warn(
|
||||
"Cleared invalid Salesforce Pub/Sub replay cursor; retrying subscription",
|
||||
{
|
||||
channel: this.channel,
|
||||
key,
|
||||
}
|
||||
);
|
||||
}
|
||||
} catch (recoveryErr) {
|
||||
this.logger.warn("SF Pub/Sub replay corruption auto-recovery failed", {
|
||||
error: recoveryErr instanceof Error ? recoveryErr.message : String(recoveryErr),
|
||||
});
|
||||
} finally {
|
||||
await this.recoverFromStreamError();
|
||||
}
|
||||
} else {
|
||||
const maybeEvent = data as SalesforcePubSubEvent | undefined;
|
||||
const hasPayload = Boolean(maybeEvent?.payload);
|
||||
this.logger.debug("SF Pub/Sub callback ignored (unknown type)", {
|
||||
type,
|
||||
topic,
|
||||
argTypes,
|
||||
hasPayload,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
this.logger.error("Pub/Sub subscribe callback failed", {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private getNumRequested(): number {
|
||||
return Number(this.config.get("SF_PUBSUB_NUM_REQUESTED", "50")) || 50;
|
||||
}
|
||||
|
||||
private getMaxQueueSize(): number {
|
||||
return Number(this.config.get("SF_PUBSUB_QUEUE_MAX", "100")) || 100;
|
||||
}
|
||||
|
||||
private getPubSubCtor(): PubSubCtor {
|
||||
if (this.pubSubCtor) {
|
||||
return this.pubSubCtor;
|
||||
}
|
||||
const maybeCtor =
|
||||
(PubSubApiClientPkg as { default?: unknown })?.default ?? (PubSubApiClientPkg as unknown);
|
||||
this.pubSubCtor = maybeCtor as PubSubCtor;
|
||||
return this.pubSubCtor;
|
||||
}
|
||||
|
||||
private async ensureClient(forceRefresh = false): Promise<PubSubClient> {
|
||||
if (this.clientBuildInFlight && !forceRefresh) {
|
||||
return this.clientBuildInFlight;
|
||||
}
|
||||
|
||||
this.clientBuildInFlight = (async () => {
|
||||
await this.sfConn.ensureConnected();
|
||||
const accessToken = this.sfConn.getAccessToken();
|
||||
const instanceUrl = this.sfConn.getInstanceUrl();
|
||||
if (!accessToken || !instanceUrl) {
|
||||
throw new Error("Salesforce access token || instance URL unavailable");
|
||||
}
|
||||
|
||||
const tokenChanged = this.clientAccessToken !== accessToken;
|
||||
|
||||
if (!this.client || forceRefresh || tokenChanged) {
|
||||
await this.safeCloseClient();
|
||||
|
||||
const endpoint = this.config.get<string>(
|
||||
"SF_PUBSUB_ENDPOINT",
|
||||
"api.pubsub.salesforce.com:7443"
|
||||
);
|
||||
const Ctor = this.getPubSubCtor();
|
||||
const client = new Ctor({
|
||||
authType: "user-supplied",
|
||||
accessToken,
|
||||
instanceUrl,
|
||||
pubSubEndpoint: endpoint,
|
||||
});
|
||||
|
||||
await client.connect();
|
||||
this.client = client;
|
||||
this.clientAccessToken = accessToken;
|
||||
this.replayCorruptionRecovered = false;
|
||||
}
|
||||
|
||||
return this.client!;
|
||||
})();
|
||||
|
||||
try {
|
||||
return await this.clientBuildInFlight;
|
||||
} finally {
|
||||
this.clientBuildInFlight = null;
|
||||
}
|
||||
}
|
||||
|
||||
private async subscribeWithPolicy(forceClientRefresh = false): Promise<void> {
|
||||
if (!this.subscribeCallback) {
|
||||
throw new Error("Subscribe callback not initialized");
|
||||
}
|
||||
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
status: "connecting",
|
||||
since: Date.now(),
|
||||
});
|
||||
|
||||
const client = await this.ensureClient(forceClientRefresh);
|
||||
|
||||
const replayMode = this.config.get<string>("SF_EVENTS_REPLAY", "LATEST");
|
||||
const replayKey = sfReplayKey(this.channel);
|
||||
const storedReplay = replayMode !== "ALL" ? await this.cache.get<string>(replayKey) : null;
|
||||
const numRequested = this.getNumRequested();
|
||||
|
||||
if (storedReplay && replayMode !== "ALL") {
|
||||
await client.subscribeFromReplayId(
|
||||
this.channel,
|
||||
this.subscribeCallback,
|
||||
numRequested,
|
||||
Number(storedReplay)
|
||||
);
|
||||
} else if (_replayMode === "ALL") {
|
||||
await this.client.subscribeFromEarliestEvent(
|
||||
} else if (replayMode === "ALL") {
|
||||
await client.subscribeFromEarliestEvent(
|
||||
this.channel,
|
||||
this.subscribeCallback,
|
||||
numRequested
|
||||
);
|
||||
} else {
|
||||
await this.client.subscribe(this.channel, this.subscribeCallback, numRequested);
|
||||
await client.subscribe(this.channel, this.subscribeCallback, numRequested);
|
||||
}
|
||||
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
@ -309,5 +342,28 @@ export class SalesforcePubSubSubscriber implements OnModuleInit, OnModuleDestroy
|
||||
this.logger.log("Salesforce Pub/Sub subscription active", { channel: this.channel });
|
||||
}
|
||||
|
||||
// keys moved to shared util
|
||||
private async recoverFromStreamError(): Promise<void> {
|
||||
await this.cache.set(sfStatusKey(this.channel), {
|
||||
status: "reconnecting",
|
||||
since: Date.now(),
|
||||
});
|
||||
await this.safeCloseClient();
|
||||
await this.subscribeWithPolicy(true);
|
||||
}
|
||||
|
||||
private async safeCloseClient(): Promise<void> {
|
||||
if (!this.client) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await this.client.close();
|
||||
} catch (error) {
|
||||
this.logger.warn("Failed to close Salesforce Pub/Sub client", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
} finally {
|
||||
this.client = null;
|
||||
this.clientAccessToken = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,9 +1,11 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { QueueModule } from "@bff/core/queue/queue.module";
|
||||
import { SalesforceService } from "./salesforce.service";
|
||||
import { SalesforceConnection } from "./services/salesforce-connection.service";
|
||||
import { SalesforceAccountService } from "./services/salesforce-account.service";
|
||||
|
||||
@Module({
|
||||
imports: [QueueModule],
|
||||
providers: [SalesforceConnection, SalesforceAccountService, SalesforceService],
|
||||
exports: [SalesforceService, SalesforceConnection],
|
||||
})
|
||||
|
||||
@ -3,7 +3,7 @@ import { Logger } from "nestjs-pino";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
import { SalesforceConnection } from "./services/salesforce-connection.service";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import {
|
||||
SalesforceAccountService,
|
||||
type AccountData,
|
||||
@ -26,6 +26,7 @@ export class SalesforceService implements OnModuleInit {
|
||||
private configService: ConfigService,
|
||||
private connection: SalesforceConnection,
|
||||
private accountService: SalesforceAccountService,
|
||||
private fieldMapService: SalesforceFieldMapService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
@ -116,7 +117,7 @@ export class SalesforceService implements OnModuleInit {
|
||||
throw new Error("Salesforce connection not available");
|
||||
}
|
||||
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
const result = (await this.connection.query(
|
||||
`SELECT Id, Status, ${fields.order.activationStatus}, ${fields.order.whmcsOrderId},
|
||||
${fields.order.lastErrorCode}, ${fields.order.lastErrorMessage},
|
||||
|
||||
@ -2,6 +2,7 @@ import { Injectable, Inject } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
import { SalesforceRequestQueueService } from "@bff/core/queue/services/salesforce-request-queue.service";
|
||||
import * as jsforce from "jsforce";
|
||||
import * as jwt from "jsonwebtoken";
|
||||
import * as fs from "fs/promises";
|
||||
@ -15,9 +16,13 @@ export interface SalesforceSObjectApi {
|
||||
@Injectable()
|
||||
export class SalesforceConnection {
|
||||
private connection: jsforce.Connection;
|
||||
private tokenExpiresAt: number | null = null;
|
||||
private tokenIssuedAt: number | null = null;
|
||||
private connectPromise: Promise<void> | null = null;
|
||||
|
||||
constructor(
|
||||
private configService: ConfigService,
|
||||
private readonly requestQueue: SalesforceRequestQueueService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {
|
||||
this.connection = new jsforce.Connection({
|
||||
@ -43,7 +48,20 @@ export class SalesforceConnection {
|
||||
return this.connection?.instanceUrl as string | undefined;
|
||||
}
|
||||
|
||||
async connect(): Promise<void> {
|
||||
async connect(force = false): Promise<void> {
|
||||
if (this.connectPromise && !force) {
|
||||
return this.connectPromise;
|
||||
}
|
||||
|
||||
this.connectPromise = this.performConnect();
|
||||
try {
|
||||
await this.connectPromise;
|
||||
} finally {
|
||||
this.connectPromise = null;
|
||||
}
|
||||
}
|
||||
|
||||
private async performConnect(): Promise<void> {
|
||||
const nodeEnv =
|
||||
this.configService.get<string>("NODE_ENV") || process.env.NODE_ENV || "development";
|
||||
const isProd = nodeEnv === "production";
|
||||
@ -149,6 +167,11 @@ export class SalesforceConnection {
|
||||
this.connection.accessToken = tokenResponse.access_token;
|
||||
this.connection.instanceUrl = tokenResponse.instance_url;
|
||||
|
||||
const tokenTtlMs = this.getTokenTtl();
|
||||
const issuedAt = Date.now();
|
||||
this.tokenIssuedAt = issuedAt;
|
||||
this.tokenExpiresAt = issuedAt + tokenTtlMs;
|
||||
|
||||
this.logger.log("✅ Salesforce connection established");
|
||||
} catch (error) {
|
||||
const message = getErrorMessage(error);
|
||||
@ -165,10 +188,7 @@ export class SalesforceConnection {
|
||||
async query(soql: string): Promise<unknown> {
|
||||
try {
|
||||
// Ensure we have a base URL and token
|
||||
if (!this.isConnected()) {
|
||||
this.logger.warn("Salesforce not connected; attempting to establish connection");
|
||||
await this.connect();
|
||||
}
|
||||
await this.ensureConnected();
|
||||
return await this.connection.query(soql);
|
||||
} catch (error: unknown) {
|
||||
// Check if this is a session expiration error
|
||||
@ -177,7 +197,7 @@ export class SalesforceConnection {
|
||||
|
||||
try {
|
||||
// Re-authenticate
|
||||
await this.connect();
|
||||
await this.connect(true);
|
||||
|
||||
// Retry the query once
|
||||
this.logger.debug("Retrying query after re-authentication");
|
||||
@ -220,10 +240,7 @@ export class SalesforceConnection {
|
||||
return {
|
||||
create: async (data: object) => {
|
||||
try {
|
||||
if (!this.isConnected()) {
|
||||
this.logger.warn("Salesforce not connected; attempting to establish connection");
|
||||
await this.connect();
|
||||
}
|
||||
await this.ensureConnected();
|
||||
return await originalSObject.create(data);
|
||||
} catch (error: unknown) {
|
||||
if (this.isSessionExpiredError(error)) {
|
||||
@ -232,7 +249,7 @@ export class SalesforceConnection {
|
||||
);
|
||||
|
||||
try {
|
||||
await this.connect();
|
||||
await this.connect(true);
|
||||
const newSObject = this.connection.sobject(type);
|
||||
return await newSObject.create(data);
|
||||
} catch (retryError) {
|
||||
@ -249,10 +266,7 @@ export class SalesforceConnection {
|
||||
|
||||
update: async (data: object & { Id: string }) => {
|
||||
try {
|
||||
if (!this.isConnected()) {
|
||||
this.logger.warn("Salesforce not connected; attempting to establish connection");
|
||||
await this.connect();
|
||||
}
|
||||
await this.ensureConnected();
|
||||
return await originalSObject.update(data);
|
||||
} catch (error: unknown) {
|
||||
if (this.isSessionExpiredError(error)) {
|
||||
@ -261,7 +275,7 @@ export class SalesforceConnection {
|
||||
);
|
||||
|
||||
try {
|
||||
await this.connect();
|
||||
await this.connect(true);
|
||||
const newSObject = this.connection.sobject(type);
|
||||
return await newSObject.update(data);
|
||||
} catch (retryError) {
|
||||
@ -281,4 +295,110 @@ export class SalesforceConnection {
|
||||
isConnected(): boolean {
|
||||
return !!this.connection.accessToken;
|
||||
}
|
||||
|
||||
private getTokenTtl(): number {
|
||||
const configured = Number(this.configService.get("SF_TOKEN_TTL_MS"));
|
||||
if (Number.isFinite(configured) && configured > 0) {
|
||||
return configured;
|
||||
}
|
||||
// Default to 12 minutes (tokens typically expire after 15 minutes)
|
||||
return 12 * 60 * 1000;
|
||||
}
|
||||
|
||||
private getRefreshBuffer(): number {
|
||||
const configured = Number(this.configService.get("SF_TOKEN_REFRESH_BUFFER_MS"));
|
||||
if (Number.isFinite(configured) && configured >= 0) {
|
||||
return configured;
|
||||
}
|
||||
return 60 * 1000; // 1 minute buffer
|
||||
}
|
||||
|
||||
private isTokenExpiring(): boolean {
|
||||
if (!this.tokenExpiresAt) {
|
||||
return true;
|
||||
}
|
||||
return Date.now() + this.getRefreshBuffer() >= this.tokenExpiresAt;
|
||||
}
|
||||
|
||||
async ensureConnected(): Promise<void> {
|
||||
if (!this.isConnected() || this.isTokenExpiring()) {
|
||||
this.logger.debug("Salesforce connection stale; refreshing access token");
|
||||
await this.connect(!this.isConnected());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a high-priority Salesforce request (jumps queue)
|
||||
*/
|
||||
async queryHighPriority(soql: string): Promise<unknown> {
|
||||
return this.requestQueue.executeHighPriority(async () => {
|
||||
await this.ensureConnected();
|
||||
return await this.connection.query(soql);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue metrics for monitoring
|
||||
*/
|
||||
getQueueMetrics() {
|
||||
return this.requestQueue.getMetrics();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue health status
|
||||
*/
|
||||
getQueueHealth() {
|
||||
return this.requestQueue.getHealthStatus();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get daily API usage information
|
||||
*/
|
||||
getDailyUsage() {
|
||||
return this.requestQueue.getDailyUsage();
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine query priority based on SOQL content
|
||||
*/
|
||||
private getQueryPriority(soql: string): number {
|
||||
const lowerSoql = soql.toLowerCase();
|
||||
|
||||
// High priority queries (critical for user experience)
|
||||
if (
|
||||
lowerSoql.includes("account") ||
|
||||
lowerSoql.includes("user") ||
|
||||
lowerSoql.includes("where id =")
|
||||
) {
|
||||
return 8;
|
||||
}
|
||||
|
||||
// Medium priority queries
|
||||
if (
|
||||
lowerSoql.includes("order") ||
|
||||
lowerSoql.includes("invoice") ||
|
||||
lowerSoql.includes("limit 1")
|
||||
) {
|
||||
return 5;
|
||||
}
|
||||
|
||||
// Low priority (bulk queries, reports)
|
||||
return 2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if query is long-running based on SOQL content
|
||||
*/
|
||||
private isLongRunningQuery(soql: string): boolean {
|
||||
const lowerSoql = soql.toLowerCase();
|
||||
|
||||
// Queries likely to take >20 seconds
|
||||
return (
|
||||
lowerSoql.includes("count(") ||
|
||||
lowerSoql.includes("group by") ||
|
||||
lowerSoql.includes("order by") ||
|
||||
(lowerSoql.includes("limit") && !lowerSoql.includes("limit 1")) ||
|
||||
lowerSoql.length > 500 // Very complex queries
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -5,6 +5,7 @@ import { WhmcsConfigService } from "../config/whmcs-config.service";
|
||||
import { WhmcsHttpClientService } from "./whmcs-http-client.service";
|
||||
import { WhmcsErrorHandlerService } from "./whmcs-error-handler.service";
|
||||
import { WhmcsApiMethodsService } from "./whmcs-api-methods.service";
|
||||
import { WhmcsRequestQueueService } from "@bff/core/queue/services/whmcs-request-queue.service";
|
||||
import type {
|
||||
WhmcsErrorResponse,
|
||||
WhmcsAddClientParams,
|
||||
@ -30,7 +31,8 @@ export class WhmcsConnectionOrchestratorService implements OnModuleInit {
|
||||
private readonly configService: WhmcsConfigService,
|
||||
private readonly httpClient: WhmcsHttpClientService,
|
||||
private readonly errorHandler: WhmcsErrorHandlerService,
|
||||
private readonly apiMethods: WhmcsApiMethodsService
|
||||
private readonly apiMethods: WhmcsApiMethodsService,
|
||||
private readonly requestQueue: WhmcsRequestQueueService
|
||||
) {}
|
||||
|
||||
async onModuleInit() {
|
||||
@ -57,32 +59,82 @@ export class WhmcsConnectionOrchestratorService implements OnModuleInit {
|
||||
// ==========================================
|
||||
|
||||
/**
|
||||
* Make a request to WHMCS API with full error handling
|
||||
* Make a request to WHMCS API with full error handling and queue management
|
||||
*/
|
||||
async makeRequest<T>(
|
||||
action: string,
|
||||
params: Record<string, unknown> = {},
|
||||
options: WhmcsRequestOptions = {}
|
||||
): Promise<T> {
|
||||
try {
|
||||
const config = this.configService.getConfig();
|
||||
const response = await this.httpClient.makeRequest<T>(config, action, params, options);
|
||||
// Wrap the actual request in the queue to prevent race conditions
|
||||
return this.requestQueue.execute(async () => {
|
||||
try {
|
||||
const config = this.configService.getConfig();
|
||||
const response = await this.httpClient.makeRequest<T>(config, action, params, options);
|
||||
|
||||
if (response.result === "error") {
|
||||
const errorResponse = response as WhmcsErrorResponse;
|
||||
this.errorHandler.handleApiError(errorResponse, action, params);
|
||||
if (response.result === "error") {
|
||||
const errorResponse = response as WhmcsErrorResponse;
|
||||
this.errorHandler.handleApiError(errorResponse, action, params);
|
||||
}
|
||||
|
||||
return response.data as T;
|
||||
} catch (error) {
|
||||
// If it's already a handled error, re-throw it
|
||||
if (this.isHandledException(error)) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Handle general request errors
|
||||
this.errorHandler.handleRequestError(error, action, params);
|
||||
}
|
||||
}, {
|
||||
priority: this.getRequestPriority(action),
|
||||
timeout: options.timeout,
|
||||
retryAttempts: options.retryAttempts,
|
||||
retryDelay: options.retryDelay,
|
||||
});
|
||||
}
|
||||
|
||||
return response.data as T;
|
||||
} catch (error) {
|
||||
// If it's already a handled error, re-throw it
|
||||
if (this.isHandledException(error)) {
|
||||
throw error;
|
||||
/**
|
||||
* Make a high-priority request to WHMCS API (jumps queue)
|
||||
*/
|
||||
async makeHighPriorityRequest<T>(
|
||||
action: string,
|
||||
params: Record<string, unknown> = {},
|
||||
options: WhmcsRequestOptions = {}
|
||||
): Promise<T> {
|
||||
return this.requestQueue.executeHighPriority(async () => {
|
||||
try {
|
||||
const config = this.configService.getConfig();
|
||||
const response = await this.httpClient.makeRequest<T>(config, action, params, options);
|
||||
|
||||
if (response.result === "error") {
|
||||
const errorResponse = response as WhmcsErrorResponse;
|
||||
this.errorHandler.handleApiError(errorResponse, action, params);
|
||||
}
|
||||
|
||||
return response.data as T;
|
||||
} catch (error) {
|
||||
if (this.isHandledException(error)) {
|
||||
throw error;
|
||||
}
|
||||
this.errorHandler.handleRequestError(error, action, params);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Handle general request errors
|
||||
this.errorHandler.handleRequestError(error, action, params);
|
||||
}
|
||||
/**
|
||||
* Get queue metrics for monitoring
|
||||
*/
|
||||
getQueueMetrics() {
|
||||
return this.requestQueue.getMetrics();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue health status
|
||||
*/
|
||||
getQueueHealth() {
|
||||
return this.requestQueue.getHealthStatus();
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
@ -224,6 +276,35 @@ export class WhmcsConnectionOrchestratorService implements OnModuleInit {
|
||||
// UTILITY METHODS
|
||||
// ==========================================
|
||||
|
||||
/**
|
||||
* Determine request priority based on action type
|
||||
*/
|
||||
private getRequestPriority(action: string): number {
|
||||
// High priority actions (critical for user experience)
|
||||
const highPriorityActions = [
|
||||
"ValidateLogin",
|
||||
"GetClientDetails",
|
||||
"GetInvoice",
|
||||
"CapturePayment",
|
||||
"CreateSsoToken"
|
||||
];
|
||||
|
||||
// Medium priority actions (important but can wait)
|
||||
const mediumPriorityActions = [
|
||||
"GetInvoices",
|
||||
"GetClientsProducts",
|
||||
"GetPayMethods"
|
||||
];
|
||||
|
||||
if (highPriorityActions.includes(action)) {
|
||||
return 8; // High priority
|
||||
} else if (mediumPriorityActions.includes(action)) {
|
||||
return 5; // Medium priority
|
||||
} else {
|
||||
return 2; // Low priority (default)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get connection statistics
|
||||
*/
|
||||
|
||||
@ -14,6 +14,7 @@ export interface WhmcsRequestOptions {
|
||||
useAdminAuth?: boolean;
|
||||
timeout?: number;
|
||||
retryAttempts?: number;
|
||||
retryDelay?: number;
|
||||
}
|
||||
|
||||
export interface WhmcsRetryConfig {
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { QueueModule } from "@bff/core/queue/queue.module";
|
||||
import { WhmcsCacheService } from "./cache/whmcs-cache.service";
|
||||
import { WhmcsService } from "./whmcs.service";
|
||||
import { WhmcsInvoiceService } from "./services/whmcs-invoice.service";
|
||||
@ -22,7 +23,7 @@ import { WhmcsErrorHandlerService } from "./connection/services/whmcs-error-hand
|
||||
import { WhmcsApiMethodsService } from "./connection/services/whmcs-api-methods.service";
|
||||
|
||||
@Module({
|
||||
imports: [ConfigModule],
|
||||
imports: [ConfigModule, QueueModule],
|
||||
providers: [
|
||||
// New modular transformer services
|
||||
WhmcsTransformerOrchestratorService,
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import { Controller, Post, Body, UseGuards, Get, Req, HttpCode, UsePipes } from "@nestjs/common";
|
||||
import type { Request } from "express";
|
||||
import { Controller, Post, Body, UseGuards, Get, Req, HttpCode, UsePipes, Res } from "@nestjs/common";
|
||||
import type { Request, Response } from "express";
|
||||
import { Throttle } from "@nestjs/throttler";
|
||||
import { AuthService } from "./auth.service";
|
||||
import { LocalAuthGuard } from "./guards/local-auth.guard";
|
||||
@ -33,12 +33,62 @@ import {
|
||||
refreshTokenRequestSchema,
|
||||
type RefreshTokenRequestInput,
|
||||
} from "@customer-portal/domain";
|
||||
import type { AuthTokens } from "@customer-portal/domain";
|
||||
|
||||
type RequestWithCookies = Request & { cookies?: Record<string, string | undefined> };
|
||||
|
||||
const EXTRACT_BEARER = (req: RequestWithCookies): string | undefined => {
|
||||
const authHeader = req.headers?.authorization;
|
||||
if (typeof authHeader === "string" && authHeader.startsWith("Bearer ")) {
|
||||
return authHeader.slice(7);
|
||||
}
|
||||
if (Array.isArray(authHeader) && authHeader.length > 0 && authHeader[0]?.startsWith("Bearer ")) {
|
||||
return authHeader[0]?.slice(7);
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const extractTokenFromRequest = (req: RequestWithCookies): string | undefined => {
|
||||
const headerToken = EXTRACT_BEARER(req);
|
||||
if (headerToken) {
|
||||
return headerToken;
|
||||
}
|
||||
const cookieToken = req.cookies?.access_token;
|
||||
return typeof cookieToken === "string" && cookieToken.length > 0 ? cookieToken : undefined;
|
||||
};
|
||||
|
||||
const calculateCookieMaxAge = (isoTimestamp: string): number => {
|
||||
const expiresAt = Date.parse(isoTimestamp);
|
||||
if (Number.isNaN(expiresAt)) {
|
||||
return 0;
|
||||
}
|
||||
return Math.max(0, expiresAt - Date.now());
|
||||
};
|
||||
|
||||
@ApiTags("auth")
|
||||
@Controller("auth")
|
||||
export class AuthController {
|
||||
constructor(private authService: AuthService) {}
|
||||
|
||||
private setAuthCookies(res: Response, tokens: AuthTokens): void {
|
||||
const accessMaxAge = calculateCookieMaxAge(tokens.expiresAt);
|
||||
const refreshMaxAge = calculateCookieMaxAge(tokens.refreshExpiresAt);
|
||||
|
||||
res.setSecureCookie("access_token", tokens.accessToken, {
|
||||
maxAge: accessMaxAge,
|
||||
path: "/",
|
||||
});
|
||||
res.setSecureCookie("refresh_token", tokens.refreshToken, {
|
||||
maxAge: refreshMaxAge,
|
||||
path: "/",
|
||||
});
|
||||
}
|
||||
|
||||
private clearAuthCookies(res: Response): void {
|
||||
res.setSecureCookie("access_token", "", { maxAge: 0, path: "/" });
|
||||
res.setSecureCookie("refresh_token", "", { maxAge: 0, path: "/" });
|
||||
}
|
||||
|
||||
@Public()
|
||||
@Post("validate-signup")
|
||||
@UseGuards(AuthThrottleGuard)
|
||||
@ -49,7 +99,10 @@ export class AuthController {
|
||||
@ApiResponse({ status: 409, description: "Customer already has account" })
|
||||
@ApiResponse({ status: 400, description: "Customer number not found" })
|
||||
@ApiResponse({ status: 429, description: "Too many validation attempts" })
|
||||
async validateSignup(@Body() validateData: ValidateSignupRequestInput, @Req() req: Request) {
|
||||
async validateSignup(
|
||||
@Body() validateData: ValidateSignupRequestInput,
|
||||
@Req() req: Request
|
||||
) {
|
||||
return this.authService.validateSignup(validateData, req);
|
||||
}
|
||||
|
||||
@ -91,8 +144,14 @@ export class AuthController {
|
||||
@ApiResponse({ status: 201, description: "User created successfully" })
|
||||
@ApiResponse({ status: 409, description: "User already exists" })
|
||||
@ApiResponse({ status: 429, description: "Too many signup attempts" })
|
||||
async signup(@Body() signupData: SignupRequestInput, @Req() req: Request) {
|
||||
return this.authService.signup(signupData, req);
|
||||
async signup(
|
||||
@Body() signupData: SignupRequestInput,
|
||||
@Req() req: Request,
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
const result = await this.authService.signup(signupData, req);
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Public()
|
||||
@ -103,23 +162,25 @@ export class AuthController {
|
||||
@ApiResponse({ status: 200, description: "Login successful" })
|
||||
@ApiResponse({ status: 401, description: "Invalid credentials" })
|
||||
@ApiResponse({ status: 429, description: "Too many login attempts" })
|
||||
async login(@Req() req: Request & { user: { id: string; email: string; role: string } }) {
|
||||
return this.authService.login(req.user, req);
|
||||
async login(
|
||||
@Req() req: Request & { user: { id: string; email: string; role: string } },
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
const result = await this.authService.login(req.user, req);
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Post("logout")
|
||||
@ApiOperation({ summary: "Logout user" })
|
||||
@ApiResponse({ status: 200, description: "Logout successful" })
|
||||
async logout(@Req() req: Request & { user: { id: string } }) {
|
||||
const authHeader = req.headers.authorization as string | string[] | undefined;
|
||||
let bearer: string | undefined;
|
||||
if (typeof authHeader === "string") {
|
||||
bearer = authHeader;
|
||||
} else if (Array.isArray(authHeader) && authHeader.length > 0) {
|
||||
bearer = authHeader[0];
|
||||
}
|
||||
const token = bearer?.startsWith("Bearer ") ? bearer.slice(7) : undefined;
|
||||
await this.authService.logout(req.user.id, token ?? "", req);
|
||||
async logout(
|
||||
@Req() req: RequestWithCookies & { user: { id: string } },
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
const token = extractTokenFromRequest(req);
|
||||
await this.authService.logout(req.user.id, token, req);
|
||||
this.clearAuthCookies(res);
|
||||
return { message: "Logout successful" };
|
||||
}
|
||||
|
||||
@ -131,11 +192,18 @@ export class AuthController {
|
||||
@ApiResponse({ status: 200, description: "Token refreshed successfully" })
|
||||
@ApiResponse({ status: 401, description: "Invalid refresh token" })
|
||||
@ApiResponse({ status: 429, description: "Too many refresh attempts" })
|
||||
async refreshToken(@Body() body: RefreshTokenRequestInput, @Req() req: Request) {
|
||||
return this.authService.refreshTokens(body.refreshToken, {
|
||||
async refreshToken(
|
||||
@Body() body: RefreshTokenRequestInput,
|
||||
@Req() req: RequestWithCookies,
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
const refreshToken = body.refreshToken ?? req.cookies?.refresh_token;
|
||||
const result = await this.authService.refreshTokens(refreshToken, {
|
||||
deviceId: body.deviceId,
|
||||
userAgent: req.headers["user-agent"],
|
||||
});
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Public()
|
||||
@ -163,8 +231,14 @@ export class AuthController {
|
||||
@ApiResponse({ status: 200, description: "Password set successfully" })
|
||||
@ApiResponse({ status: 401, description: "User not found" })
|
||||
@ApiResponse({ status: 429, description: "Too many password attempts" })
|
||||
async setPassword(@Body() setPasswordData: SetPasswordRequestInput, @Req() _req: Request) {
|
||||
return this.authService.setPassword(setPasswordData);
|
||||
async setPassword(
|
||||
@Body() setPasswordData: SetPasswordRequestInput,
|
||||
@Req() _req: Request,
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
const result = await this.authService.setPassword(setPasswordData);
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Public()
|
||||
@ -194,8 +268,10 @@ export class AuthController {
|
||||
@UsePipes(new ZodValidationPipe(passwordResetSchema))
|
||||
@ApiOperation({ summary: "Reset password with token" })
|
||||
@ApiResponse({ status: 200, description: "Password reset successful" })
|
||||
async resetPassword(@Body() body: PasswordResetInput) {
|
||||
return this.authService.resetPassword(body.token, body.password);
|
||||
async resetPassword(@Body() body: PasswordResetInput, @Res({ passthrough: true }) res: Response) {
|
||||
const result = await this.authService.resetPassword(body.token, body.password);
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Post("change-password")
|
||||
@ -205,14 +281,17 @@ export class AuthController {
|
||||
@ApiResponse({ status: 200, description: "Password changed successfully" })
|
||||
async changePassword(
|
||||
@Req() req: Request & { user: { id: string } },
|
||||
@Body() body: ChangePasswordRequestInput
|
||||
@Body() body: ChangePasswordRequestInput,
|
||||
@Res({ passthrough: true }) res: Response
|
||||
) {
|
||||
return this.authService.changePassword(
|
||||
const result = await this.authService.changePassword(
|
||||
req.user.id,
|
||||
body.currentPassword,
|
||||
body.newPassword,
|
||||
req
|
||||
);
|
||||
this.setAuthCookies(res, result.tokens);
|
||||
return result;
|
||||
}
|
||||
|
||||
@Get("me")
|
||||
|
||||
@ -290,9 +290,19 @@ export class AuthService {
|
||||
}
|
||||
}
|
||||
|
||||
async logout(userId: string, token: string, _request?: Request): Promise<void> {
|
||||
// Blacklist the token
|
||||
await this.tokenBlacklistService.blacklistToken(token);
|
||||
async logout(userId: string, token?: string, _request?: Request): Promise<void> {
|
||||
if (token) {
|
||||
await this.tokenBlacklistService.blacklistToken(token);
|
||||
}
|
||||
|
||||
try {
|
||||
await this.tokenService.revokeAllUserTokens(userId);
|
||||
} catch (error) {
|
||||
this.logger.warn("Failed to revoke refresh tokens during logout", {
|
||||
userId,
|
||||
error: getErrorMessage(error),
|
||||
});
|
||||
}
|
||||
|
||||
await this.auditService.logAuthEvent(AuditAction.LOGOUT, userId, {}, _request, true);
|
||||
}
|
||||
@ -431,10 +441,18 @@ export class AuthService {
|
||||
}
|
||||
|
||||
async refreshTokens(
|
||||
refreshToken: string,
|
||||
refreshToken: string | undefined,
|
||||
deviceInfo?: { deviceId?: string; userAgent?: string }
|
||||
) {
|
||||
return this.tokenService.refreshTokens(refreshToken, deviceInfo);
|
||||
if (!refreshToken) {
|
||||
throw new UnauthorizedException("Invalid refresh token");
|
||||
}
|
||||
|
||||
const { tokens, user } = await this.tokenService.refreshTokens(refreshToken, deviceInfo);
|
||||
return {
|
||||
user,
|
||||
tokens,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -9,9 +9,23 @@ import { Reflector } from "@nestjs/core";
|
||||
import { AuthGuard } from "@nestjs/passport";
|
||||
import { ExtractJwt } from "passport-jwt";
|
||||
|
||||
import type { Request } from "express";
|
||||
|
||||
import { TokenBlacklistService } from "../services/token-blacklist.service";
|
||||
import { IS_PUBLIC_KEY } from "../decorators/public.decorator";
|
||||
|
||||
type RequestWithCookies = Request & { cookies?: Record<string, string | undefined> };
|
||||
|
||||
const headerExtractor = ExtractJwt.fromAuthHeaderAsBearerToken();
|
||||
const extractTokenFromRequest = (request: RequestWithCookies): string | undefined => {
|
||||
const headerToken = headerExtractor(request);
|
||||
if (headerToken) {
|
||||
return headerToken;
|
||||
}
|
||||
const cookieToken = request.cookies?.access_token;
|
||||
return typeof cookieToken === "string" && cookieToken.length > 0 ? cookieToken : undefined;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class GlobalAuthGuard extends AuthGuard("jwt") implements CanActivate {
|
||||
private readonly logger = new Logger(GlobalAuthGuard.name);
|
||||
@ -24,11 +38,9 @@ export class GlobalAuthGuard extends AuthGuard("jwt") implements CanActivate {
|
||||
}
|
||||
|
||||
override async canActivate(context: ExecutionContext): Promise<boolean> {
|
||||
const request = context.switchToHttp().getRequest<{
|
||||
method: string;
|
||||
url: string;
|
||||
route?: { path?: string };
|
||||
}>();
|
||||
const request = context
|
||||
.switchToHttp()
|
||||
.getRequest<RequestWithCookies & { method: string; url: string; route?: { path?: string } }>();
|
||||
const route = `${request.method} ${request.route?.path ?? request.url}`;
|
||||
|
||||
// Check if the route is marked as public
|
||||
@ -51,7 +63,7 @@ export class GlobalAuthGuard extends AuthGuard("jwt") implements CanActivate {
|
||||
}
|
||||
|
||||
// Then check token blacklist
|
||||
const token = ExtractJwt.fromAuthHeaderAsBearerToken()(request);
|
||||
const token = extractTokenFromRequest(request);
|
||||
|
||||
if (token) {
|
||||
const isBlacklisted = await this.tokenBlacklistService.isTokenBlacklisted(token);
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { JwtService } from "@nestjs/jwt";
|
||||
import { createHash } from "crypto";
|
||||
import { Redis } from "ioredis";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { parseJwtExpiry } from "../utils/jwt-expiry.util";
|
||||
@ -41,7 +42,7 @@ export class TokenBlacklistService {
|
||||
const ttl = Math.max(0, Math.floor((expiryTime - currentTime) / 1000)); // Convert to seconds
|
||||
|
||||
if (ttl > 0) {
|
||||
await this.redis.setex(`blacklist:${token}`, ttl, "1");
|
||||
await this.redis.setex(this.buildBlacklistKey(token), ttl, "1");
|
||||
this.logger.debug(`Token blacklisted successfully for ${ttl} seconds`);
|
||||
} else {
|
||||
this.logger.debug("Token already expired, not blacklisting");
|
||||
@ -50,7 +51,7 @@ export class TokenBlacklistService {
|
||||
// If we can't parse the token, blacklist it for the default JWT expiry time
|
||||
try {
|
||||
const defaultTtl = parseJwtExpiry(this.configService.get("JWT_EXPIRES_IN", "7d"));
|
||||
await this.redis.setex(`blacklist:${token}`, defaultTtl, "1");
|
||||
await this.redis.setex(this.buildBlacklistKey(token), defaultTtl, "1");
|
||||
this.logger.debug(`Token blacklisted with default TTL: ${defaultTtl} seconds`);
|
||||
} catch (err) {
|
||||
this.logger.warn(
|
||||
@ -65,7 +66,7 @@ export class TokenBlacklistService {
|
||||
|
||||
async isTokenBlacklisted(token: string): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.redis.get(`blacklist:${token}`);
|
||||
const result = await this.redis.get(this.buildBlacklistKey(token));
|
||||
return result !== null;
|
||||
} catch (err) {
|
||||
// If Redis is unavailable, treat as not blacklisted to avoid blocking auth
|
||||
@ -75,4 +76,12 @@ export class TokenBlacklistService {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private buildBlacklistKey(token: string): string {
|
||||
return `blacklist:${this.hashToken(token)}`;
|
||||
}
|
||||
|
||||
private hashToken(token: string): string {
|
||||
return createHash("sha256").update(token).digest("hex");
|
||||
}
|
||||
}
|
||||
|
||||
@ -4,8 +4,9 @@ import { ConfigService } from "@nestjs/config";
|
||||
import { Redis } from "ioredis";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { randomBytes, createHash } from "crypto";
|
||||
import type { AuthTokens } from "@customer-portal/domain";
|
||||
import type { AuthTokens, AuthenticatedUser } from "@customer-portal/domain";
|
||||
import { UsersService } from "@bff/modules/users/users.service";
|
||||
import { mapPrismaUserToUserProfile } from "@bff/infra/utils/user-mapper.util";
|
||||
|
||||
export interface RefreshTokenPayload {
|
||||
userId: string;
|
||||
@ -156,7 +157,10 @@ export class AuthTokenService {
|
||||
deviceId?: string;
|
||||
userAgent?: string;
|
||||
}
|
||||
): Promise<AuthTokens> {
|
||||
): Promise<{ tokens: AuthTokens; user: AuthenticatedUser }> {
|
||||
if (!refreshToken) {
|
||||
throw new UnauthorizedException("Invalid refresh token");
|
||||
}
|
||||
try {
|
||||
// Verify refresh token
|
||||
const payload = this.jwtService.verify<RefreshTokenPayload>(refreshToken);
|
||||
@ -225,10 +229,14 @@ export class AuthTokenService {
|
||||
|
||||
// Generate new token pair
|
||||
const newTokenPair = await this.generateTokenPair(user, deviceInfo);
|
||||
const userProfile = mapPrismaUserToUserProfile(prismaUser);
|
||||
|
||||
this.logger.debug("Refreshed token pair", { userId: payload.userId });
|
||||
|
||||
return newTokenPair;
|
||||
return {
|
||||
tokens: newTokenPair,
|
||||
user: userProfile,
|
||||
};
|
||||
} catch (error) {
|
||||
this.logger.error("Token refresh failed", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
@ -248,7 +256,7 @@ export class AuthTokenService {
|
||||
.catch(() => null);
|
||||
|
||||
if (fallbackUser) {
|
||||
return this.generateTokenPair(
|
||||
const fallbackTokens = await this.generateTokenPair(
|
||||
{
|
||||
id: fallbackUser.id,
|
||||
email: fallbackUser.email,
|
||||
@ -256,6 +264,11 @@ export class AuthTokenService {
|
||||
},
|
||||
deviceInfo
|
||||
);
|
||||
|
||||
return {
|
||||
tokens: fallbackTokens,
|
||||
user: mapPrismaUserToUserProfile(fallbackUser),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -294,19 +307,26 @@ export class AuthTokenService {
|
||||
*/
|
||||
async revokeAllUserTokens(userId: string): Promise<void> {
|
||||
try {
|
||||
let cursor = "0";
|
||||
const pattern = `${this.REFRESH_TOKEN_FAMILY_PREFIX}*`;
|
||||
const keys = await this.redis.keys(pattern);
|
||||
|
||||
for (const key of keys) {
|
||||
const data = await this.redis.get(key);
|
||||
if (data) {
|
||||
const family = this.parseRefreshTokenFamilyRecord(data);
|
||||
if (family && family.userId === userId) {
|
||||
await this.redis.del(key);
|
||||
await this.redis.del(`${this.REFRESH_TOKEN_PREFIX}${family.tokenHash}`);
|
||||
do {
|
||||
const [nextCursor, keys] = await this.redis.scan(cursor, "MATCH", pattern, "COUNT", 100);
|
||||
cursor = nextCursor;
|
||||
|
||||
if (keys && keys.length) {
|
||||
for (const key of keys) {
|
||||
const data = await this.redis.get(key);
|
||||
if (!data) continue;
|
||||
|
||||
const family = this.parseRefreshTokenFamilyRecord(data);
|
||||
if (family && family.userId === userId) {
|
||||
await this.redis.del(key);
|
||||
await this.redis.del(`${this.REFRESH_TOKEN_PREFIX}${family.tokenHash}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} while (cursor !== "0");
|
||||
|
||||
this.logger.debug("Revoked all tokens for user", { userId });
|
||||
} catch (error) {
|
||||
|
||||
@ -52,7 +52,10 @@ export class PasswordWorkflowService {
|
||||
throw new BadRequestException("User already has a password set");
|
||||
}
|
||||
|
||||
const passwordHash = await bcrypt.hash(password, 12);
|
||||
const saltRoundsConfig = this.configService.get<string | number>("BCRYPT_ROUNDS", 12);
|
||||
const saltRounds =
|
||||
typeof saltRoundsConfig === "string" ? Number(saltRoundsConfig) : saltRoundsConfig;
|
||||
const passwordHash = await bcrypt.hash(password, saltRounds);
|
||||
await this.usersService.update(user.id, { passwordHash });
|
||||
const prismaUser = await this.usersService.findByIdInternal(user.id);
|
||||
if (!prismaUser) {
|
||||
|
||||
@ -5,6 +5,15 @@ import { ConfigService } from "@nestjs/config";
|
||||
import type { AuthenticatedUser } from "@customer-portal/domain";
|
||||
import { UsersService } from "@bff/modules/users/users.service";
|
||||
import { mapPrismaUserToUserProfile } from "@bff/infra/utils/user-mapper.util";
|
||||
import type { Request } from "express";
|
||||
|
||||
const cookieExtractor = (req: Request): string | null => {
|
||||
const cookieToken = req?.cookies?.access_token;
|
||||
if (typeof cookieToken === "string" && cookieToken.length > 0) {
|
||||
return cookieToken;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class JwtStrategy extends PassportStrategy(Strategy) {
|
||||
@ -18,7 +27,10 @@ export class JwtStrategy extends PassportStrategy(Strategy) {
|
||||
}
|
||||
|
||||
const options = {
|
||||
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
|
||||
jwtFromRequest: ExtractJwt.fromExtractors([
|
||||
ExtractJwt.fromAuthHeaderAsBearerToken(),
|
||||
cookieExtractor,
|
||||
]),
|
||||
ignoreExpiration: false,
|
||||
secretOrKey: jwtSecret,
|
||||
};
|
||||
|
||||
@ -2,6 +2,7 @@ import { Module } from "@nestjs/common";
|
||||
import { CatalogController } from "./catalog.controller";
|
||||
import { IntegrationsModule } from "@bff/integrations/integrations.module";
|
||||
import { MappingsModule } from "@bff/modules/id-mappings/mappings.module";
|
||||
import { CoreConfigModule } from "@bff/core/config/config.module";
|
||||
|
||||
import { BaseCatalogService } from "./services/base-catalog.service";
|
||||
import { InternetCatalogService } from "./services/internet-catalog.service";
|
||||
@ -9,7 +10,7 @@ import { SimCatalogService } from "./services/sim-catalog.service";
|
||||
import { VpnCatalogService } from "./services/vpn-catalog.service";
|
||||
|
||||
@Module({
|
||||
imports: [IntegrationsModule, MappingsModule],
|
||||
imports: [IntegrationsModule, MappingsModule, CoreConfigModule],
|
||||
controllers: [CatalogController],
|
||||
providers: [BaseCatalogService, InternetCatalogService, SimCatalogService, VpnCatalogService],
|
||||
exports: [InternetCatalogService, SimCatalogService, VpnCatalogService],
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { SalesforceConnection } from "@bff/integrations/salesforce/services/salesforce-connection.service";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import {
|
||||
assertSalesforceId,
|
||||
sanitizeSoqlLiteral,
|
||||
@ -19,14 +20,16 @@ export class BaseCatalogService {
|
||||
|
||||
constructor(
|
||||
protected readonly sf: SalesforceConnection,
|
||||
protected readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(Logger) protected readonly logger: Logger
|
||||
) {
|
||||
const portalPricebook = process.env.PORTAL_PRICEBOOK_ID || "01sTL000008eLVlYAM";
|
||||
const portalPricebook = this.configService.get<string>("PORTAL_PRICEBOOK_ID")!;
|
||||
this.portalPriceBookId = assertSalesforceId(portalPricebook, "PORTAL_PRICEBOOK_ID");
|
||||
}
|
||||
|
||||
protected getFields() {
|
||||
return getSalesforceFieldMap();
|
||||
return this.fieldMapService.getFieldMap();
|
||||
}
|
||||
|
||||
protected async executeQuery<TRecord extends SalesforceProduct2WithPricebookEntries>(
|
||||
|
||||
@ -1,4 +1,5 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { BaseCatalogService } from "./base-catalog.service";
|
||||
import type {
|
||||
SalesforceProduct2WithPricebookEntries,
|
||||
@ -8,6 +9,7 @@ import type {
|
||||
} from "@customer-portal/domain";
|
||||
import { MappingsService } from "@bff/modules/id-mappings/mappings.service";
|
||||
import { SalesforceConnection } from "@bff/integrations/salesforce/services/salesforce-connection.service";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
import { assertSalesforceId } from "@bff/integrations/salesforce/utils/soql.util";
|
||||
@ -26,10 +28,12 @@ interface SalesforceAccount {
|
||||
export class InternetCatalogService extends BaseCatalogService {
|
||||
constructor(
|
||||
sf: SalesforceConnection,
|
||||
fieldMapService: SalesforceFieldMapService,
|
||||
configService: ConfigService,
|
||||
@Inject(Logger) logger: Logger,
|
||||
private mappingsService: MappingsService
|
||||
) {
|
||||
super(sf, logger);
|
||||
super(sf, fieldMapService, configService, logger);
|
||||
}
|
||||
|
||||
async getPlans(): Promise<InternetPlanCatalogItem[]> {
|
||||
@ -46,7 +50,7 @@ export class InternetCatalogService extends BaseCatalogService {
|
||||
|
||||
return records.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
return mapInternetPlan(record, entry);
|
||||
return mapInternetPlan(record, fields, entry);
|
||||
});
|
||||
}
|
||||
|
||||
@ -66,7 +70,7 @@ export class InternetCatalogService extends BaseCatalogService {
|
||||
return records
|
||||
.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
return mapInternetInstallation(record, entry);
|
||||
return mapInternetInstallation(record, fields, entry);
|
||||
})
|
||||
.sort((a, b) => (a.displayOrder ?? 0) - (b.displayOrder ?? 0));
|
||||
}
|
||||
@ -89,7 +93,7 @@ export class InternetCatalogService extends BaseCatalogService {
|
||||
return records
|
||||
.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
return mapInternetAddon(record, entry);
|
||||
return mapInternetAddon(record, fields, entry);
|
||||
})
|
||||
.sort((a, b) => (a.displayOrder ?? 0) - (b.displayOrder ?? 0));
|
||||
}
|
||||
|
||||
@ -1,5 +1,7 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { BaseCatalogService } from "./base-catalog.service";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import type {
|
||||
SalesforceProduct2WithPricebookEntries,
|
||||
SimCatalogProduct,
|
||||
@ -18,11 +20,13 @@ import { WhmcsConnectionOrchestratorService } from "@bff/integrations/whmcs/conn
|
||||
export class SimCatalogService extends BaseCatalogService {
|
||||
constructor(
|
||||
sf: SalesforceConnection,
|
||||
fieldMapService: SalesforceFieldMapService,
|
||||
configService: ConfigService,
|
||||
@Inject(Logger) logger: Logger,
|
||||
private mappingsService: MappingsService,
|
||||
private whmcs: WhmcsConnectionOrchestratorService
|
||||
) {
|
||||
super(sf, logger);
|
||||
super(sf, fieldMapService, configService, logger);
|
||||
}
|
||||
|
||||
async getPlans(): Promise<SimCatalogProduct[]> {
|
||||
@ -40,7 +44,7 @@ export class SimCatalogService extends BaseCatalogService {
|
||||
|
||||
return records.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
const product = mapSimProduct(record, entry);
|
||||
const product = mapSimProduct(record, fields, entry);
|
||||
|
||||
return {
|
||||
...product,
|
||||
@ -50,6 +54,7 @@ export class SimCatalogService extends BaseCatalogService {
|
||||
}
|
||||
|
||||
async getActivationFees(): Promise<SimActivationFeeCatalogItem[]> {
|
||||
const fields = this.getFields();
|
||||
const soql = this.buildProductQuery("SIM", "Activation", []);
|
||||
const records = await this.executeQuery<SalesforceProduct2WithPricebookEntries>(
|
||||
soql,
|
||||
@ -58,7 +63,7 @@ export class SimCatalogService extends BaseCatalogService {
|
||||
|
||||
return records.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
return mapSimActivationFee(record, entry);
|
||||
return mapSimActivationFee(record, fields, entry);
|
||||
});
|
||||
}
|
||||
|
||||
@ -78,7 +83,7 @@ export class SimCatalogService extends BaseCatalogService {
|
||||
return records
|
||||
.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
const product = mapSimProduct(record, entry);
|
||||
const product = mapSimProduct(record, fields, entry);
|
||||
|
||||
return {
|
||||
...product,
|
||||
|
||||
@ -1,4 +1,8 @@
|
||||
import { Injectable } from "@nestjs/common";
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { SalesforceConnection } from "@bff/integrations/salesforce/services/salesforce-connection.service";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import { BaseCatalogService } from "./base-catalog.service";
|
||||
import type {
|
||||
SalesforceProduct2WithPricebookEntries,
|
||||
@ -8,6 +12,14 @@ import { mapVpnProduct } from "@bff/modules/catalog/utils/salesforce-product.map
|
||||
|
||||
@Injectable()
|
||||
export class VpnCatalogService extends BaseCatalogService {
|
||||
constructor(
|
||||
sf: SalesforceConnection,
|
||||
fieldMapService: SalesforceFieldMapService,
|
||||
configService: ConfigService,
|
||||
@Inject(Logger) logger: Logger
|
||||
) {
|
||||
super(sf, fieldMapService, configService, logger);
|
||||
}
|
||||
async getPlans(): Promise<VpnCatalogProduct[]> {
|
||||
const fields = this.getFields();
|
||||
const soql = this.buildCatalogServiceQuery("VPN", [
|
||||
@ -21,7 +33,7 @@ export class VpnCatalogService extends BaseCatalogService {
|
||||
|
||||
return records.map(record => {
|
||||
const entry = this.extractPricebookEntry(record);
|
||||
const product = mapVpnProduct(record, entry);
|
||||
const product = mapVpnProduct(record, fields, entry);
|
||||
return {
|
||||
...product,
|
||||
description: product.description || product.name,
|
||||
@ -39,7 +51,7 @@ export class VpnCatalogService extends BaseCatalogService {
|
||||
|
||||
return records.map(record => {
|
||||
const pricebookEntry = this.extractPricebookEntry(record);
|
||||
const product = mapVpnProduct(record, pricebookEntry);
|
||||
const product = mapVpnProduct(record, fields, pricebookEntry);
|
||||
|
||||
return {
|
||||
...product,
|
||||
|
||||
@ -12,9 +12,7 @@ import type {
|
||||
SalesforceProduct2WithPricebookEntries,
|
||||
SalesforcePricebookEntryRecord,
|
||||
} from "@customer-portal/domain";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
|
||||
const fieldMap = getSalesforceFieldMap();
|
||||
import type { SalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
|
||||
export type SalesforceCatalogProductRecord = SalesforceProduct2WithPricebookEntries;
|
||||
|
||||
@ -86,7 +84,8 @@ function inferInstallationTypeFromSku(sku: string): "One-time" | "12-Month" | "2
|
||||
|
||||
function getProductField<T = unknown>(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldKey: keyof typeof fieldMap.product
|
||||
fieldKey: keyof SalesforceFieldMap["product"],
|
||||
fieldMap: SalesforceFieldMap
|
||||
): T | undefined {
|
||||
const salesforceField = fieldMap.product[fieldKey] as keyof SalesforceCatalogProductRecord;
|
||||
const value = product[salesforceField];
|
||||
@ -95,9 +94,10 @@ function getProductField<T = unknown>(
|
||||
|
||||
export function getStringField(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldKey: keyof typeof fieldMap.product
|
||||
fieldKey: keyof SalesforceFieldMap["product"],
|
||||
fieldMap: SalesforceFieldMap
|
||||
): string | undefined {
|
||||
const value = getProductField(product, fieldKey);
|
||||
const value = getProductField(product, fieldKey, fieldMap);
|
||||
return typeof value === "string" ? value : undefined;
|
||||
}
|
||||
|
||||
@ -110,8 +110,8 @@ function coerceNumber(value: unknown): number | undefined {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function baseProduct(product: SalesforceCatalogProductRecord): CatalogProductBase {
|
||||
const sku = getStringField(product, "sku") ?? "";
|
||||
function baseProduct(product: SalesforceCatalogProductRecord, fieldMap: SalesforceFieldMap): CatalogProductBase {
|
||||
const sku = getStringField(product, "sku", fieldMap) ?? "";
|
||||
const base: CatalogProductBase = {
|
||||
id: product.Id,
|
||||
sku,
|
||||
@ -121,37 +121,42 @@ function baseProduct(product: SalesforceCatalogProductRecord): CatalogProductBas
|
||||
const description = product.Description;
|
||||
if (description) base.description = description;
|
||||
|
||||
const billingCycle = getStringField(product, "billingCycle");
|
||||
const billingCycle = getStringField(product, "billingCycle", fieldMap);
|
||||
if (billingCycle) base.billingCycle = billingCycle;
|
||||
|
||||
const displayOrder = getProductField(product, "displayOrder");
|
||||
const displayOrder = getProductField(product, "displayOrder", fieldMap);
|
||||
if (typeof displayOrder === "number") base.displayOrder = displayOrder;
|
||||
|
||||
return base;
|
||||
}
|
||||
|
||||
function getBoolean(product: SalesforceCatalogProductRecord, key: keyof typeof fieldMap.product) {
|
||||
const value = getProductField(product, key);
|
||||
function getBoolean(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
key: keyof SalesforceFieldMap["product"],
|
||||
fieldMap: SalesforceFieldMap
|
||||
) {
|
||||
const value = getProductField(product, key, fieldMap);
|
||||
return typeof value === "boolean" ? value : undefined;
|
||||
}
|
||||
|
||||
function resolveBundledAddonId(product: SalesforceCatalogProductRecord): string | undefined {
|
||||
const raw = getProductField(product, "bundledAddon");
|
||||
function resolveBundledAddonId(product: SalesforceCatalogProductRecord, fieldMap: SalesforceFieldMap): string | undefined {
|
||||
const raw = getProductField(product, "bundledAddon", fieldMap);
|
||||
return typeof raw === "string" && raw.length > 0 ? raw : undefined;
|
||||
}
|
||||
|
||||
function resolveBundledAddon(product: SalesforceCatalogProductRecord) {
|
||||
function resolveBundledAddon(product: SalesforceCatalogProductRecord, fieldMap: SalesforceFieldMap) {
|
||||
return {
|
||||
bundledAddonId: resolveBundledAddonId(product),
|
||||
isBundledAddon: Boolean(getBoolean(product, "isBundledAddon")),
|
||||
bundledAddonId: resolveBundledAddonId(product, fieldMap),
|
||||
isBundledAddon: Boolean(getBoolean(product, "isBundledAddon", fieldMap)),
|
||||
};
|
||||
}
|
||||
|
||||
function derivePrices(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): Pick<CatalogProductBase, "monthlyPrice" | "oneTimePrice"> {
|
||||
const billingCycle = getStringField(product, "billingCycle")?.toLowerCase();
|
||||
const billingCycle = getStringField(product, "billingCycle", fieldMap)?.toLowerCase();
|
||||
const unitPrice = coerceNumber(pricebookEntry?.UnitPrice);
|
||||
|
||||
let monthlyPrice: number | undefined;
|
||||
@ -173,12 +178,13 @@ function derivePrices(
|
||||
|
||||
export function mapInternetPlan(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): InternetPlanCatalogItem {
|
||||
const base = baseProduct(product);
|
||||
const prices = derivePrices(product, pricebookEntry);
|
||||
const tier = getStringField(product, "internetPlanTier");
|
||||
const offeringType = getStringField(product, "internetOfferingType");
|
||||
const base = baseProduct(product, fieldMap);
|
||||
const prices = derivePrices(product, fieldMap, pricebookEntry);
|
||||
const tier = getStringField(product, "internetPlanTier", fieldMap);
|
||||
const offeringType = getStringField(product, "internetOfferingType", fieldMap);
|
||||
|
||||
const tierData = getTierTemplate(tier);
|
||||
|
||||
@ -200,10 +206,11 @@ export function mapInternetPlan(
|
||||
|
||||
export function mapInternetInstallation(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): InternetInstallationCatalogItem {
|
||||
const base = baseProduct(product);
|
||||
const prices = derivePrices(product, pricebookEntry);
|
||||
const base = baseProduct(product, fieldMap);
|
||||
const prices = derivePrices(product, fieldMap, pricebookEntry);
|
||||
|
||||
return {
|
||||
...base,
|
||||
@ -216,11 +223,12 @@ export function mapInternetInstallation(
|
||||
|
||||
export function mapInternetAddon(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): InternetAddonCatalogItem {
|
||||
const base = baseProduct(product);
|
||||
const prices = derivePrices(product, pricebookEntry);
|
||||
const { bundledAddonId, isBundledAddon } = resolveBundledAddon(product);
|
||||
const base = baseProduct(product, fieldMap);
|
||||
const prices = derivePrices(product, fieldMap, pricebookEntry);
|
||||
const { bundledAddonId, isBundledAddon } = resolveBundledAddon(product, fieldMap);
|
||||
|
||||
return {
|
||||
...base,
|
||||
@ -232,14 +240,15 @@ export function mapInternetAddon(
|
||||
|
||||
export function mapSimProduct(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): SimCatalogProduct {
|
||||
const base = baseProduct(product);
|
||||
const prices = derivePrices(product, pricebookEntry);
|
||||
const dataSize = getStringField(product, "simDataSize");
|
||||
const planType = getStringField(product, "simPlanType");
|
||||
const hasFamilyDiscount = getBoolean(product, "simHasFamilyDiscount");
|
||||
const { bundledAddonId, isBundledAddon } = resolveBundledAddon(product);
|
||||
const base = baseProduct(product, fieldMap);
|
||||
const prices = derivePrices(product, fieldMap, pricebookEntry);
|
||||
const dataSize = getStringField(product, "simDataSize", fieldMap);
|
||||
const planType = getStringField(product, "simPlanType", fieldMap);
|
||||
const hasFamilyDiscount = getBoolean(product, "simHasFamilyDiscount", fieldMap);
|
||||
const { bundledAddonId, isBundledAddon } = resolveBundledAddon(product, fieldMap);
|
||||
|
||||
return {
|
||||
...base,
|
||||
@ -254,9 +263,10 @@ export function mapSimProduct(
|
||||
|
||||
export function mapSimActivationFee(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): SimActivationFeeCatalogItem {
|
||||
const simProduct = mapSimProduct(product, pricebookEntry);
|
||||
const simProduct = mapSimProduct(product, fieldMap, pricebookEntry);
|
||||
|
||||
return {
|
||||
...simProduct,
|
||||
@ -268,11 +278,12 @@ export function mapSimActivationFee(
|
||||
|
||||
export function mapVpnProduct(
|
||||
product: SalesforceCatalogProductRecord,
|
||||
fieldMap: SalesforceFieldMap,
|
||||
pricebookEntry?: SalesforcePricebookEntryRecord
|
||||
): VpnCatalogProduct {
|
||||
const base = baseProduct(product);
|
||||
const prices = derivePrices(product, pricebookEntry);
|
||||
const vpnRegion = getStringField(product, "vpnRegion");
|
||||
const base = baseProduct(product, fieldMap);
|
||||
const prices = derivePrices(product, fieldMap, pricebookEntry);
|
||||
const vpnRegion = getStringField(product, "vpnRegion", fieldMap);
|
||||
|
||||
return {
|
||||
...base,
|
||||
|
||||
@ -3,6 +3,8 @@ import { OrdersController } from "./orders.controller";
|
||||
import { IntegrationsModule } from "@bff/integrations/integrations.module";
|
||||
import { MappingsModule } from "@bff/modules/id-mappings/mappings.module";
|
||||
import { UsersModule } from "@bff/modules/users/users.module";
|
||||
import { CoreConfigModule } from "@bff/core/config/config.module";
|
||||
import { DatabaseModule } from "@bff/core/database/database.module";
|
||||
|
||||
// Clean modular order services
|
||||
import { OrderValidator } from "./services/order-validator.service";
|
||||
@ -21,7 +23,7 @@ import { ProvisioningQueueService } from "./queue/provisioning.queue";
|
||||
import { ProvisioningProcessor } from "./queue/provisioning.processor";
|
||||
|
||||
@Module({
|
||||
imports: [IntegrationsModule, MappingsModule, UsersModule],
|
||||
imports: [IntegrationsModule, MappingsModule, UsersModule, CoreConfigModule, DatabaseModule],
|
||||
controllers: [OrdersController],
|
||||
providers: [
|
||||
// Order creation services (modular)
|
||||
|
||||
@ -3,7 +3,7 @@ import { Injectable, Inject } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { OrderFulfillmentOrchestrator } from "../services/order-fulfillment-orchestrator.service";
|
||||
import { SalesforceService } from "@bff/integrations/salesforce/salesforce.service";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import type { ProvisioningJobData } from "./provisioning.queue";
|
||||
import { CacheService } from "@bff/infra/cache/cache.service";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
@ -16,6 +16,7 @@ export class ProvisioningProcessor extends WorkerHost {
|
||||
constructor(
|
||||
private readonly orchestrator: OrderFulfillmentOrchestrator,
|
||||
private readonly salesforceService: SalesforceService,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly cache: CacheService,
|
||||
private readonly config: ConfigService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
@ -33,7 +34,7 @@ export class ProvisioningProcessor extends WorkerHost {
|
||||
});
|
||||
|
||||
// Guard: Only process if Salesforce Order is currently 'Activating'
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
const order = await this.salesforceService.getOrder(sfOrderId);
|
||||
const status = order
|
||||
? ((Reflect.get(order, fields.order.activationStatus) as string | undefined) ?? "")
|
||||
|
||||
@ -1,10 +1,8 @@
|
||||
import { Injectable, Inject } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import type { OrderBusinessValidation, UserMapping } from "@customer-portal/domain";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService, type SalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { UsersService } from "@bff/modules/users/users.service";
|
||||
|
||||
const fieldMap = getSalesforceFieldMap();
|
||||
type OrderBuilderFieldKey =
|
||||
| "orderType"
|
||||
| "activationType"
|
||||
@ -21,7 +19,7 @@ function assignIfString(target: Record<string, unknown>, key: string, value: unk
|
||||
}
|
||||
}
|
||||
|
||||
function orderField(key: OrderBuilderFieldKey): string {
|
||||
function orderField(key: OrderBuilderFieldKey, fieldMap: SalesforceFieldMap): string {
|
||||
const fieldName = fieldMap.order[key];
|
||||
if (typeof fieldName !== "string") {
|
||||
throw new Error(`Missing Salesforce order field mapping for key ${String(key)}`);
|
||||
@ -29,16 +27,16 @@ function orderField(key: OrderBuilderFieldKey): string {
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
function mnpField(key: keyof typeof fieldMap.order.mnp): string {
|
||||
const fieldName = fieldMap.order.mnp[key];
|
||||
function mnpField(key: string, fieldMap: SalesforceFieldMap): string {
|
||||
const fieldName = (fieldMap.order.mnp as Record<string, string>)[key];
|
||||
if (typeof fieldName !== "string") {
|
||||
throw new Error(`Missing Salesforce order MNP field mapping for key ${String(key)}`);
|
||||
}
|
||||
return fieldName;
|
||||
}
|
||||
|
||||
function billingField(key: keyof typeof fieldMap.order.billing): string {
|
||||
const fieldName = fieldMap.order.billing[key];
|
||||
function billingField(key: string, fieldMap: SalesforceFieldMap): string {
|
||||
const fieldName = (fieldMap.order.billing as Record<string, string>)[key];
|
||||
if (typeof fieldName !== "string") {
|
||||
throw new Error(`Missing Salesforce order billing field mapping for key ${String(key)}`);
|
||||
}
|
||||
@ -49,6 +47,7 @@ function billingField(key: keyof typeof fieldMap.order.billing): string {
|
||||
export class OrderBuilder {
|
||||
constructor(
|
||||
@Inject(Logger) private readonly logger: Logger,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly usersService: UsersService
|
||||
) {}
|
||||
|
||||
@ -58,6 +57,7 @@ export class OrderBuilder {
|
||||
pricebookId: string,
|
||||
userId: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const fieldMap = this.fieldMapService.getFieldMap();
|
||||
const today = new Date().toISOString().slice(0, 10);
|
||||
|
||||
const orderFields: Record<string, unknown> = {
|
||||
@ -65,79 +65,82 @@ export class OrderBuilder {
|
||||
EffectiveDate: today,
|
||||
Status: "Pending Review",
|
||||
Pricebook2Id: pricebookId,
|
||||
[orderField("orderType")]: body.orderType,
|
||||
[orderField("orderType", fieldMap)]: body.orderType,
|
||||
...(body.opportunityId ? { OpportunityId: body.opportunityId } : {}),
|
||||
};
|
||||
|
||||
this.addActivationFields(orderFields, body);
|
||||
this.addActivationFields(orderFields, body, fieldMap);
|
||||
|
||||
switch (body.orderType) {
|
||||
case "Internet":
|
||||
this.addInternetFields(orderFields, body);
|
||||
this.addInternetFields(orderFields, body, fieldMap);
|
||||
break;
|
||||
case "SIM":
|
||||
this.addSimFields(orderFields, body);
|
||||
this.addSimFields(orderFields, body, fieldMap);
|
||||
break;
|
||||
case "VPN":
|
||||
this.addVpnFields(orderFields, body);
|
||||
this.addVpnFields(orderFields, body, fieldMap);
|
||||
break;
|
||||
}
|
||||
|
||||
await this.addAddressSnapshot(orderFields, userId, body);
|
||||
await this.addAddressSnapshot(orderFields, userId, body, fieldMap);
|
||||
|
||||
return orderFields;
|
||||
}
|
||||
|
||||
private addActivationFields(
|
||||
orderFields: Record<string, unknown>,
|
||||
body: OrderBusinessValidation
|
||||
body: OrderBusinessValidation,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): void {
|
||||
const config = body.configurations || {};
|
||||
|
||||
assignIfString(orderFields, orderField("activationType"), config.activationType);
|
||||
assignIfString(orderFields, orderField("activationScheduledAt"), config.scheduledAt);
|
||||
orderFields[orderField("activationStatus")] = "Not Started";
|
||||
assignIfString(orderFields, orderField("activationType", fieldMap), config.activationType);
|
||||
assignIfString(orderFields, orderField("activationScheduledAt", fieldMap), config.scheduledAt);
|
||||
orderFields[orderField("activationStatus", fieldMap)] = "Not Started";
|
||||
}
|
||||
|
||||
private addInternetFields(
|
||||
orderFields: Record<string, unknown>,
|
||||
body: OrderBusinessValidation
|
||||
body: OrderBusinessValidation,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): void {
|
||||
const config = body.configurations || {};
|
||||
assignIfString(orderFields, orderField("accessMode"), config.accessMode);
|
||||
assignIfString(orderFields, orderField("accessMode", fieldMap), config.accessMode);
|
||||
}
|
||||
|
||||
private addSimFields(orderFields: Record<string, unknown>, body: OrderBusinessValidation): void {
|
||||
private addSimFields(orderFields: Record<string, unknown>, body: OrderBusinessValidation, fieldMap: SalesforceFieldMap): void {
|
||||
const config = body.configurations || {};
|
||||
assignIfString(orderFields, orderField("simType"), config.simType);
|
||||
assignIfString(orderFields, orderField("eid"), config.eid);
|
||||
assignIfString(orderFields, orderField("simType", fieldMap), config.simType);
|
||||
assignIfString(orderFields, orderField("eid", fieldMap), config.eid);
|
||||
|
||||
if (config.isMnp === "true") {
|
||||
orderFields[mnpField("application")] = true;
|
||||
assignIfString(orderFields, mnpField("reservationNumber"), config.mnpNumber);
|
||||
assignIfString(orderFields, mnpField("expiryDate"), config.mnpExpiry);
|
||||
assignIfString(orderFields, mnpField("phoneNumber"), config.mnpPhone);
|
||||
assignIfString(orderFields, mnpField("mvnoAccountNumber"), config.mvnoAccountNumber);
|
||||
assignIfString(orderFields, mnpField("portingLastName"), config.portingLastName);
|
||||
assignIfString(orderFields, mnpField("portingFirstName"), config.portingFirstName);
|
||||
orderFields[mnpField("application", fieldMap)] = true;
|
||||
assignIfString(orderFields, mnpField("reservationNumber", fieldMap), config.mnpNumber);
|
||||
assignIfString(orderFields, mnpField("expiryDate", fieldMap), config.mnpExpiry);
|
||||
assignIfString(orderFields, mnpField("phoneNumber", fieldMap), config.mnpPhone);
|
||||
assignIfString(orderFields, mnpField("mvnoAccountNumber", fieldMap), config.mvnoAccountNumber);
|
||||
assignIfString(orderFields, mnpField("portingLastName", fieldMap), config.portingLastName);
|
||||
assignIfString(orderFields, mnpField("portingFirstName", fieldMap), config.portingFirstName);
|
||||
assignIfString(
|
||||
orderFields,
|
||||
mnpField("portingLastNameKatakana"),
|
||||
mnpField("portingLastNameKatakana", fieldMap),
|
||||
config.portingLastNameKatakana
|
||||
);
|
||||
assignIfString(
|
||||
orderFields,
|
||||
mnpField("portingFirstNameKatakana"),
|
||||
mnpField("portingFirstNameKatakana", fieldMap),
|
||||
config.portingFirstNameKatakana
|
||||
);
|
||||
assignIfString(orderFields, mnpField("portingGender"), config.portingGender);
|
||||
assignIfString(orderFields, mnpField("portingDateOfBirth"), config.portingDateOfBirth);
|
||||
assignIfString(orderFields, mnpField("portingGender", fieldMap), config.portingGender);
|
||||
assignIfString(orderFields, mnpField("portingDateOfBirth", fieldMap), config.portingDateOfBirth);
|
||||
}
|
||||
}
|
||||
|
||||
private addVpnFields(
|
||||
_orderFields: Record<string, unknown>,
|
||||
_body: OrderBusinessValidation
|
||||
_body: OrderBusinessValidation,
|
||||
_fieldMap: SalesforceFieldMap
|
||||
): void {
|
||||
// No additional fields for VPN orders at this time.
|
||||
}
|
||||
@ -145,7 +148,8 @@ export class OrderBuilder {
|
||||
private async addAddressSnapshot(
|
||||
orderFields: Record<string, unknown>,
|
||||
userId: string,
|
||||
body: OrderBusinessValidation
|
||||
body: OrderBusinessValidation,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): Promise<void> {
|
||||
try {
|
||||
const address = await this.usersService.getAddress(userId);
|
||||
@ -160,17 +164,17 @@ export class OrderBuilder {
|
||||
typeof addressToUse?.streetLine2 === "string" ? addressToUse.streetLine2 : "";
|
||||
const fullStreet = [street, streetLine2].filter(Boolean).join(", ");
|
||||
|
||||
orderFields[billingField("street")] = fullStreet;
|
||||
orderFields[billingField("city")] =
|
||||
orderFields[billingField("street", fieldMap)] = fullStreet;
|
||||
orderFields[billingField("city", fieldMap)] =
|
||||
typeof addressToUse?.city === "string" ? addressToUse.city : "";
|
||||
orderFields[billingField("state")] =
|
||||
orderFields[billingField("state", fieldMap)] =
|
||||
typeof addressToUse?.state === "string" ? addressToUse.state : "";
|
||||
orderFields[billingField("postalCode")] =
|
||||
orderFields[billingField("postalCode", fieldMap)] =
|
||||
typeof addressToUse?.postalCode === "string" ? addressToUse.postalCode : "";
|
||||
orderFields[billingField("country")] =
|
||||
orderFields[billingField("country", fieldMap)] =
|
||||
typeof addressToUse?.country === "string" ? addressToUse.country : "";
|
||||
|
||||
orderFields[orderField("addressChanged")] = addressChanged;
|
||||
orderFields[orderField("addressChanged", fieldMap)] = addressChanged;
|
||||
|
||||
if (addressChanged) {
|
||||
this.logger.log({ userId }, "Customer updated address during checkout");
|
||||
|
||||
@ -13,8 +13,9 @@ import {
|
||||
import { OrderWhmcsMapper, OrderItemMappingResult } from "./order-whmcs-mapper.service";
|
||||
import { OrderFulfillmentErrorService } from "./order-fulfillment-error.service";
|
||||
import { SimFulfillmentService } from "./sim-fulfillment.service";
|
||||
import { DistributedTransactionService } from "@bff/core/database/services/distributed-transaction.service";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import type { OrderDetailsResponse } from "@customer-portal/domain";
|
||||
import type { FulfillmentOrderDetails, FulfillmentOrderItem } from "../types/fulfillment.types";
|
||||
|
||||
@ -44,22 +45,261 @@ export interface OrderFulfillmentContext {
|
||||
export class OrderFulfillmentOrchestrator {
|
||||
constructor(
|
||||
@Inject(Logger) private readonly logger: Logger,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly salesforceService: SalesforceService,
|
||||
private readonly whmcsOrderService: WhmcsOrderService,
|
||||
private readonly orderOrchestrator: OrderOrchestrator,
|
||||
private readonly orderFulfillmentValidator: OrderFulfillmentValidator,
|
||||
private readonly orderWhmcsMapper: OrderWhmcsMapper,
|
||||
private readonly orderFulfillmentErrorService: OrderFulfillmentErrorService,
|
||||
private readonly simFulfillmentService: SimFulfillmentService
|
||||
private readonly simFulfillmentService: SimFulfillmentService,
|
||||
private readonly distributedTransactionService: DistributedTransactionService
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Execute complete fulfillment workflow
|
||||
* Execute complete fulfillment workflow with distributed transaction support
|
||||
*/
|
||||
async executeFulfillment(
|
||||
sfOrderId: string,
|
||||
payload: Record<string, unknown>,
|
||||
idempotencyKey: string
|
||||
): Promise<OrderFulfillmentContext> {
|
||||
return this.executeFulfillmentWithTransactions(sfOrderId, payload, idempotencyKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute fulfillment workflow using distributed transactions for atomicity
|
||||
*/
|
||||
private async executeFulfillmentWithTransactions(
|
||||
sfOrderId: string,
|
||||
payload: Record<string, unknown>,
|
||||
idempotencyKey: string
|
||||
): Promise<OrderFulfillmentContext> {
|
||||
const context: OrderFulfillmentContext = {
|
||||
sfOrderId,
|
||||
idempotencyKey,
|
||||
validation: null,
|
||||
steps: this.initializeSteps(
|
||||
typeof payload.orderType === "string" ? payload.orderType : "Unknown"
|
||||
),
|
||||
};
|
||||
|
||||
this.logger.log("Starting transactional fulfillment orchestration", {
|
||||
sfOrderId,
|
||||
idempotencyKey,
|
||||
});
|
||||
|
||||
// Step 1: Validation (no rollback needed)
|
||||
try {
|
||||
context.validation = await this.orderFulfillmentValidator.validateFulfillmentRequest(
|
||||
sfOrderId,
|
||||
idempotencyKey
|
||||
);
|
||||
|
||||
if (context.validation.isAlreadyProvisioned) {
|
||||
this.logger.log("Order already provisioned, skipping fulfillment", { sfOrderId });
|
||||
return context;
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error("Fulfillment validation failed", {
|
||||
sfOrderId,
|
||||
error: getErrorMessage(error)
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Step 2: Get order details (no rollback needed)
|
||||
try {
|
||||
const orderDetails = await this.orderOrchestrator.getOrder(sfOrderId);
|
||||
if (!orderDetails) {
|
||||
throw new Error("Order details could not be retrieved.");
|
||||
}
|
||||
context.orderDetails = this.mapOrderDetails(orderDetails);
|
||||
} catch (error) {
|
||||
this.logger.error("Failed to get order details", {
|
||||
sfOrderId,
|
||||
error: getErrorMessage(error)
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Step 3: Execute the main fulfillment workflow as a distributed transaction
|
||||
const fulfillmentResult = await this.distributedTransactionService.executeDistributedTransaction([
|
||||
{
|
||||
id: 'sf_status_update',
|
||||
description: 'Update Salesforce order status to Activating',
|
||||
execute: async () => {
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
return await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
[fields.order.activationStatus]: "Activating",
|
||||
});
|
||||
},
|
||||
rollback: async () => {
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
[fields.order.activationStatus]: "Failed",
|
||||
});
|
||||
},
|
||||
critical: true
|
||||
},
|
||||
{
|
||||
id: 'mapping',
|
||||
description: 'Map OrderItems to WHMCS format',
|
||||
execute: async () => {
|
||||
if (!context.orderDetails) {
|
||||
throw new Error("Order details are required for mapping");
|
||||
}
|
||||
return this.orderWhmcsMapper.mapOrderItemsToWhmcs(
|
||||
context.orderDetails.items
|
||||
);
|
||||
},
|
||||
critical: true
|
||||
},
|
||||
{
|
||||
id: 'whmcs_create',
|
||||
description: 'Create order in WHMCS',
|
||||
execute: async () => {
|
||||
const mappingResult = fulfillmentResult.stepResults?.mapping;
|
||||
if (!mappingResult) {
|
||||
throw new Error("Mapping result is not available");
|
||||
}
|
||||
|
||||
const orderNotes = this.orderWhmcsMapper.createOrderNotes(
|
||||
sfOrderId,
|
||||
`Provisioned from Salesforce Order ${sfOrderId}`
|
||||
);
|
||||
|
||||
return await this.whmcsOrderService.addOrder({
|
||||
clientId: context.validation!.clientId,
|
||||
items: mappingResult.whmcsItems,
|
||||
paymentMethod: "stripe",
|
||||
promoCode: "1st Month Free (Monthly Plan)",
|
||||
sfOrderId,
|
||||
notes: orderNotes,
|
||||
noinvoiceemail: true,
|
||||
noemail: true,
|
||||
});
|
||||
},
|
||||
rollback: async () => {
|
||||
const createResult = fulfillmentResult.stepResults?.whmcs_create;
|
||||
if (createResult?.orderId) {
|
||||
// Note: WHMCS doesn't have an automated cancel API
|
||||
// Manual intervention required for order cleanup
|
||||
this.logger.error("WHMCS order created but fulfillment failed - manual cleanup required", {
|
||||
orderId: createResult.orderId,
|
||||
sfOrderId,
|
||||
action: "MANUAL_CLEANUP_REQUIRED"
|
||||
});
|
||||
}
|
||||
},
|
||||
critical: true
|
||||
},
|
||||
{
|
||||
id: 'whmcs_accept',
|
||||
description: 'Accept/provision order in WHMCS',
|
||||
execute: async () => {
|
||||
const createResult = fulfillmentResult.stepResults?.whmcs_create;
|
||||
if (!createResult?.orderId) {
|
||||
throw new Error("WHMCS order ID missing before acceptance step");
|
||||
}
|
||||
|
||||
return await this.whmcsOrderService.acceptOrder(
|
||||
createResult.orderId,
|
||||
sfOrderId
|
||||
);
|
||||
},
|
||||
rollback: async () => {
|
||||
const acceptResult = fulfillmentResult.stepResults?.whmcs_accept;
|
||||
if (acceptResult?.orderId) {
|
||||
// Note: WHMCS doesn't have an automated cancel API for accepted orders
|
||||
// Manual intervention required for service termination
|
||||
this.logger.error("WHMCS order accepted but fulfillment failed - manual cleanup required", {
|
||||
orderId: acceptResult.orderId,
|
||||
serviceIds: acceptResult.serviceIds,
|
||||
sfOrderId,
|
||||
action: "MANUAL_SERVICE_TERMINATION_REQUIRED"
|
||||
});
|
||||
}
|
||||
},
|
||||
critical: true
|
||||
},
|
||||
{
|
||||
id: 'sim_fulfillment',
|
||||
description: 'SIM-specific fulfillment (if applicable)',
|
||||
execute: async () => {
|
||||
if (context.orderDetails?.orderType === "SIM") {
|
||||
const configurations = this.extractConfigurations(payload.configurations);
|
||||
await this.simFulfillmentService.fulfillSimOrder({
|
||||
orderDetails: context.orderDetails,
|
||||
configurations,
|
||||
});
|
||||
return { completed: true };
|
||||
}
|
||||
return { skipped: true };
|
||||
},
|
||||
critical: false // SIM fulfillment failure shouldn't rollback the entire order
|
||||
},
|
||||
{
|
||||
id: 'sf_success_update',
|
||||
description: 'Update Salesforce with success',
|
||||
execute: async () => {
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
const whmcsResult = fulfillmentResult.stepResults?.whmcs_accept;
|
||||
|
||||
return await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
Status: "Completed",
|
||||
[fields.order.activationStatus]: "Activated",
|
||||
[fields.order.whmcsOrderId]: whmcsResult?.orderId?.toString(),
|
||||
});
|
||||
},
|
||||
rollback: async () => {
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
[fields.order.activationStatus]: "Failed",
|
||||
});
|
||||
},
|
||||
critical: true
|
||||
}
|
||||
], {
|
||||
description: `Order fulfillment for ${sfOrderId}`,
|
||||
timeout: 300000, // 5 minutes
|
||||
continueOnNonCriticalFailure: true
|
||||
});
|
||||
|
||||
if (!fulfillmentResult.success) {
|
||||
this.logger.error("Fulfillment transaction failed", {
|
||||
sfOrderId,
|
||||
error: fulfillmentResult.error,
|
||||
stepsExecuted: fulfillmentResult.stepsExecuted,
|
||||
stepsRolledBack: fulfillmentResult.stepsRolledBack
|
||||
});
|
||||
throw new Error(fulfillmentResult.error || "Fulfillment transaction failed");
|
||||
}
|
||||
|
||||
// Update context with results
|
||||
context.mappingResult = fulfillmentResult.stepResults?.mapping;
|
||||
context.whmcsResult = fulfillmentResult.stepResults?.whmcs_accept;
|
||||
|
||||
this.logger.log("Transactional fulfillment completed successfully", {
|
||||
sfOrderId,
|
||||
stepsExecuted: fulfillmentResult.stepsExecuted,
|
||||
duration: fulfillmentResult.duration
|
||||
});
|
||||
|
||||
return context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy fulfillment method (kept for backward compatibility)
|
||||
*/
|
||||
private async executeFulfillmentLegacy(
|
||||
sfOrderId: string,
|
||||
payload: Record<string, unknown>,
|
||||
idempotencyKey: string
|
||||
): Promise<OrderFulfillmentContext> {
|
||||
const context: OrderFulfillmentContext = {
|
||||
sfOrderId,
|
||||
@ -104,7 +344,7 @@ export class OrderFulfillmentOrchestrator {
|
||||
|
||||
// Step 2: Update Salesforce status to "Activating"
|
||||
await this.executeStep(context, "sf_status_update", async () => {
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
[fields.order.activationStatus]: "Activating",
|
||||
@ -206,7 +446,7 @@ export class OrderFulfillmentOrchestrator {
|
||||
|
||||
// Step 8: Update Salesforce with success
|
||||
await this.executeStep(context, "sf_success_update", async () => {
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
await this.salesforceService.updateOrder({
|
||||
Id: sfOrderId,
|
||||
Status: "Completed",
|
||||
@ -401,7 +641,7 @@ export class OrderFulfillmentOrchestrator {
|
||||
): Promise<void> {
|
||||
const errorCode = this.orderFulfillmentErrorService.determineErrorCode(error);
|
||||
const userMessage = error.message;
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
|
||||
this.logger.error("Fulfillment orchestration failed", {
|
||||
sfOrderId: context.sfOrderId,
|
||||
|
||||
@ -5,9 +5,7 @@ import { WhmcsPaymentService } from "@bff/integrations/whmcs/services/whmcs-paym
|
||||
import { MappingsService } from "@bff/modules/id-mappings/mappings.service";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
import type { SalesforceOrderRecord } from "@customer-portal/domain";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
|
||||
const fieldMap = getSalesforceFieldMap();
|
||||
import { SalesforceFieldMapService, type SalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
type OrderStringFieldKey = "activationStatus";
|
||||
|
||||
export interface OrderFulfillmentValidationResult {
|
||||
@ -25,6 +23,7 @@ export interface OrderFulfillmentValidationResult {
|
||||
export class OrderFulfillmentValidator {
|
||||
constructor(
|
||||
@Inject(Logger) private readonly logger: Logger,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly salesforceService: SalesforceService,
|
||||
private readonly whmcsPaymentService: WhmcsPaymentService,
|
||||
private readonly mappingsService: MappingsService
|
||||
@ -48,6 +47,7 @@ export class OrderFulfillmentValidator {
|
||||
const sfOrder = await this.validateSalesforceOrder(sfOrderId);
|
||||
|
||||
// 2. Check if already provisioned (idempotency)
|
||||
const fieldMap = this.fieldMapService.getFieldMap();
|
||||
const rawWhmcs = Reflect.get(sfOrder, fieldMap.order.whmcsOrderId) as unknown;
|
||||
const existingWhmcsOrderId = typeof rawWhmcs === "string" ? rawWhmcs : undefined;
|
||||
if (existingWhmcsOrderId) {
|
||||
@ -113,10 +113,11 @@ export class OrderFulfillmentValidator {
|
||||
throw new BadRequestException(`Cannot provision cancelled order ${sfOrderId}`);
|
||||
}
|
||||
|
||||
const fieldMap = this.fieldMapService.getFieldMap();
|
||||
this.logger.log("Salesforce order validated", {
|
||||
sfOrderId,
|
||||
status: order.Status,
|
||||
activationStatus: pickOrderString(order, "activationStatus"),
|
||||
activationStatus: pickOrderString(order, "activationStatus", fieldMap),
|
||||
accountId: order.AccountId,
|
||||
});
|
||||
|
||||
@ -158,7 +159,8 @@ export class OrderFulfillmentValidator {
|
||||
|
||||
function pickOrderString(
|
||||
order: SalesforceOrderRecord,
|
||||
key: OrderStringFieldKey
|
||||
key: OrderStringFieldKey,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): string | undefined {
|
||||
const field = fieldMap.order[key];
|
||||
if (typeof field !== "string") {
|
||||
|
||||
@ -14,15 +14,11 @@ import {
|
||||
type SalesforceQueryResult,
|
||||
type SalesforceProduct2Record,
|
||||
} from "@customer-portal/domain";
|
||||
import {
|
||||
getSalesforceFieldMap,
|
||||
getOrderQueryFields,
|
||||
getOrderItemProduct2Select,
|
||||
} from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService, type SalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { assertSalesforceId, buildInClause } from "@bff/integrations/salesforce/utils/soql.util";
|
||||
import { getErrorMessage } from "@bff/core/utils/error.util";
|
||||
|
||||
const fieldMap = getSalesforceFieldMap();
|
||||
// fieldMap will be injected via service
|
||||
type OrderFieldKey =
|
||||
| "orderType"
|
||||
| "activationType"
|
||||
@ -33,7 +29,11 @@ type OrderFieldKey =
|
||||
type OrderDetailsResponse = z.infer<typeof orderDetailsSchema>;
|
||||
type OrderSummaryResponse = z.infer<typeof orderSummarySchema>;
|
||||
|
||||
function getOrderStringField(order: SalesforceOrderRecord, key: OrderFieldKey): string | undefined {
|
||||
function getOrderStringField(
|
||||
order: SalesforceOrderRecord,
|
||||
key: OrderFieldKey,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): string | undefined {
|
||||
const fieldName = fieldMap.order[key];
|
||||
if (typeof fieldName !== "string") {
|
||||
return undefined;
|
||||
@ -44,7 +44,8 @@ function getOrderStringField(order: SalesforceOrderRecord, key: OrderFieldKey):
|
||||
|
||||
function pickProductString(
|
||||
product: SalesforceProduct2Record | null | undefined,
|
||||
key: keyof typeof fieldMap.product
|
||||
key: keyof SalesforceFieldMap["product"],
|
||||
fieldMap: SalesforceFieldMap
|
||||
): string | undefined {
|
||||
if (!product) return undefined;
|
||||
const fieldName = fieldMap.product[key] as keyof SalesforceProduct2Record;
|
||||
@ -52,7 +53,10 @@ function pickProductString(
|
||||
return typeof raw === "string" ? raw : undefined;
|
||||
}
|
||||
|
||||
function mapOrderItemRecord(record: SalesforceOrderItemRecord): ParsedOrderItemDetails {
|
||||
function mapOrderItemRecord(
|
||||
record: SalesforceOrderItemRecord,
|
||||
fieldMap: SalesforceFieldMap
|
||||
): ParsedOrderItemDetails {
|
||||
const product = record.PricebookEntry?.Product2 ?? undefined;
|
||||
|
||||
return {
|
||||
@ -65,12 +69,12 @@ function mapOrderItemRecord(record: SalesforceOrderItemRecord): ParsedOrderItemD
|
||||
product: {
|
||||
id: product?.Id,
|
||||
name: product?.Name,
|
||||
sku: pickProductString(product, "sku"),
|
||||
itemClass: pickProductString(product, "itemClass"),
|
||||
whmcsProductId: pickProductString(product, "whmcsProductId"),
|
||||
internetOfferingType: pickProductString(product, "internetOfferingType"),
|
||||
internetPlanTier: pickProductString(product, "internetPlanTier"),
|
||||
vpnRegion: pickProductString(product, "vpnRegion"),
|
||||
sku: pickProductString(product, "sku", fieldMap),
|
||||
itemClass: pickProductString(product, "itemClass", fieldMap),
|
||||
whmcsProductId: pickProductString(product, "whmcsProductId", fieldMap),
|
||||
internetOfferingType: pickProductString(product, "internetOfferingType", fieldMap),
|
||||
internetPlanTier: pickProductString(product, "internetPlanTier", fieldMap),
|
||||
vpnRegion: pickProductString(product, "vpnRegion", fieldMap),
|
||||
},
|
||||
};
|
||||
}
|
||||
@ -115,6 +119,7 @@ export class OrderOrchestrator {
|
||||
constructor(
|
||||
@Inject(Logger) private readonly logger: Logger,
|
||||
private readonly sf: SalesforceConnection,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly orderValidator: OrderValidator,
|
||||
private readonly orderBuilder: OrderBuilder,
|
||||
private readonly orderItemBuilder: OrderItemBuilder
|
||||
@ -194,8 +199,12 @@ export class OrderOrchestrator {
|
||||
const safeOrderId = assertSalesforceId(orderId, "orderId");
|
||||
this.logger.log({ orderId: safeOrderId }, "Fetching order details with items");
|
||||
|
||||
const fieldMap = this.fieldMapService.getFieldMap();
|
||||
const orderQueryFields = this.fieldMapService.getOrderQueryFields();
|
||||
const orderItemProduct2Select = this.fieldMapService.getOrderItemProduct2Select();
|
||||
|
||||
const orderSoql = `
|
||||
SELECT ${getOrderQueryFields()}, OrderNumber, TotalAmount,
|
||||
SELECT ${orderQueryFields}, OrderNumber, TotalAmount,
|
||||
Account.Name, CreatedDate, LastModifiedDate
|
||||
FROM Order
|
||||
WHERE Id = '${safeOrderId}'
|
||||
@ -205,7 +214,7 @@ export class OrderOrchestrator {
|
||||
const orderItemsSoql = `
|
||||
SELECT Id, OrderId, Quantity, UnitPrice, TotalPrice,
|
||||
PricebookEntry.Id,
|
||||
${getOrderItemProduct2Select()}
|
||||
${orderItemProduct2Select}
|
||||
FROM OrderItem
|
||||
WHERE OrderId = '${safeOrderId}'
|
||||
ORDER BY CreatedDate ASC
|
||||
@ -224,7 +233,9 @@ export class OrderOrchestrator {
|
||||
return null;
|
||||
}
|
||||
|
||||
const orderItems = (itemsResult.records ?? []).map(mapOrderItemRecord);
|
||||
const orderItems = (itemsResult.records ?? []).map(record =>
|
||||
mapOrderItemRecord(record, fieldMap)
|
||||
);
|
||||
|
||||
this.logger.log(
|
||||
{ orderId: safeOrderId, itemCount: orderItems.length },
|
||||
@ -236,16 +247,16 @@ export class OrderOrchestrator {
|
||||
orderNumber: order.OrderNumber,
|
||||
status: order.Status,
|
||||
accountId: order.AccountId,
|
||||
orderType: getOrderStringField(order, "orderType") ?? order.Type,
|
||||
orderType: getOrderStringField(order, "orderType", fieldMap) ?? order.Type,
|
||||
effectiveDate: order.EffectiveDate,
|
||||
totalAmount: order.TotalAmount ?? 0,
|
||||
accountName: order.Account?.Name,
|
||||
createdDate: order.CreatedDate,
|
||||
lastModifiedDate: order.LastModifiedDate,
|
||||
activationType: getOrderStringField(order, "activationType"),
|
||||
activationStatus: getOrderStringField(order, "activationStatus"),
|
||||
scheduledAt: getOrderStringField(order, "activationScheduledAt"),
|
||||
whmcsOrderId: getOrderStringField(order, "whmcsOrderId"),
|
||||
activationType: getOrderStringField(order, "activationType", fieldMap),
|
||||
activationStatus: getOrderStringField(order, "activationStatus", fieldMap),
|
||||
scheduledAt: getOrderStringField(order, "activationScheduledAt", fieldMap),
|
||||
whmcsOrderId: getOrderStringField(order, "whmcsOrderId", fieldMap),
|
||||
items: orderItems.map(detail => ({
|
||||
id: detail.id,
|
||||
orderId: detail.orderId,
|
||||
@ -291,8 +302,12 @@ export class OrderOrchestrator {
|
||||
return [];
|
||||
}
|
||||
|
||||
const fieldMap = this.fieldMapService.getFieldMap();
|
||||
const orderQueryFields = this.fieldMapService.getOrderQueryFields();
|
||||
const orderItemProduct2Select = this.fieldMapService.getOrderItemProduct2Select();
|
||||
|
||||
const ordersSoql = `
|
||||
SELECT ${getOrderQueryFields()}, OrderNumber, TotalAmount, CreatedDate, LastModifiedDate
|
||||
SELECT ${orderQueryFields}, OrderNumber, TotalAmount, CreatedDate, LastModifiedDate
|
||||
FROM Order
|
||||
WHERE AccountId = '${sfAccountId}'
|
||||
ORDER BY CreatedDate DESC
|
||||
@ -321,7 +336,7 @@ export class OrderOrchestrator {
|
||||
const orderIdsClause = buildInClause(rawOrderIds, "orderIds");
|
||||
const itemsSoql = `
|
||||
SELECT Id, OrderId, Quantity, UnitPrice, TotalPrice,
|
||||
${getOrderItemProduct2Select()}
|
||||
${orderItemProduct2Select}
|
||||
FROM OrderItem
|
||||
WHERE OrderId IN (${orderIdsClause})
|
||||
ORDER BY OrderId, CreatedDate ASC
|
||||
@ -333,7 +348,7 @@ export class OrderOrchestrator {
|
||||
const allItems = itemsResult.records || [];
|
||||
|
||||
const itemsByOrder = allItems.reduce<Record<string, OrderItemSummary[]>>((acc, record) => {
|
||||
const details = mapOrderItemRecord(record);
|
||||
const details = mapOrderItemRecord(record, fieldMap);
|
||||
if (!acc[details.orderId]) acc[details.orderId] = [];
|
||||
acc[details.orderId].push(toOrderItemSummary(details));
|
||||
return acc;
|
||||
@ -345,12 +360,12 @@ export class OrderOrchestrator {
|
||||
id: order.Id,
|
||||
orderNumber: order.OrderNumber,
|
||||
status: order.Status,
|
||||
orderType: getOrderStringField(order, "orderType") ?? order.Type,
|
||||
orderType: getOrderStringField(order, "orderType", fieldMap) ?? order.Type,
|
||||
effectiveDate: order.EffectiveDate,
|
||||
totalAmount: order.TotalAmount ?? 0,
|
||||
createdDate: order.CreatedDate,
|
||||
lastModifiedDate: order.LastModifiedDate,
|
||||
whmcsOrderId: getOrderStringField(order, "whmcsOrderId"),
|
||||
whmcsOrderId: getOrderStringField(order, "whmcsOrderId", fieldMap),
|
||||
itemsSummary: itemsByOrder[order.Id] ?? [],
|
||||
})
|
||||
);
|
||||
|
||||
@ -1,7 +1,8 @@
|
||||
import { Inject, Injectable, NotFoundException } from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { SalesforceConnection } from "@bff/integrations/salesforce/services/salesforce-connection.service";
|
||||
import { getSalesforceFieldMap } from "@bff/core/config/field-map";
|
||||
import { SalesforceFieldMapService } from "@bff/core/config/field-map";
|
||||
import { getStringField } from "@bff/modules/catalog/utils/salesforce-product.mapper";
|
||||
import type {
|
||||
SalesforcePricebookEntryRecord,
|
||||
@ -31,11 +32,13 @@ export class OrderPricebookService {
|
||||
|
||||
constructor(
|
||||
private readonly sf: SalesforceConnection,
|
||||
private readonly fieldMapService: SalesforceFieldMapService,
|
||||
private readonly configService: ConfigService,
|
||||
@Inject(Logger) private readonly logger: Logger
|
||||
) {}
|
||||
|
||||
async findPortalPricebookId(): Promise<string> {
|
||||
const name = process.env.PORTAL_PRICEBOOK_NAME || "Portal";
|
||||
const name = this.configService.get<string>("PORTAL_PRICEBOOK_NAME")!;
|
||||
const soql = `SELECT Id, Name FROM Pricebook2 WHERE IsActive = true AND Name LIKE '%${sanitizeSoqlLiteral(name)}%' LIMIT 1`;
|
||||
|
||||
try {
|
||||
@ -77,7 +80,7 @@ export class OrderPricebookService {
|
||||
return new Map();
|
||||
}
|
||||
|
||||
const fields = getSalesforceFieldMap();
|
||||
const fields = this.fieldMapService.getFieldMap();
|
||||
const meta = new Map<string, PricebookProductMeta>();
|
||||
|
||||
for (let i = 0; i < uniqueSkus.length; i += this.chunkSize) {
|
||||
@ -100,7 +103,7 @@ export class OrderPricebookService {
|
||||
|
||||
for (const record of res.records ?? []) {
|
||||
const product = record.Product2 ?? undefined;
|
||||
const sku = product ? getStringField(product, "sku") : undefined;
|
||||
const sku = product ? getStringField(product, "sku", fields) : undefined;
|
||||
if (!sku) continue;
|
||||
|
||||
const normalizedSku = sku.trim().toUpperCase();
|
||||
@ -109,12 +112,12 @@ export class OrderPricebookService {
|
||||
pricebookEntryId: assertSalesforceId(record.Id ?? undefined, "pricebookEntryId"),
|
||||
product2Id: record.Product2Id ?? undefined,
|
||||
unitPrice: typeof record.UnitPrice === "number" ? record.UnitPrice : undefined,
|
||||
itemClass: product ? getStringField(product, "itemClass") : undefined,
|
||||
itemClass: product ? getStringField(product, "itemClass", fields) : undefined,
|
||||
internetOfferingType: product
|
||||
? getStringField(product, "internetOfferingType")
|
||||
? getStringField(product, "internetOfferingType", fields)
|
||||
: undefined,
|
||||
internetPlanTier: product ? getStringField(product, "internetPlanTier") : undefined,
|
||||
vpnRegion: product ? getStringField(product, "vpnRegion") : undefined,
|
||||
internetPlanTier: product ? getStringField(product, "internetPlanTier", fields) : undefined,
|
||||
vpnRegion: product ? getStringField(product, "vpnRegion", fields) : undefined,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@ -4,13 +4,14 @@ import type { UpdateAddressRequest } from "@customer-portal/domain";
|
||||
import { Injectable, Inject, NotFoundException, BadRequestException } from "@nestjs/common";
|
||||
import { Logger } from "nestjs-pino";
|
||||
import { PrismaService } from "@bff/infra/database/prisma.service";
|
||||
import { User, Activity, Address } from "@customer-portal/domain";
|
||||
import { User, Activity, Address, type AuthenticatedUser } from "@customer-portal/domain";
|
||||
import type { Subscription, Invoice } from "@customer-portal/domain";
|
||||
import type { User as PrismaUser } from "@prisma/client";
|
||||
import { WhmcsService } from "@bff/integrations/whmcs/whmcs.service";
|
||||
import { SalesforceService } from "@bff/integrations/salesforce/salesforce.service";
|
||||
|
||||
import { MappingsService } from "@bff/modules/id-mappings/mappings.service";
|
||||
import { mapPrismaUserToUserProfile } from "@bff/infra/utils/user-mapper.util";
|
||||
|
||||
// Use a subset of PrismaUser for updates
|
||||
type UserUpdateData = Partial<
|
||||
@ -107,7 +108,7 @@ export class UsersService {
|
||||
}
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<User | null> {
|
||||
async findById(id: string): Promise<AuthenticatedUser | null> {
|
||||
const validId = this.validateUserId(id);
|
||||
|
||||
try {
|
||||
@ -124,7 +125,7 @@ export class UsersService {
|
||||
error: getErrorMessage(error),
|
||||
userId: validId,
|
||||
});
|
||||
return this.toDomainUser(user);
|
||||
return mapPrismaUserToUserProfile(user);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error("Failed to find user by ID", {
|
||||
@ -134,7 +135,7 @@ export class UsersService {
|
||||
}
|
||||
}
|
||||
|
||||
async getEnhancedProfile(userId: string): Promise<User> {
|
||||
async getEnhancedProfile(userId: string): Promise<AuthenticatedUser> {
|
||||
const user = await this.prisma.user.findUnique({ where: { id: userId } });
|
||||
if (!user) throw new Error("User not found");
|
||||
|
||||
@ -190,7 +191,7 @@ export class UsersService {
|
||||
email: email || user.email,
|
||||
};
|
||||
|
||||
return this.toDomainUser(enhancedUser);
|
||||
return mapPrismaUserToUserProfile(enhancedUser);
|
||||
}
|
||||
|
||||
async create(userData: Partial<PrismaUser>): Promise<User> {
|
||||
|
||||
@ -1,5 +1,9 @@
|
||||
/* eslint-env node */
|
||||
/* no-op */
|
||||
import bundleAnalyzer from '@next/bundle-analyzer';
|
||||
|
||||
const withBundleAnalyzer = bundleAnalyzer({
|
||||
enabled: process.env.ANALYZE === 'true',
|
||||
});
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
@ -77,7 +81,15 @@ const nextConfig = {
|
||||
value:
|
||||
process.env.NODE_ENV === "development"
|
||||
? "default-src 'self'; script-src 'self' 'unsafe-eval' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' https: http://localhost:* ws://localhost:*; frame-ancestors 'none';"
|
||||
: "default-src 'self'; script-src 'self' 'unsafe-eval' 'unsafe-inline'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; connect-src 'self' https:; frame-ancestors 'none';",
|
||||
: [
|
||||
"default-src 'self'",
|
||||
"script-src 'self'",
|
||||
"style-src 'self'",
|
||||
"img-src 'self' data: https:",
|
||||
"font-src 'self' data:",
|
||||
"connect-src 'self'",
|
||||
"frame-ancestors 'none'",
|
||||
].join("; "),
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -90,10 +102,19 @@ const nextConfig = {
|
||||
removeConsole: process.env.NODE_ENV === "production",
|
||||
},
|
||||
|
||||
// Simple bundle optimization
|
||||
experimental: {
|
||||
optimizePackageImports: [
|
||||
'@heroicons/react',
|
||||
'lucide-react',
|
||||
'@tanstack/react-query',
|
||||
],
|
||||
},
|
||||
|
||||
// Keep type checking enabled; monorepo paths provide types
|
||||
typescript: { ignoreBuildErrors: false },
|
||||
|
||||
// Prefer Turbopack; no custom webpack override needed
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
export default withBundleAnalyzer(nextConfig);
|
||||
|
||||
@ -5,14 +5,17 @@
|
||||
"scripts": {
|
||||
"predev": "node ./scripts/dev-prep.mjs",
|
||||
"dev": "next dev -p ${NEXT_PORT:-3000}",
|
||||
"build": "NODE_OPTIONS=\"--max-old-space-size=4096\" next build",
|
||||
"build": "next build",
|
||||
"build:turbo": "next build --turbopack",
|
||||
"build:analyze": "ANALYZE=true next build",
|
||||
"analyze": "npm run build:analyze",
|
||||
"start": "next start -p ${NEXT_PORT:-3000}",
|
||||
"lint": "eslint .",
|
||||
"lint:fix": "eslint . --fix",
|
||||
"type-check": "NODE_OPTIONS=\"--max-old-space-size=6144 --max-semi-space-size=256\" tsc --project tsconfig.json --noEmit",
|
||||
"type-check:watch": "NODE_OPTIONS=\"--max-old-space-size=6144 --max-semi-space-size=256\" tsc --project tsconfig.json --noEmit --watch",
|
||||
"test": "echo 'No tests yet'"
|
||||
"type-check": "tsc --project tsconfig.json --noEmit",
|
||||
"type-check:watch": "tsc --project tsconfig.json --noEmit --watch",
|
||||
"test": "echo 'No tests yet'",
|
||||
"bundle-analyze": "npm run build:analyze && npx @next/bundle-analyzer"
|
||||
},
|
||||
"dependencies": {
|
||||
"@customer-portal/logging": "workspace:*",
|
||||
@ -37,11 +40,13 @@
|
||||
"zustand": "^5.0.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@next/bundle-analyzer": "^15.5.0",
|
||||
"@tailwindcss/postcss": "^4.1.12",
|
||||
"@types/node": "^24.3.0",
|
||||
"@types/react": "^19.1.10",
|
||||
"@types/react-dom": "^19.1.7",
|
||||
"tailwindcss": "^4.1.12",
|
||||
"typescript": "^5.9.2"
|
||||
"typescript": "^5.9.2",
|
||||
"webpack-bundle-analyzer": "^4.10.2"
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,12 +2,13 @@
|
||||
|
||||
import { useState, useEffect, useMemo } from "react";
|
||||
import { usePathname, useRouter } from "next/navigation";
|
||||
import { useAuthStore } from "@/features/auth/services/auth.store";
|
||||
import { useAuthStore, useAuthSession } from "@/features/auth/services/auth.store";
|
||||
import { useActiveSubscriptions } from "@/features/subscriptions/hooks";
|
||||
import { accountService } from "@/features/account/services/account.service";
|
||||
import { Sidebar } from "./Sidebar";
|
||||
import { Header } from "./Header";
|
||||
import { computeNavigation } from "./navigation";
|
||||
import type { Subscription } from "@customer-portal/domain";
|
||||
|
||||
interface AppShellProps {
|
||||
children: React.ReactNode;
|
||||
@ -17,12 +18,15 @@ interface AppShellProps {
|
||||
|
||||
export function AppShell({ children }: AppShellProps) {
|
||||
const [sidebarOpen, setSidebarOpen] = useState(false);
|
||||
const { user, isAuthenticated, checkAuth } = useAuthStore();
|
||||
const { hydrated, hasCheckedAuth, loading } = useAuthStore();
|
||||
const { user, isAuthenticated } = useAuthSession();
|
||||
const checkAuth = useAuthStore(state => state.checkAuth);
|
||||
const hasCheckedAuth = useAuthStore(state => state.hasCheckedAuth);
|
||||
const loading = useAuthStore(state => state.loading);
|
||||
const hydrateUserProfile = useAuthStore(state => state.hydrateUserProfile);
|
||||
const pathname = usePathname();
|
||||
const router = useRouter();
|
||||
const activeSubscriptionsQuery = useActiveSubscriptions();
|
||||
const activeSubscriptions = activeSubscriptionsQuery.data ?? [];
|
||||
const activeSubscriptions: Subscription[] = activeSubscriptionsQuery.data ?? [];
|
||||
|
||||
// Initialize with a stable default to avoid hydration mismatch
|
||||
const [expandedItems, setExpandedItems] = useState<string[]>([]);
|
||||
@ -75,19 +79,11 @@ export function AppShell({ children }: AppShellProps) {
|
||||
if (!prof) {
|
||||
return;
|
||||
}
|
||||
useAuthStore.setState(state =>
|
||||
state.user
|
||||
? {
|
||||
...state,
|
||||
user: {
|
||||
...state.user,
|
||||
firstName: prof.firstName || state.user.firstName,
|
||||
lastName: prof.lastName || state.user.lastName,
|
||||
phone: prof.phone || state.user.phone,
|
||||
},
|
||||
}
|
||||
: state
|
||||
);
|
||||
hydrateUserProfile({
|
||||
firstName: prof.firstName ?? undefined,
|
||||
lastName: prof.lastName ?? undefined,
|
||||
phone: prof.phone ?? undefined,
|
||||
});
|
||||
} catch {
|
||||
// best-effort profile hydration; ignore errors
|
||||
}
|
||||
|
||||
@ -9,22 +9,22 @@ type ProfileUpdateInput = {
|
||||
|
||||
export const accountService = {
|
||||
async getProfile() {
|
||||
const response = await apiClient.GET<UserProfile>("/api/me");
|
||||
const response = await apiClient.GET<UserProfile>("/me");
|
||||
return getNullableData<UserProfile>(response);
|
||||
},
|
||||
|
||||
async updateProfile(update: ProfileUpdateInput) {
|
||||
const response = await apiClient.PATCH<UserProfile>("/api/me", { body: update });
|
||||
const response = await apiClient.PATCH<UserProfile>("/me", { body: update });
|
||||
return getDataOrThrow<UserProfile>(response, "Failed to update profile");
|
||||
},
|
||||
|
||||
async getAddress() {
|
||||
const response = await apiClient.GET<Address>("/api/me/address");
|
||||
const response = await apiClient.GET<Address>("/me/address");
|
||||
return getNullableData<Address>(response);
|
||||
},
|
||||
|
||||
async updateAddress(address: Address) {
|
||||
const response = await apiClient.PATCH<Address>("/api/me/address", { body: address });
|
||||
const response = await apiClient.PATCH<Address>("/me/address", { body: address });
|
||||
return getDataOrThrow<Address>(response, "Failed to update address");
|
||||
},
|
||||
};
|
||||
|
||||
@ -3,6 +3,7 @@ import { logger } from "@customer-portal/logging";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { useAuthStore } from "@/features/auth/services/auth.store";
|
||||
import { useAuthSession } from "@/features/auth/services/auth.store";
|
||||
import { Button } from "@/components/atoms/button";
|
||||
|
||||
interface SessionTimeoutWarningProps {
|
||||
@ -12,7 +13,9 @@ interface SessionTimeoutWarningProps {
|
||||
export function SessionTimeoutWarning({
|
||||
warningTime = 5, // Show warning 5 minutes before expiry (reduced since we have auto-refresh)
|
||||
}: SessionTimeoutWarningProps) {
|
||||
const { isAuthenticated, tokens, logout, checkAuth } = useAuthStore();
|
||||
const { isAuthenticated, session } = useAuthSession();
|
||||
const logout = useAuthStore(state => state.logout);
|
||||
const refreshSession = useAuthStore(state => state.refreshSession);
|
||||
const [showWarning, setShowWarning] = useState(false);
|
||||
const [timeLeft, setTimeLeft] = useState<number>(0);
|
||||
const expiryRef = useRef<number | null>(null);
|
||||
@ -20,16 +23,16 @@ export function SessionTimeoutWarning({
|
||||
const previouslyFocusedElement = useRef<HTMLElement | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (!isAuthenticated || !tokens?.expiresAt) {
|
||||
if (!isAuthenticated || !session.accessExpiresAt) {
|
||||
expiryRef.current = null;
|
||||
setShowWarning(false);
|
||||
setTimeLeft(0);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const expiryTime = Date.parse(tokens.expiresAt);
|
||||
const expiryTime = Date.parse(session.accessExpiresAt);
|
||||
if (Number.isNaN(expiryTime)) {
|
||||
logger.warn({ expiresAt: tokens.expiresAt }, "Invalid expiresAt on auth tokens");
|
||||
logger.warn({ expiresAt: session.accessExpiresAt }, "Invalid access token expiry");
|
||||
expiryRef.current = null;
|
||||
setShowWarning(false);
|
||||
setTimeLeft(0);
|
||||
@ -60,7 +63,7 @@ export function SessionTimeoutWarning({
|
||||
}, timeUntilWarning);
|
||||
|
||||
return () => clearTimeout(warningTimeout);
|
||||
}, [isAuthenticated, tokens?.expiresAt, warningTime, logout]);
|
||||
}, [isAuthenticated, session.accessExpiresAt, warningTime, logout]);
|
||||
|
||||
useEffect(() => {
|
||||
if (!showWarning || !expiryRef.current) return undefined;
|
||||
@ -139,7 +142,7 @@ export function SessionTimeoutWarning({
|
||||
const handleExtendSession = () => {
|
||||
void (async () => {
|
||||
try {
|
||||
await checkAuth();
|
||||
await refreshSession();
|
||||
setShowWarning(false);
|
||||
setTimeLeft(0);
|
||||
} catch (error) {
|
||||
|
||||
@ -17,56 +17,73 @@ import type { SignupRequestInput, LoginRequestInput } from "@customer-portal/dom
|
||||
export function useAuth() {
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const store = useAuthStore();
|
||||
const {
|
||||
isAuthenticated,
|
||||
user,
|
||||
loading,
|
||||
hasCheckedAuth,
|
||||
error,
|
||||
login: loginAction,
|
||||
signup: signupAction,
|
||||
logout: logoutAction,
|
||||
requestPasswordReset,
|
||||
resetPassword,
|
||||
changePassword,
|
||||
checkPasswordNeeded,
|
||||
linkWhmcs,
|
||||
setPassword,
|
||||
checkAuth,
|
||||
refreshSession,
|
||||
clearError,
|
||||
} = useAuthStore();
|
||||
|
||||
// Enhanced login with redirect handling
|
||||
const login = useCallback(
|
||||
async (credentials: LoginRequestInput) => {
|
||||
await store.login(credentials);
|
||||
await loginAction(credentials);
|
||||
const redirectTo = getPostLoginRedirect(searchParams);
|
||||
router.push(redirectTo);
|
||||
},
|
||||
[store, router, searchParams]
|
||||
[loginAction, router, searchParams]
|
||||
);
|
||||
|
||||
// Enhanced signup with redirect handling
|
||||
const signup = useCallback(
|
||||
async (data: SignupRequestInput) => {
|
||||
await store.signup(data);
|
||||
await signupAction(data);
|
||||
const redirectTo = getPostLoginRedirect(searchParams);
|
||||
router.push(redirectTo);
|
||||
},
|
||||
[store, router, searchParams]
|
||||
[signupAction, router, searchParams]
|
||||
);
|
||||
|
||||
// Enhanced logout with redirect
|
||||
const logout = useCallback(async () => {
|
||||
await store.logout();
|
||||
await logoutAction();
|
||||
router.push("/auth/login");
|
||||
}, [store, router]);
|
||||
}, [logoutAction, router]);
|
||||
|
||||
return {
|
||||
// State
|
||||
isAuthenticated: store.isAuthenticated,
|
||||
user: store.user,
|
||||
loading: store.loading,
|
||||
hydrated: (store as unknown as { hydrated?: boolean }).hydrated ?? false,
|
||||
hasCheckedAuth: (store as unknown as { hasCheckedAuth?: boolean }).hasCheckedAuth ?? false,
|
||||
error: store.error,
|
||||
isAuthenticated,
|
||||
user,
|
||||
loading,
|
||||
hasCheckedAuth,
|
||||
error,
|
||||
|
||||
// Actions
|
||||
login,
|
||||
signup,
|
||||
logout,
|
||||
requestPasswordReset: store.requestPasswordReset,
|
||||
resetPassword: store.resetPassword,
|
||||
changePassword: store.changePassword,
|
||||
checkPasswordNeeded: store.checkPasswordNeeded,
|
||||
linkWhmcs: store.linkWhmcs,
|
||||
setPassword: store.setPassword,
|
||||
checkAuth: store.checkAuth,
|
||||
refreshSession: store.refreshSession,
|
||||
clearError: store.clearError,
|
||||
requestPasswordReset,
|
||||
resetPassword,
|
||||
changePassword,
|
||||
checkPasswordNeeded,
|
||||
linkWhmcs,
|
||||
setPassword,
|
||||
checkAuth,
|
||||
refreshSession,
|
||||
clearError,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@ -1,15 +1,14 @@
|
||||
/**
|
||||
* Client-Side Authentication Store
|
||||
* Simple Zustand store for auth state management - calls BFF APIs directly
|
||||
* Maintains session state using secure httpOnly cookies issued by the BFF.
|
||||
*/
|
||||
|
||||
import { create } from "zustand";
|
||||
import { persist, createJSONStorage } from "zustand/middleware";
|
||||
import { apiClient, getNullableData } from "@/lib/api";
|
||||
import { getErrorInfo, handleAuthError } from "@/lib/utils/error-handling";
|
||||
import logger from "@customer-portal/logging";
|
||||
import type {
|
||||
AuthTokens,
|
||||
AuthTokensSchema,
|
||||
AuthenticatedUser,
|
||||
LinkWhmcsRequestInput,
|
||||
LoginRequestInput,
|
||||
@ -17,26 +16,19 @@ import type {
|
||||
} from "@customer-portal/domain";
|
||||
import { authResponseSchema } from "@customer-portal/domain/validation";
|
||||
|
||||
const withAuthHeaders = (accessToken?: string) =>
|
||||
accessToken
|
||||
? {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
} as Record<string, string>,
|
||||
}
|
||||
: {};
|
||||
interface SessionState {
|
||||
accessExpiresAt?: string;
|
||||
refreshExpiresAt?: string;
|
||||
}
|
||||
|
||||
interface AuthState {
|
||||
// State
|
||||
export interface AuthState {
|
||||
user: AuthenticatedUser | null;
|
||||
tokens: AuthTokens | null;
|
||||
session: SessionState;
|
||||
isAuthenticated: boolean;
|
||||
loading: boolean;
|
||||
error: string | null;
|
||||
hydrated: boolean;
|
||||
hasCheckedAuth: boolean;
|
||||
|
||||
// Actions
|
||||
login: (credentials: LoginRequestInput) => Promise<void>;
|
||||
signup: (data: SignupRequestInput) => Promise<void>;
|
||||
logout: () => Promise<void>;
|
||||
@ -49,377 +41,301 @@ interface AuthState {
|
||||
) => Promise<{ needsPasswordSet: boolean; email: string }>;
|
||||
setPassword: (email: string, password: string) => Promise<void>;
|
||||
refreshUser: () => Promise<void>;
|
||||
refreshTokens: () => Promise<void>;
|
||||
refreshSession: () => Promise<void>;
|
||||
checkAuth: () => Promise<void>;
|
||||
clearError: () => void;
|
||||
setTokens: (tokens: AuthTokens) => void;
|
||||
setHydrated: (hydrated: boolean) => void;
|
||||
refreshSession: () => Promise<void>;
|
||||
hydrateUserProfile: (profile: Partial<AuthenticatedUser>) => void;
|
||||
}
|
||||
|
||||
export const useAuthStore = create<AuthState>()(
|
||||
persist(
|
||||
(set, get) => ({
|
||||
// Initial state
|
||||
user: null,
|
||||
tokens: null,
|
||||
isAuthenticated: false,
|
||||
type AuthResponseData = {
|
||||
user: AuthenticatedUser;
|
||||
tokens: AuthTokensSchema;
|
||||
};
|
||||
|
||||
export const useAuthStore = create<AuthState>()((set, get) => {
|
||||
const applyAuthResponse = (data: AuthResponseData) => {
|
||||
set({
|
||||
user: data.user,
|
||||
session: {
|
||||
accessExpiresAt: data.tokens.expiresAt,
|
||||
refreshExpiresAt: data.tokens.refreshExpiresAt,
|
||||
},
|
||||
isAuthenticated: true,
|
||||
loading: false,
|
||||
error: null,
|
||||
hydrated: false,
|
||||
hasCheckedAuth: false,
|
||||
|
||||
// Actions
|
||||
login: async (credentials: LoginRequestInput) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
// Use shared API client with consistent configuration
|
||||
const response = await apiClient.POST("/auth/login", { body: credentials });
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Login failed");
|
||||
}
|
||||
|
||||
const { user, tokens } = parsed.data;
|
||||
|
||||
set({
|
||||
user,
|
||||
tokens,
|
||||
isAuthenticated: true,
|
||||
loading: false,
|
||||
error: null,
|
||||
});
|
||||
} catch (error) {
|
||||
const errorInfo = getErrorInfo(error);
|
||||
set({
|
||||
loading: false,
|
||||
error: errorInfo.message,
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
signup: async (data: SignupRequestInput) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/signup", { body: data });
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Signup failed");
|
||||
}
|
||||
|
||||
const { user, tokens } = parsed.data;
|
||||
|
||||
set({
|
||||
user,
|
||||
tokens,
|
||||
isAuthenticated: true,
|
||||
loading: false,
|
||||
error: null,
|
||||
});
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Signup failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
logout: async () => {
|
||||
const { tokens } = get();
|
||||
|
||||
try {
|
||||
if (tokens?.accessToken) {
|
||||
await apiClient.POST("/auth/logout", {
|
||||
...withAuthHeaders(tokens.accessToken),
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore logout errors - clear local state anyway
|
||||
logger.warn(
|
||||
{ error: error instanceof Error ? error.message : String(error) },
|
||||
"Logout API call failed"
|
||||
);
|
||||
}
|
||||
|
||||
set({
|
||||
user: null,
|
||||
tokens: null,
|
||||
isAuthenticated: false,
|
||||
error: null,
|
||||
});
|
||||
},
|
||||
|
||||
requestPasswordReset: async (email: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/request-password-reset", {
|
||||
body: { email },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("Password reset request failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password reset request failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
resetPassword: async (token: string, password: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/reset-password", {
|
||||
body: { token, password },
|
||||
});
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Password reset failed");
|
||||
}
|
||||
|
||||
const { user, tokens } = parsed.data;
|
||||
|
||||
set({
|
||||
user,
|
||||
tokens,
|
||||
isAuthenticated: true,
|
||||
loading: false,
|
||||
error: null,
|
||||
});
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password reset failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
changePassword: async (currentPassword: string, newPassword: string) => {
|
||||
const { tokens } = get();
|
||||
if (!tokens?.accessToken) throw new Error("Not authenticated");
|
||||
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/change-password", {
|
||||
...withAuthHeaders(tokens.accessToken),
|
||||
body: { currentPassword, newPassword },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("Password change failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password change failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
checkPasswordNeeded: async (email: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/check-password-needed", {
|
||||
body: { email },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("Check failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
return response.data as { needsPasswordSet: boolean };
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Check failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
linkWhmcs: async ({ email, password }: LinkWhmcsRequestInput) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/link-whmcs", {
|
||||
body: { email, password },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("WHMCS link failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
const result = response.data as { needsPasswordSet: boolean };
|
||||
return { ...result, email };
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "WHMCS link failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
setPassword: async (email: string, password: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/set-password", {
|
||||
body: { email, password },
|
||||
});
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Set password failed");
|
||||
}
|
||||
|
||||
const { user, tokens } = parsed.data;
|
||||
|
||||
set({
|
||||
user,
|
||||
tokens,
|
||||
isAuthenticated: true,
|
||||
loading: false,
|
||||
error: null,
|
||||
});
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Set password failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
refreshUser: async () => {
|
||||
const { tokens } = get();
|
||||
if (!tokens?.accessToken) return;
|
||||
|
||||
try {
|
||||
const response = await apiClient.GET<AuthenticatedUser>("/me", {
|
||||
...withAuthHeaders(tokens.accessToken),
|
||||
});
|
||||
|
||||
const profile = getNullableData<AuthenticatedUser>(response);
|
||||
if (!profile) {
|
||||
// Token might be expired, try to refresh
|
||||
await get().refreshTokens();
|
||||
return;
|
||||
}
|
||||
|
||||
set({ user: profile });
|
||||
} catch (error) {
|
||||
// Token might be expired, try to refresh
|
||||
const shouldLogout = handleAuthError(error, get().logout);
|
||||
if (!shouldLogout) {
|
||||
await get().refreshTokens();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
refreshTokens: async () => {
|
||||
const { tokens } = get();
|
||||
if (!tokens?.refreshToken) {
|
||||
// No refresh token available, logout
|
||||
await get().logout();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/refresh", {
|
||||
body: {
|
||||
refreshToken: tokens.refreshToken,
|
||||
deviceId: localStorage.getItem("deviceId") || undefined,
|
||||
},
|
||||
});
|
||||
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Token refresh failed");
|
||||
}
|
||||
|
||||
const { tokens: newTokens } = parsed.data;
|
||||
set({ tokens: newTokens, isAuthenticated: true });
|
||||
} catch (error) {
|
||||
// Refresh failed, logout
|
||||
const shouldLogout = handleAuthError(error, get().logout);
|
||||
if (!shouldLogout) {
|
||||
await get().logout();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
checkAuth: async () => {
|
||||
const { tokens, isAuthenticated } = get();
|
||||
|
||||
set({ hasCheckedAuth: true });
|
||||
|
||||
if (!isAuthenticated || !tokens) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if access token is close to expiry (within 5 minutes)
|
||||
const expiryTime = new Date(tokens.expiresAt).getTime();
|
||||
const now = Date.now();
|
||||
const fiveMinutes = 5 * 60 * 1000;
|
||||
|
||||
if (expiryTime - now < fiveMinutes) {
|
||||
await get().refreshTokens();
|
||||
}
|
||||
},
|
||||
|
||||
clearError: () => set({ error: null }),
|
||||
|
||||
setTokens: (tokens: AuthTokens) => {
|
||||
set({ tokens, isAuthenticated: true });
|
||||
},
|
||||
|
||||
setHydrated: (hydrated: boolean) => {
|
||||
set({ hydrated });
|
||||
},
|
||||
|
||||
refreshSession: async () => {
|
||||
await get().refreshUser();
|
||||
},
|
||||
}),
|
||||
{
|
||||
name: "auth-store",
|
||||
storage: createJSONStorage(() => localStorage),
|
||||
partialize: state => ({
|
||||
user: state.user,
|
||||
tokens: state.tokens,
|
||||
isAuthenticated: state.isAuthenticated,
|
||||
}),
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
// Selectors for easy access
|
||||
export const selectAuthTokens = (state: AuthState) => state.tokens;
|
||||
export const selectIsAuthenticated = (state: AuthState) => state.isAuthenticated;
|
||||
export const selectAuthUser = (state: AuthState) => state.user;
|
||||
|
||||
export const useAuthSession = () => {
|
||||
const tokens = useAuthStore(selectAuthTokens);
|
||||
const isAuthenticated = useAuthStore(selectIsAuthenticated);
|
||||
const user = useAuthStore(selectAuthUser);
|
||||
const hasValidToken = Boolean(
|
||||
tokens?.accessToken && tokens?.expiresAt && new Date(tokens.expiresAt).getTime() > Date.now()
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
user: null,
|
||||
session: {},
|
||||
isAuthenticated: false,
|
||||
loading: false,
|
||||
error: null,
|
||||
hasCheckedAuth: false,
|
||||
|
||||
login: async credentials => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/login", { body: credentials });
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Login failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
const errorInfo = getErrorInfo(error);
|
||||
set({ loading: false, error: errorInfo.message, isAuthenticated: false });
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
signup: async data => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/signup", { body: data });
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Signup failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Signup failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
logout: async () => {
|
||||
try {
|
||||
await apiClient.POST("/auth/logout", {});
|
||||
} catch (error) {
|
||||
logger.warn(error, "Logout API call failed");
|
||||
} finally {
|
||||
set({
|
||||
user: null,
|
||||
session: {},
|
||||
isAuthenticated: false,
|
||||
error: null,
|
||||
loading: false,
|
||||
});
|
||||
}
|
||||
},
|
||||
|
||||
requestPasswordReset: async (email: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
await apiClient.POST("/auth/request-password-reset", { body: { email } });
|
||||
set({ loading: false });
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password reset request failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
resetPassword: async (token: string, password: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/reset-password", {
|
||||
body: { token, password },
|
||||
});
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Password reset failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password reset failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
changePassword: async (currentPassword: string, newPassword: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/change-password", {
|
||||
body: { currentPassword, newPassword },
|
||||
});
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Password change failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Password change failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
checkPasswordNeeded: async (email: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/check-password-needed", {
|
||||
body: { email },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("Check failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
return response.data as { needsPasswordSet: boolean };
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Check failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
linkWhmcs: async ({ email, password }: LinkWhmcsRequestInput) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/link-whmcs", {
|
||||
body: { email, password },
|
||||
});
|
||||
|
||||
if (!response.data) {
|
||||
throw new Error("WHMCS link failed");
|
||||
}
|
||||
|
||||
set({ loading: false });
|
||||
const result = response.data as { needsPasswordSet: boolean };
|
||||
return { ...result, email };
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "WHMCS link failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
setPassword: async (email: string, password: string) => {
|
||||
set({ loading: true, error: null });
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/set-password", {
|
||||
body: { email, password },
|
||||
});
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Set password failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
set({
|
||||
loading: false,
|
||||
error: error instanceof Error ? error.message : "Set password failed",
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
refreshUser: async () => {
|
||||
try {
|
||||
const response = await apiClient.GET<{ isAuthenticated?: boolean; user?: AuthenticatedUser }>(
|
||||
"/auth/me"
|
||||
);
|
||||
const data = getNullableData(response);
|
||||
if (data?.isAuthenticated && data.user) {
|
||||
set({
|
||||
user: data.user,
|
||||
isAuthenticated: true,
|
||||
error: null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// No active session
|
||||
set({ user: null, isAuthenticated: false, session: {} });
|
||||
} catch (error) {
|
||||
const shouldLogout = handleAuthError(error, get().logout);
|
||||
if (shouldLogout) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const refreshResponse = await apiClient.POST("/auth/refresh", { body: {} });
|
||||
const parsed = authResponseSchema.safeParse(refreshResponse.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Session refresh failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (refreshError) {
|
||||
logger.error(refreshError, "Failed to refresh session after auth error");
|
||||
await get().logout();
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
refreshSession: async () => {
|
||||
try {
|
||||
const response = await apiClient.POST("/auth/refresh", { body: {} });
|
||||
const parsed = authResponseSchema.safeParse(response.data);
|
||||
if (!parsed.success) {
|
||||
throw new Error(parsed.error.issues?.[0]?.message ?? "Session refresh failed");
|
||||
}
|
||||
applyAuthResponse(parsed.data);
|
||||
} catch (error) {
|
||||
logger.error(error, "Failed to refresh session");
|
||||
await get().logout();
|
||||
}
|
||||
},
|
||||
|
||||
checkAuth: async () => {
|
||||
set({ hasCheckedAuth: true });
|
||||
await get().refreshUser();
|
||||
},
|
||||
|
||||
clearError: () => set({ error: null }),
|
||||
|
||||
hydrateUserProfile: profile => {
|
||||
set(state => {
|
||||
if (!state.user) {
|
||||
return state;
|
||||
}
|
||||
|
||||
const definedEntries = Object.entries(profile).filter(([, value]) => value !== undefined);
|
||||
if (definedEntries.length === 0) {
|
||||
return state;
|
||||
}
|
||||
|
||||
return {
|
||||
...state,
|
||||
user: {
|
||||
...state.user,
|
||||
...(Object.fromEntries(definedEntries) as Partial<AuthenticatedUser>),
|
||||
},
|
||||
};
|
||||
});
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
export const selectIsAuthenticated = (state: AuthState) => state.isAuthenticated;
|
||||
export const selectAuthUser = (state: AuthState) => state.user;
|
||||
export const selectSession = (state: AuthState) => state.session;
|
||||
|
||||
export const useAuthSession = () => {
|
||||
const user = useAuthStore(selectAuthUser);
|
||||
const isAuthenticated = useAuthStore(selectIsAuthenticated);
|
||||
const session = useAuthStore(selectSession);
|
||||
|
||||
return {
|
||||
tokens,
|
||||
isAuthenticated,
|
||||
user,
|
||||
hasValidToken,
|
||||
isAuthenticated,
|
||||
session,
|
||||
};
|
||||
};
|
||||
|
||||
@ -5,7 +5,8 @@
|
||||
|
||||
export {
|
||||
useAuthStore,
|
||||
selectAuthTokens,
|
||||
useAuthSession,
|
||||
selectIsAuthenticated,
|
||||
selectAuthUser,
|
||||
selectSession,
|
||||
} from "./auth.store";
|
||||
|
||||
@ -61,31 +61,34 @@ type SsoLinkMutationOptions = UseMutationOptions<
|
||||
>;
|
||||
|
||||
async function fetchInvoices(params?: InvoiceQueryParams): Promise<InvoiceList> {
|
||||
const response = await apiClient.GET(
|
||||
"/api/invoices",
|
||||
params ? { params: { query: params } } : undefined
|
||||
const response = await apiClient.GET<InvoiceList>(
|
||||
"/invoices",
|
||||
params ? { params: { query: params as Record<string, unknown> } } : undefined
|
||||
);
|
||||
const data = getDataOrDefault(response, emptyInvoiceList);
|
||||
const data = getDataOrDefault<InvoiceList>(response, emptyInvoiceList);
|
||||
return invoiceListSchema.parse(data);
|
||||
}
|
||||
|
||||
async function fetchInvoice(id: string): Promise<Invoice> {
|
||||
const response = await apiClient.GET("/api/invoices/{id}", { params: { path: { id } } });
|
||||
const invoice = getDataOrThrow(response, "Invoice not found");
|
||||
const response = await apiClient.GET<Invoice>("/invoices/{id}", {
|
||||
params: { path: { id } },
|
||||
});
|
||||
const invoice = getDataOrThrow<Invoice>(response, "Invoice not found");
|
||||
return sharedInvoiceSchema.parse(invoice);
|
||||
}
|
||||
|
||||
async function fetchPaymentMethods(): Promise<PaymentMethodList> {
|
||||
const response = await apiClient.GET("/api/invoices/payment-methods");
|
||||
return getDataOrDefault(response, emptyPaymentMethods);
|
||||
const response = await apiClient.GET<PaymentMethodList>("/invoices/payment-methods");
|
||||
return getDataOrDefault<PaymentMethodList>(response, emptyPaymentMethods);
|
||||
}
|
||||
|
||||
export function useInvoices(
|
||||
params?: InvoiceQueryParams,
|
||||
options?: InvoicesQueryOptions
|
||||
): UseQueryResult<InvoiceList, Error> {
|
||||
const queryParams = params ? (params as Record<string, unknown>) : {};
|
||||
return useQuery({
|
||||
queryKey: queryKeys.billing.invoices(params ?? {}),
|
||||
queryKey: queryKeys.billing.invoices(queryParams),
|
||||
queryFn: () => fetchInvoices(params),
|
||||
...options,
|
||||
});
|
||||
@ -122,7 +125,7 @@ export function useCreateInvoiceSsoLink(
|
||||
> {
|
||||
return useMutation({
|
||||
mutationFn: async ({ invoiceId, target }) => {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/api/invoices/{id}/sso-link", {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/invoices/{id}/sso-link", {
|
||||
params: {
|
||||
path: { id: invoiceId },
|
||||
query: target ? { target } : undefined,
|
||||
|
||||
@ -29,7 +29,7 @@ export function usePaymentRefresh<T>({
|
||||
setToast({ visible: true, text: "Refreshing payment methods...", tone: "info" });
|
||||
try {
|
||||
try {
|
||||
await apiClient.POST("/api/invoices/payment-methods/refresh");
|
||||
await apiClient.POST("/invoices/payment-methods/refresh");
|
||||
} catch (err) {
|
||||
// Soft-fail cache refresh, still attempt refetch
|
||||
// Payment methods cache refresh failed - silently continue
|
||||
|
||||
@ -56,7 +56,7 @@ export function InvoiceDetailContainer() {
|
||||
void (async () => {
|
||||
setLoadingPaymentMethods(true);
|
||||
try {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/api/auth/sso-link", {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/auth/sso-link", {
|
||||
body: { path: "index.php?rp=/account/paymentmethods" },
|
||||
});
|
||||
const sso = getDataOrThrow<InvoiceSsoLink>(
|
||||
|
||||
@ -57,7 +57,7 @@ export function PaymentMethodsContainer() {
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/api/auth/sso-link", {
|
||||
const response = await apiClient.POST<InvoiceSsoLink>("/auth/sso-link", {
|
||||
body: { path: "index.php?rp=/account/paymentmethods" },
|
||||
});
|
||||
const sso = getDataOrThrow<InvoiceSsoLink>(response, "Failed to open payment methods portal");
|
||||
|
||||
@ -16,28 +16,48 @@ const emptySimAddons: SimCatalogProduct[] = [];
|
||||
const emptySimActivationFees: SimActivationFeeCatalogItem[] = [];
|
||||
const emptyVpnPlans: VpnCatalogProduct[] = [];
|
||||
|
||||
const defaultInternetCatalog = {
|
||||
plans: emptyInternetPlans,
|
||||
installations: emptyInternetInstallations,
|
||||
addons: emptyInternetAddons,
|
||||
};
|
||||
|
||||
const defaultSimCatalog = {
|
||||
plans: emptySimPlans,
|
||||
activationFees: emptySimActivationFees,
|
||||
addons: emptySimAddons,
|
||||
};
|
||||
|
||||
const defaultVpnCatalog = {
|
||||
plans: emptyVpnPlans,
|
||||
activationFees: emptyVpnPlans,
|
||||
};
|
||||
|
||||
export const catalogService = {
|
||||
async getInternetCatalog(): Promise<{
|
||||
plans: InternetPlanCatalogItem[];
|
||||
installations: InternetInstallationCatalogItem[];
|
||||
addons: InternetAddonCatalogItem[];
|
||||
}> {
|
||||
const response = await apiClient.GET("/api/catalog/internet/plans");
|
||||
return getDataOrDefault(response, {
|
||||
plans: emptyInternetPlans,
|
||||
installations: emptyInternetInstallations,
|
||||
addons: emptyInternetAddons,
|
||||
});
|
||||
const response = await apiClient.GET<typeof defaultInternetCatalog>(
|
||||
"/catalog/internet/plans"
|
||||
);
|
||||
return getDataOrDefault<typeof defaultInternetCatalog>(response, defaultInternetCatalog);
|
||||
},
|
||||
|
||||
async getInternetInstallations(): Promise<InternetInstallationCatalogItem[]> {
|
||||
const response = await apiClient.GET("/api/catalog/internet/installations");
|
||||
return getDataOrDefault(response, emptyInternetInstallations);
|
||||
const response = await apiClient.GET<InternetInstallationCatalogItem[]>(
|
||||
"/catalog/internet/installations"
|
||||
);
|
||||
return getDataOrDefault<InternetInstallationCatalogItem[]>(
|
||||
response,
|
||||
emptyInternetInstallations
|
||||
);
|
||||
},
|
||||
|
||||
async getInternetAddons(): Promise<InternetAddonCatalogItem[]> {
|
||||
const response = await apiClient.GET("/api/catalog/internet/addons");
|
||||
return getDataOrDefault(response, emptyInternetAddons);
|
||||
const response = await apiClient.GET<InternetAddonCatalogItem[]>("/catalog/internet/addons");
|
||||
return getDataOrDefault<InternetAddonCatalogItem[]>(response, emptyInternetAddons);
|
||||
},
|
||||
|
||||
async getSimCatalog(): Promise<{
|
||||
@ -45,37 +65,32 @@ export const catalogService = {
|
||||
activationFees: SimActivationFeeCatalogItem[];
|
||||
addons: SimCatalogProduct[];
|
||||
}> {
|
||||
const response = await apiClient.GET("/api/catalog/sim/plans");
|
||||
return getDataOrDefault(response, {
|
||||
plans: emptySimPlans,
|
||||
activationFees: emptySimActivationFees,
|
||||
addons: emptySimAddons,
|
||||
});
|
||||
const response = await apiClient.GET<typeof defaultSimCatalog>("/catalog/sim/plans");
|
||||
return getDataOrDefault<typeof defaultSimCatalog>(response, defaultSimCatalog);
|
||||
},
|
||||
|
||||
async getSimActivationFees(): Promise<SimActivationFeeCatalogItem[]> {
|
||||
const response = await apiClient.GET("/api/catalog/sim/activation-fees");
|
||||
return getDataOrDefault(response, emptySimActivationFees);
|
||||
const response = await apiClient.GET<SimActivationFeeCatalogItem[]>(
|
||||
"/catalog/sim/activation-fees"
|
||||
);
|
||||
return getDataOrDefault<SimActivationFeeCatalogItem[]>(response, emptySimActivationFees);
|
||||
},
|
||||
|
||||
async getSimAddons(): Promise<SimCatalogProduct[]> {
|
||||
const response = await apiClient.GET("/api/catalog/sim/addons");
|
||||
return getDataOrDefault(response, emptySimAddons);
|
||||
const response = await apiClient.GET<SimCatalogProduct[]>("/catalog/sim/addons");
|
||||
return getDataOrDefault<SimCatalogProduct[]>(response, emptySimAddons);
|
||||
},
|
||||
|
||||
async getVpnCatalog(): Promise<{
|
||||
plans: VpnCatalogProduct[];
|
||||
activationFees: VpnCatalogProduct[];
|
||||
}> {
|
||||
const response = await apiClient.GET("/api/catalog/vpn/plans");
|
||||
return getDataOrDefault(response, {
|
||||
plans: emptyVpnPlans,
|
||||
activationFees: emptyVpnPlans,
|
||||
});
|
||||
const response = await apiClient.GET<typeof defaultVpnCatalog>("/catalog/vpn/plans");
|
||||
return getDataOrDefault<typeof defaultVpnCatalog>(response, defaultVpnCatalog);
|
||||
},
|
||||
|
||||
async getVpnActivationFees(): Promise<VpnCatalogProduct[]> {
|
||||
const response = await apiClient.GET("/api/catalog/vpn/activation-fees");
|
||||
return getDataOrDefault(response, emptyVpnPlans);
|
||||
const response = await apiClient.GET<VpnCatalogProduct[]>("/catalog/vpn/activation-fees");
|
||||
return getDataOrDefault<VpnCatalogProduct[]>(response, emptyVpnPlans);
|
||||
},
|
||||
};
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
*/
|
||||
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { useAuthStore } from "@/features/auth/services/auth.store";
|
||||
import { useAuthSession } from "@/features/auth/services/auth.store";
|
||||
import { apiClient, queryKeys, getDataOrThrow } from "@/lib/api";
|
||||
import type { DashboardSummary, DashboardError } from "@customer-portal/domain";
|
||||
|
||||
@ -23,12 +23,12 @@ class DashboardDataError extends Error {
|
||||
* Hook for fetching dashboard summary data
|
||||
*/
|
||||
export function useDashboardSummary() {
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery<DashboardSummary, DashboardError>({
|
||||
queryKey: queryKeys.dashboard.summary(),
|
||||
queryFn: async () => {
|
||||
if (!tokens?.accessToken) {
|
||||
if (!isAuthenticated) {
|
||||
throw new DashboardDataError(
|
||||
"AUTHENTICATION_REQUIRED",
|
||||
"Authentication required to fetch dashboard data"
|
||||
@ -36,7 +36,7 @@ export function useDashboardSummary() {
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await apiClient.GET<DashboardSummary>("/api/dashboard/summary");
|
||||
const response = await apiClient.GET<DashboardSummary>("/dashboard/summary");
|
||||
return getDataOrThrow<DashboardSummary>(response, "Dashboard summary response was empty");
|
||||
} catch (error) {
|
||||
// Transform API errors to DashboardError format
|
||||
@ -55,7 +55,7 @@ export function useDashboardSummary() {
|
||||
},
|
||||
staleTime: 2 * 60 * 1000, // 2 minutes
|
||||
gcTime: 5 * 60 * 1000, // 5 minutes (formerly cacheTime)
|
||||
enabled: isAuthenticated && !!tokens?.accessToken,
|
||||
enabled: isAuthenticated,
|
||||
retry: (failureCount, error) => {
|
||||
// Don't retry authentication errors
|
||||
if (error?.code === "AUTHENTICATION_REQUIRED") {
|
||||
|
||||
@ -1,40 +1,8 @@
|
||||
import { createClient } from "@/lib/api";
|
||||
import { apiClient } from "@/lib/api";
|
||||
import type { CreateOrderRequest } from "@customer-portal/domain";
|
||||
|
||||
const API_BASE = process.env.NEXT_PUBLIC_API_BASE || "http://localhost:4000";
|
||||
|
||||
interface AuthStoreSnapshot {
|
||||
state?: {
|
||||
tokens?: {
|
||||
accessToken?: unknown;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
const getAuthHeader = (): string | undefined => {
|
||||
if (typeof window === "undefined") return undefined;
|
||||
|
||||
const authStore = window.localStorage.getItem("auth-store");
|
||||
if (!authStore) return undefined;
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(authStore) as AuthStoreSnapshot;
|
||||
const token = parsed?.state?.tokens?.accessToken;
|
||||
return typeof token === "string" && token ? `Bearer ${token}` : undefined;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
const createAuthedClient = () =>
|
||||
createClient({
|
||||
baseUrl: API_BASE,
|
||||
getAuthHeader,
|
||||
});
|
||||
|
||||
async function createOrder<T = { sfOrderId: string }>(payload: CreateOrderRequest): Promise<T> {
|
||||
const apiClient = createAuthedClient();
|
||||
const response = await apiClient.POST("/api/orders", { body: payload });
|
||||
const response = await apiClient.POST("/orders", { body: payload });
|
||||
if (!response.data) {
|
||||
throw new Error("Order creation failed");
|
||||
}
|
||||
@ -42,14 +10,12 @@ async function createOrder<T = { sfOrderId: string }>(payload: CreateOrderReques
|
||||
}
|
||||
|
||||
async function getMyOrders<T = unknown[]>(): Promise<T> {
|
||||
const apiClient = createAuthedClient();
|
||||
const response = await apiClient.GET("/api/orders/user");
|
||||
const response = await apiClient.GET("/orders/user");
|
||||
return (response.data ?? []) as T;
|
||||
}
|
||||
|
||||
async function getOrderById<T = unknown>(orderId: string): Promise<T> {
|
||||
const apiClient = createAuthedClient();
|
||||
const response = await apiClient.GET("/api/orders/{sfOrderId}", {
|
||||
const response = await apiClient.GET("/orders/{sfOrderId}", {
|
||||
params: { path: { sfOrderId: orderId } },
|
||||
});
|
||||
if (!response.data) {
|
||||
|
||||
@ -42,7 +42,7 @@ export function ChangePlanModal({
|
||||
}
|
||||
setLoading(true);
|
||||
try {
|
||||
await apiClient.POST("/api/subscriptions/{id}/sim/change-plan", {
|
||||
await apiClient.POST("/subscriptions/{id}/sim/change-plan", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
body: {
|
||||
newPlanCode,
|
||||
|
||||
@ -58,7 +58,7 @@ export function SimActions({
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
await apiClient.POST("/api/subscriptions/{id}/sim/reissue-esim", {
|
||||
await apiClient.POST("/subscriptions/{id}/sim/reissue-esim", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
});
|
||||
|
||||
@ -77,7 +77,7 @@ export function SimActions({
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
await apiClient.POST("/api/subscriptions/{id}/sim/cancel", {
|
||||
await apiClient.POST("/subscriptions/{id}/sim/cancel", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
});
|
||||
|
||||
|
||||
@ -75,7 +75,7 @@ export function SimFeatureToggles({
|
||||
if (nt !== initial.nt) featurePayload.networkType = nt;
|
||||
|
||||
if (Object.keys(featurePayload).length > 0) {
|
||||
await apiClient.POST("/api/subscriptions/{id}/sim/features", {
|
||||
await apiClient.POST("/subscriptions/{id}/sim/features", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
body: featurePayload,
|
||||
});
|
||||
|
||||
@ -30,7 +30,7 @@ export function SimManagementSection({ subscriptionId }: SimManagementSectionPro
|
||||
try {
|
||||
setError(null);
|
||||
|
||||
const response = await apiClient.GET("/api/subscriptions/{id}/sim", {
|
||||
const response = await apiClient.GET("/subscriptions/{id}/sim", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
});
|
||||
|
||||
|
||||
@ -45,7 +45,7 @@ export function TopUpModal({ subscriptionId, onClose, onSuccess, onError }: TopU
|
||||
quotaMb: getCurrentAmountMb(),
|
||||
};
|
||||
|
||||
await apiClient.POST("/api/subscriptions/{id}/sim/top-up", {
|
||||
await apiClient.POST("/subscriptions/{id}/sim/top-up", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
body: requestBody,
|
||||
});
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { apiClient, queryKeys, getDataOrDefault, getDataOrThrow, getNullableData } from "@/lib/api";
|
||||
import { useAuthStore } from "@/features/auth/services";
|
||||
import { useAuthSession } from "@/features/auth/services";
|
||||
import type { InvoiceList, Subscription, SubscriptionList } from "@customer-portal/domain";
|
||||
|
||||
interface UseSubscriptionsOptions {
|
||||
@ -52,20 +52,20 @@ function toSubscriptionList(payload?: SubscriptionList | Subscription[] | null):
|
||||
*/
|
||||
export function useSubscriptions(options: UseSubscriptionsOptions = {}) {
|
||||
const { status } = options;
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery<SubscriptionList>({
|
||||
queryKey: queryKeys.subscriptions.list(status ? { status } : undefined),
|
||||
queryFn: async () => {
|
||||
const response = await apiClient.GET(
|
||||
"/api/subscriptions",
|
||||
const response = await apiClient.GET<SubscriptionList>(
|
||||
"/subscriptions",
|
||||
status ? { params: { query: { status } } } : undefined
|
||||
);
|
||||
return toSubscriptionList(getNullableData(response));
|
||||
return toSubscriptionList(getNullableData<SubscriptionList>(response));
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 10 * 60 * 1000,
|
||||
enabled: isAuthenticated && !!tokens?.accessToken,
|
||||
enabled: isAuthenticated,
|
||||
});
|
||||
}
|
||||
|
||||
@ -73,17 +73,17 @@ export function useSubscriptions(options: UseSubscriptionsOptions = {}) {
|
||||
* Hook to fetch active subscriptions only
|
||||
*/
|
||||
export function useActiveSubscriptions() {
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery<Subscription[]>({
|
||||
queryKey: queryKeys.subscriptions.active(),
|
||||
queryFn: async () => {
|
||||
const response = await apiClient.GET("/api/subscriptions/active");
|
||||
return getDataOrDefault(response, [] as Subscription[]);
|
||||
const response = await apiClient.GET<Subscription[]>("/subscriptions/active");
|
||||
return getDataOrDefault<Subscription[]>(response, []);
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 10 * 60 * 1000,
|
||||
enabled: isAuthenticated && !!tokens?.accessToken,
|
||||
enabled: isAuthenticated,
|
||||
});
|
||||
}
|
||||
|
||||
@ -91,17 +91,17 @@ export function useActiveSubscriptions() {
|
||||
* Hook to fetch subscription statistics
|
||||
*/
|
||||
export function useSubscriptionStats() {
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery({
|
||||
queryKey: queryKeys.subscriptions.stats(),
|
||||
queryFn: async () => {
|
||||
const response = await apiClient.GET("/api/subscriptions/stats");
|
||||
return getDataOrDefault(response, emptyStats);
|
||||
const response = await apiClient.GET<typeof emptyStats>("/subscriptions/stats");
|
||||
return getDataOrDefault<typeof emptyStats>(response, emptyStats);
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 10 * 60 * 1000,
|
||||
enabled: isAuthenticated && !!tokens?.accessToken,
|
||||
enabled: isAuthenticated,
|
||||
});
|
||||
}
|
||||
|
||||
@ -109,19 +109,19 @@ export function useSubscriptionStats() {
|
||||
* Hook to fetch a specific subscription
|
||||
*/
|
||||
export function useSubscription(subscriptionId: number) {
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery<Subscription>({
|
||||
queryKey: queryKeys.subscriptions.detail(String(subscriptionId)),
|
||||
queryFn: async () => {
|
||||
const response = await apiClient.GET("/api/subscriptions/{id}", {
|
||||
const response = await apiClient.GET<Subscription>("/subscriptions/{id}", {
|
||||
params: { path: { id: subscriptionId } },
|
||||
});
|
||||
return getDataOrThrow<Subscription>(response, "Subscription not found");
|
||||
},
|
||||
staleTime: 5 * 60 * 1000,
|
||||
gcTime: 10 * 60 * 1000,
|
||||
enabled: isAuthenticated && !!tokens?.accessToken && subscriptionId > 0,
|
||||
enabled: isAuthenticated && subscriptionId > 0,
|
||||
});
|
||||
}
|
||||
|
||||
@ -133,18 +133,18 @@ export function useSubscriptionInvoices(
|
||||
options: { page?: number; limit?: number } = {}
|
||||
) {
|
||||
const { page = 1, limit = 10 } = options;
|
||||
const { isAuthenticated, tokens } = useAuthStore();
|
||||
const { isAuthenticated } = useAuthSession();
|
||||
|
||||
return useQuery<InvoiceList>({
|
||||
queryKey: queryKeys.subscriptions.invoices(subscriptionId, { page, limit }),
|
||||
queryFn: async () => {
|
||||
const response = await apiClient.GET("/api/subscriptions/{id}/invoices", {
|
||||
const response = await apiClient.GET<InvoiceList>("/subscriptions/{id}/invoices", {
|
||||
params: {
|
||||
path: { id: subscriptionId },
|
||||
query: { page, limit },
|
||||
},
|
||||
});
|
||||
return getDataOrDefault(response, {
|
||||
return getDataOrDefault<InvoiceList>(response, {
|
||||
...emptyInvoiceList,
|
||||
pagination: {
|
||||
...emptyInvoiceList.pagination,
|
||||
@ -154,6 +154,6 @@ export function useSubscriptionInvoices(
|
||||
},
|
||||
staleTime: 60 * 1000,
|
||||
gcTime: 5 * 60 * 1000,
|
||||
enabled: isAuthenticated && !!tokens?.accessToken && subscriptionId > 0,
|
||||
enabled: isAuthenticated && subscriptionId > 0,
|
||||
});
|
||||
}
|
||||
|
||||
@ -21,21 +21,21 @@ export interface SimInfo<T, E = unknown> {
|
||||
|
||||
export const simActionsService = {
|
||||
async topUp(subscriptionId: string, request: TopUpRequest): Promise<void> {
|
||||
await apiClient.POST("/api/subscriptions/{subscriptionId}/sim/top-up", {
|
||||
await apiClient.POST("/subscriptions/{subscriptionId}/sim/top-up", {
|
||||
params: { path: { subscriptionId } },
|
||||
body: request,
|
||||
});
|
||||
},
|
||||
|
||||
async changePlan(subscriptionId: string, request: ChangePlanRequest): Promise<void> {
|
||||
await apiClient.POST("/api/subscriptions/{subscriptionId}/sim/change-plan", {
|
||||
await apiClient.POST("/subscriptions/{subscriptionId}/sim/change-plan", {
|
||||
params: { path: { subscriptionId } },
|
||||
body: request,
|
||||
});
|
||||
},
|
||||
|
||||
async cancel(subscriptionId: string, request: CancelRequest): Promise<void> {
|
||||
await apiClient.POST("/api/subscriptions/{subscriptionId}/sim/cancel", {
|
||||
await apiClient.POST("/subscriptions/{subscriptionId}/sim/cancel", {
|
||||
params: { path: { subscriptionId } },
|
||||
body: request,
|
||||
});
|
||||
@ -43,7 +43,7 @@ export const simActionsService = {
|
||||
|
||||
async getSimInfo<T, E = unknown>(subscriptionId: string): Promise<SimInfo<T, E> | null> {
|
||||
const response = await apiClient.GET<SimInfo<T, E> | null>(
|
||||
"/api/subscriptions/{subscriptionId}/sim/info",
|
||||
"/subscriptions/{subscriptionId}/sim/info",
|
||||
{
|
||||
params: { path: { subscriptionId } },
|
||||
}
|
||||
|
||||
@ -9,7 +9,6 @@ export * from "./response-helpers";
|
||||
// Import createClient for internal use
|
||||
import { createClient } from "./runtime/client";
|
||||
|
||||
// Create the apiClient instance that the app expects
|
||||
export const apiClient = createClient();
|
||||
|
||||
// Query keys for React Query - matching the expected structure
|
||||
|
||||
@ -135,19 +135,43 @@ export function createClient(options: CreateClientOptions = {}): ApiClient {
|
||||
|
||||
const handleError = options.handleError ?? defaultHandleError;
|
||||
|
||||
const normalizePath = (path: string): string => {
|
||||
if (!path) return "/api";
|
||||
const ensured = path.startsWith("/") ? path : `/${path}`;
|
||||
if (ensured === "/api" || ensured.startsWith("/api/")) {
|
||||
return ensured;
|
||||
}
|
||||
return `/api${ensured}`;
|
||||
};
|
||||
|
||||
if (typeof client.use === "function") {
|
||||
const resolveAuthHeader = options.getAuthHeader;
|
||||
|
||||
const middleware: Middleware = {
|
||||
onRequest({ request }: MiddlewareCallbackParams) {
|
||||
if (!resolveAuthHeader) return;
|
||||
if (!request || typeof request.headers?.has !== "function") return;
|
||||
if (request.headers.has("Authorization")) return;
|
||||
if (!request) return;
|
||||
|
||||
const nextRequest = new Request(request, {
|
||||
credentials: "include",
|
||||
});
|
||||
|
||||
if (!resolveAuthHeader) {
|
||||
return nextRequest;
|
||||
}
|
||||
if (typeof nextRequest.headers?.has !== "function") {
|
||||
return nextRequest;
|
||||
}
|
||||
if (nextRequest.headers.has("Authorization")) {
|
||||
return nextRequest;
|
||||
}
|
||||
|
||||
const headerValue = resolveAuthHeader();
|
||||
if (!headerValue) return;
|
||||
if (!headerValue) {
|
||||
return nextRequest;
|
||||
}
|
||||
|
||||
request.headers.set("Authorization", headerValue);
|
||||
nextRequest.headers.set("Authorization", headerValue);
|
||||
return nextRequest;
|
||||
},
|
||||
async onResponse({ response }: MiddlewareCallbackParams & { response: Response }) {
|
||||
await handleError(response);
|
||||
@ -157,7 +181,29 @@ export function createClient(options: CreateClientOptions = {}): ApiClient {
|
||||
client.use(middleware as never);
|
||||
}
|
||||
|
||||
return client as ApiClient;
|
||||
const flexibleClient = client as ApiClient;
|
||||
|
||||
flexibleClient.GET = (async (path: string, options?: unknown) => {
|
||||
return (client.GET as FlexibleApiMethods["GET"])(normalizePath(path), options);
|
||||
}) as ApiClient["GET"];
|
||||
|
||||
flexibleClient.POST = (async (path: string, options?: unknown) => {
|
||||
return (client.POST as FlexibleApiMethods["POST"])(normalizePath(path), options);
|
||||
}) as ApiClient["POST"];
|
||||
|
||||
flexibleClient.PUT = (async (path: string, options?: unknown) => {
|
||||
return (client.PUT as FlexibleApiMethods["PUT"])(normalizePath(path), options);
|
||||
}) as ApiClient["PUT"];
|
||||
|
||||
flexibleClient.PATCH = (async (path: string, options?: unknown) => {
|
||||
return (client.PATCH as FlexibleApiMethods["PATCH"])(normalizePath(path), options);
|
||||
}) as ApiClient["PATCH"];
|
||||
|
||||
flexibleClient.DELETE = (async (path: string, options?: unknown) => {
|
||||
return (client.DELETE as FlexibleApiMethods["DELETE"])(normalizePath(path), options);
|
||||
}) as ApiClient["DELETE"];
|
||||
|
||||
return flexibleClient;
|
||||
}
|
||||
|
||||
export type { paths };
|
||||
|
||||
383
docs/MEMORY_OPTIMIZATION.md
Normal file
383
docs/MEMORY_OPTIMIZATION.md
Normal file
@ -0,0 +1,383 @@
|
||||
# 📊 Bundle Analysis Guide
|
||||
|
||||
Simple guide for analyzing and optimizing bundle sizes.
|
||||
|
||||
## 🎯 Quick Analysis
|
||||
|
||||
### Frontend Bundle Analysis
|
||||
```bash
|
||||
# Analyze bundle size
|
||||
pnpm analyze
|
||||
|
||||
# Or use the script
|
||||
pnpm bundle-analyze
|
||||
```
|
||||
|
||||
### Key Metrics to Monitor
|
||||
- **First Load JS**: Should be < 250KB
|
||||
- **Total Bundle Size**: Should be < 1MB
|
||||
- **Largest Chunks**: Identify optimization targets
|
||||
|
||||
## 🎯 Frontend Optimizations
|
||||
|
||||
### 1. Bundle Analysis & Code Splitting
|
||||
|
||||
```bash
|
||||
# Analyze current bundle size
|
||||
cd apps/portal
|
||||
pnpm run analyze
|
||||
|
||||
# Build with analysis
|
||||
pnpm run build:analyze
|
||||
```
|
||||
|
||||
### 2. Dynamic Imports
|
||||
|
||||
```typescript
|
||||
// Before: Static import
|
||||
import { HeavyComponent } from './HeavyComponent';
|
||||
|
||||
// After: Dynamic import
|
||||
const HeavyComponent = lazy(() => import('./HeavyComponent'));
|
||||
|
||||
// Route-level code splitting
|
||||
const Dashboard = lazy(() => import('./pages/Dashboard'));
|
||||
const Orders = lazy(() => import('./pages/Orders'));
|
||||
```
|
||||
|
||||
### 3. Image Optimization
|
||||
|
||||
```typescript
|
||||
// Use Next.js Image component with optimization
|
||||
import Image from 'next/image';
|
||||
|
||||
<Image
|
||||
src="/hero.jpg"
|
||||
alt="Hero"
|
||||
width={800}
|
||||
height={600}
|
||||
priority={false} // Lazy load non-critical images
|
||||
placeholder="blur" // Add blur placeholder
|
||||
/>
|
||||
```
|
||||
|
||||
### 4. Tree Shaking Optimization
|
||||
|
||||
```typescript
|
||||
// Before: Import entire library
|
||||
import * as _ from 'lodash';
|
||||
|
||||
// After: Import specific functions
|
||||
import { debounce, throttle } from 'lodash-es';
|
||||
|
||||
// Or use individual packages
|
||||
import debounce from 'lodash.debounce';
|
||||
```
|
||||
|
||||
### 5. React Query Optimization
|
||||
|
||||
```typescript
|
||||
// Optimize React Query cache
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
// Reduce memory usage
|
||||
cacheTime: 5 * 60 * 1000, // 5 minutes
|
||||
staleTime: 1 * 60 * 1000, // 1 minute
|
||||
// Limit concurrent queries
|
||||
refetchOnWindowFocus: false,
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## 🎯 Backend Optimizations
|
||||
|
||||
### 1. Heap Size Optimization
|
||||
|
||||
```json
|
||||
// package.json - Optimized heap sizes
|
||||
{
|
||||
"scripts": {
|
||||
"dev": "NODE_OPTIONS=\"--max-old-space-size=2048\" nest start --watch",
|
||||
"build": "NODE_OPTIONS=\"--max-old-space-size=3072\" nest build",
|
||||
"type-check": "NODE_OPTIONS=\"--max-old-space-size=4096\" tsc --noEmit"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Streaming Responses
|
||||
|
||||
```typescript
|
||||
// For large data responses
|
||||
@Get('large-dataset')
|
||||
async getLargeDataset(@Res() res: Response) {
|
||||
const stream = this.dataService.createDataStream();
|
||||
|
||||
res.setHeader('Content-Type', 'application/json');
|
||||
res.setHeader('Transfer-Encoding', 'chunked');
|
||||
|
||||
stream.pipe(res);
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Memory-Efficient Pagination
|
||||
|
||||
```typescript
|
||||
// Cursor-based pagination instead of offset
|
||||
interface PaginationOptions {
|
||||
cursor?: string;
|
||||
limit: number; // Max 100
|
||||
}
|
||||
|
||||
async findWithCursor(options: PaginationOptions) {
|
||||
return this.prisma.order.findMany({
|
||||
take: Math.min(options.limit, 100),
|
||||
...(options.cursor && {
|
||||
cursor: { id: options.cursor },
|
||||
skip: 1,
|
||||
}),
|
||||
orderBy: { createdAt: 'desc' },
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Request/Response Caching
|
||||
|
||||
```typescript
|
||||
// Memory-efficient caching
|
||||
@Injectable()
|
||||
export class CacheService {
|
||||
private readonly cache = new Map<string, { data: any; expires: number }>();
|
||||
private readonly maxSize = 1000; // Limit cache size
|
||||
|
||||
set(key: string, data: any, ttl: number = 300000) {
|
||||
// Implement LRU eviction
|
||||
if (this.cache.size >= this.maxSize) {
|
||||
const firstKey = this.cache.keys().next().value;
|
||||
this.cache.delete(firstKey);
|
||||
}
|
||||
|
||||
this.cache.set(key, {
|
||||
data,
|
||||
expires: Date.now() + ttl,
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 5. Database Connection Optimization
|
||||
|
||||
```typescript
|
||||
// Optimize Prisma connection pool
|
||||
const prisma = new PrismaClient({
|
||||
datasources: {
|
||||
db: {
|
||||
url: process.env.DATABASE_URL,
|
||||
},
|
||||
},
|
||||
// Optimize connection pool
|
||||
__internal: {
|
||||
engine: {
|
||||
connectionLimit: 10, // Reduce from default 20
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
## 🎯 Dependency Optimizations
|
||||
|
||||
### 1. Replace Heavy Dependencies
|
||||
|
||||
```bash
|
||||
# Before: moment.js (67KB)
|
||||
npm uninstall moment
|
||||
|
||||
# After: date-fns (13KB with tree shaking)
|
||||
npm install date-fns
|
||||
|
||||
# Before: lodash (71KB)
|
||||
npm uninstall lodash
|
||||
|
||||
# After: Individual functions or native alternatives
|
||||
npm install lodash-es # Better tree shaking
|
||||
```
|
||||
|
||||
### 2. Bundle Analysis Results
|
||||
|
||||
```bash
|
||||
# Run bundle analysis
|
||||
./scripts/memory-optimization.sh
|
||||
|
||||
# Key metrics to monitor:
|
||||
# - First Load JS: < 250KB
|
||||
# - Total Bundle Size: < 1MB
|
||||
# - Largest Chunks: Identify optimization targets
|
||||
```
|
||||
|
||||
### 3. Webpack Optimizations (Already Implemented)
|
||||
|
||||
- **Code Splitting**: Separate vendor, common, and UI chunks
|
||||
- **Tree Shaking**: Remove unused code
|
||||
- **Compression**: Gzip/Brotli compression
|
||||
- **Caching**: Long-term caching for static assets
|
||||
|
||||
## 🎯 Runtime Optimizations
|
||||
|
||||
### 1. Memory Leak Detection
|
||||
|
||||
```typescript
|
||||
// Add memory monitoring
|
||||
@Injectable()
|
||||
export class MemoryMonitorService {
|
||||
@Cron('*/5 * * * *') // Every 5 minutes
|
||||
checkMemoryUsage() {
|
||||
const usage = process.memoryUsage();
|
||||
|
||||
if (usage.heapUsed > 500 * 1024 * 1024) { // 500MB
|
||||
this.logger.warn('High memory usage detected', {
|
||||
heapUsed: `${Math.round(usage.heapUsed / 1024 / 1024)}MB`,
|
||||
heapTotal: `${Math.round(usage.heapTotal / 1024 / 1024)}MB`,
|
||||
external: `${Math.round(usage.external / 1024 / 1024)}MB`,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Garbage Collection Optimization
|
||||
|
||||
```bash
|
||||
# Enable GC logging in production
|
||||
NODE_OPTIONS="--max-old-space-size=2048 --gc-interval=100" npm start
|
||||
|
||||
# Monitor GC patterns
|
||||
NODE_OPTIONS="--trace-gc --trace-gc-verbose" npm run dev
|
||||
```
|
||||
|
||||
### 3. Worker Threads for CPU-Intensive Tasks
|
||||
|
||||
```typescript
|
||||
// For heavy computations
|
||||
import { Worker, isMainThread, parentPort } from 'worker_threads';
|
||||
|
||||
if (isMainThread) {
|
||||
// Main thread
|
||||
const worker = new Worker(__filename);
|
||||
worker.postMessage({ data: largeDataset });
|
||||
|
||||
worker.on('message', (result) => {
|
||||
// Handle processed result
|
||||
});
|
||||
} else {
|
||||
// Worker thread
|
||||
parentPort?.on('message', ({ data }) => {
|
||||
const result = processLargeDataset(data);
|
||||
parentPort?.postMessage(result);
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## 📈 Monitoring & Metrics
|
||||
|
||||
### 1. Performance Monitoring
|
||||
|
||||
```typescript
|
||||
// Add performance metrics
|
||||
@Injectable()
|
||||
export class PerformanceService {
|
||||
trackMemoryUsage(operation: string) {
|
||||
const start = process.memoryUsage();
|
||||
|
||||
return {
|
||||
end: () => {
|
||||
const end = process.memoryUsage();
|
||||
const diff = {
|
||||
heapUsed: end.heapUsed - start.heapUsed,
|
||||
heapTotal: end.heapTotal - start.heapTotal,
|
||||
};
|
||||
|
||||
this.logger.debug(`Memory usage for ${operation}`, diff);
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Bundle Size Monitoring
|
||||
|
||||
```json
|
||||
// Add to CI/CD pipeline
|
||||
{
|
||||
"scripts": {
|
||||
"build:check-size": "npm run build && bundlesize"
|
||||
},
|
||||
"bundlesize": [
|
||||
{
|
||||
"path": ".next/static/js/*.js",
|
||||
"maxSize": "250kb"
|
||||
},
|
||||
{
|
||||
"path": ".next/static/css/*.css",
|
||||
"maxSize": "50kb"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## 🚀 Implementation Checklist
|
||||
|
||||
### Immediate Actions (Week 1)
|
||||
- [ ] Run bundle analysis: `pnpm run analyze`
|
||||
- [ ] Implement dynamic imports for heavy components
|
||||
- [ ] Optimize image loading with Next.js Image
|
||||
- [ ] Reduce heap allocation in development
|
||||
|
||||
### Short-term (Week 2-3)
|
||||
- [ ] Replace heavy dependencies (moment → date-fns)
|
||||
- [ ] Implement request caching
|
||||
- [ ] Add memory monitoring
|
||||
- [ ] Optimize database connection pool
|
||||
|
||||
### Long-term (Month 1)
|
||||
- [ ] Implement streaming for large responses
|
||||
- [ ] Add worker threads for CPU-intensive tasks
|
||||
- [ ] Set up continuous bundle size monitoring
|
||||
- [ ] Implement advanced caching strategies
|
||||
|
||||
## 🎯 Expected Results
|
||||
|
||||
### Memory Reduction Targets
|
||||
- **Frontend Bundle**: 30-50% reduction
|
||||
- **Backend Heap**: 25-40% reduction
|
||||
- **Build Time**: 20-30% improvement
|
||||
- **Runtime Memory**: 35-50% reduction
|
||||
|
||||
### Performance Improvements
|
||||
- **First Load**: < 2 seconds
|
||||
- **Page Transitions**: < 500ms
|
||||
- **API Response**: < 200ms (95th percentile)
|
||||
- **Memory Stability**: No memory leaks in 24h+ runs
|
||||
|
||||
## 🔧 Tools & Commands
|
||||
|
||||
```bash
|
||||
# Frontend analysis
|
||||
cd apps/portal && pnpm run analyze
|
||||
|
||||
# Backend memory check
|
||||
cd apps/bff && NODE_OPTIONS="--trace-gc" pnpm dev
|
||||
|
||||
# Full optimization analysis
|
||||
./scripts/memory-optimization.sh
|
||||
|
||||
# Dependency audit
|
||||
pnpm audit --recursive
|
||||
|
||||
# Bundle size check
|
||||
pnpm run build && ls -la .next/static/js/
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**Note**: Always test memory optimizations in a staging environment before deploying to production. Monitor application performance and user experience after implementing changes.
|
||||
File diff suppressed because one or more lines are too long
@ -12,7 +12,7 @@
|
||||
"predev": "pnpm --filter @customer-portal/domain build",
|
||||
"dev": "./scripts/dev/manage.sh apps",
|
||||
"dev:all": "pnpm --parallel --filter @customer-portal/domain --filter @customer-portal/portal --filter @customer-portal/bff run dev",
|
||||
"build": "NODE_OPTIONS=\"--max-old-space-size=12288 --max-semi-space-size=512\" pnpm --recursive --reporter=default run build",
|
||||
"build": "pnpm --recursive run build",
|
||||
"start": "pnpm --parallel --filter @customer-portal/portal --filter @customer-portal/bff run start",
|
||||
"test": "pnpm --recursive run test",
|
||||
"lint": "pnpm --recursive run lint",
|
||||
@ -21,13 +21,15 @@
|
||||
"format:check": "prettier -c .",
|
||||
"prepare": "husky",
|
||||
"type-check": "pnpm type-check:packages && pnpm type-check:apps",
|
||||
"type-check:workspace": "NODE_OPTIONS=\"--max-old-space-size=8192 --max-semi-space-size=256\" tsc -b --pretty false --noEmit",
|
||||
"type-check:workspace": "tsc -b --noEmit",
|
||||
"type-check:packages": "pnpm --workspace-concurrency=1 --filter @customer-portal/domain --filter @customer-portal/validation --filter @customer-portal/logging run type-check",
|
||||
"type-check:apps": "pnpm --workspace-concurrency=1 --filter @customer-portal/bff --filter @customer-portal/portal run type-check",
|
||||
"clean": "pnpm --recursive run clean",
|
||||
"dev:start": "./scripts/dev/manage.sh start",
|
||||
"dev:stop": "./scripts/dev/manage.sh stop",
|
||||
"dev:restart": "./scripts/dev/manage.sh restart",
|
||||
"analyze": "pnpm --filter @customer-portal/portal run analyze",
|
||||
"bundle-analyze": "./scripts/bundle-analyze.sh",
|
||||
"dev:tools": "./scripts/dev/manage.sh tools",
|
||||
"dev:apps": "./scripts/dev/manage.sh apps",
|
||||
"dev:logs": "./scripts/dev/manage.sh logs",
|
||||
@ -71,8 +73,5 @@
|
||||
"typescript": "^5.9.2",
|
||||
"typescript-eslint": "^8.40.0",
|
||||
"zod": "^4.1.9"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sendgrid/mail": "^8.1.5"
|
||||
}
|
||||
}
|
||||
|
||||
89
packages/domain/package-lock.json
generated
89
packages/domain/package-lock.json
generated
@ -1,89 +0,0 @@
|
||||
{
|
||||
"name": "@customer-portal/domain",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@customer-portal/domain",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"zod": "^4.1.9"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5.9.2"
|
||||
}
|
||||
},
|
||||
"../../node_modules/.pnpm/typescript@5.9.2/node_modules/typescript": {
|
||||
"version": "5.9.2",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@dprint/formatter": "^0.4.1",
|
||||
"@dprint/typescript": "0.93.4",
|
||||
"@esfx/canceltoken": "^1.0.0",
|
||||
"@eslint/js": "^9.20.0",
|
||||
"@octokit/rest": "^21.1.1",
|
||||
"@types/chai": "^4.3.20",
|
||||
"@types/diff": "^7.0.1",
|
||||
"@types/minimist": "^1.2.5",
|
||||
"@types/mocha": "^10.0.10",
|
||||
"@types/ms": "^0.7.34",
|
||||
"@types/node": "latest",
|
||||
"@types/source-map-support": "^0.5.10",
|
||||
"@types/which": "^3.0.4",
|
||||
"@typescript-eslint/rule-tester": "^8.24.1",
|
||||
"@typescript-eslint/type-utils": "^8.24.1",
|
||||
"@typescript-eslint/utils": "^8.24.1",
|
||||
"azure-devops-node-api": "^14.1.0",
|
||||
"c8": "^10.1.3",
|
||||
"chai": "^4.5.0",
|
||||
"chokidar": "^4.0.3",
|
||||
"diff": "^7.0.0",
|
||||
"dprint": "^0.49.0",
|
||||
"esbuild": "^0.25.0",
|
||||
"eslint": "^9.20.1",
|
||||
"eslint-formatter-autolinkable-stylish": "^1.4.0",
|
||||
"eslint-plugin-regexp": "^2.7.0",
|
||||
"fast-xml-parser": "^4.5.2",
|
||||
"glob": "^10.4.5",
|
||||
"globals": "^15.15.0",
|
||||
"hereby": "^1.10.0",
|
||||
"jsonc-parser": "^3.3.1",
|
||||
"knip": "^5.44.4",
|
||||
"minimist": "^1.2.8",
|
||||
"mocha": "^10.8.2",
|
||||
"mocha-fivemat-progress-reporter": "^0.1.0",
|
||||
"monocart-coverage-reports": "^2.12.1",
|
||||
"ms": "^2.1.3",
|
||||
"picocolors": "^1.1.1",
|
||||
"playwright": "^1.50.1",
|
||||
"source-map-support": "^0.5.21",
|
||||
"tslib": "^2.8.1",
|
||||
"typescript": "^5.7.3",
|
||||
"typescript-eslint": "^8.24.1",
|
||||
"which": "^3.0.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"resolved": "../../node_modules/.pnpm/typescript@5.9.2/node_modules/typescript",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/zod": {
|
||||
"version": "4.1.9",
|
||||
"resolved": "https://registry.npmjs.org/zod/-/zod-4.1.9.tgz",
|
||||
"integrity": "sha512-HI32jTq0AUAC125z30E8bQNz0RQ+9Uc+4J7V97gLYjZVKRjeydPgGt6dvQzFrav7MYOUGFqqOGiHpA/fdbd0cQ==",
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/colinhacks"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -79,7 +79,10 @@ export const checkPasswordNeededRequestSchema = z.object({
|
||||
});
|
||||
|
||||
export const refreshTokenRequestSchema = z.object({
|
||||
refreshToken: z.string().min(1, "Refresh token is required"),
|
||||
refreshToken: z
|
||||
.string()
|
||||
.min(1, "Refresh token is required")
|
||||
.optional(),
|
||||
deviceId: z.string().optional(),
|
||||
});
|
||||
|
||||
|
||||
289
pnpm-lock.yaml
generated
289
pnpm-lock.yaml
generated
@ -7,10 +7,6 @@ settings:
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
'@sendgrid/mail':
|
||||
specifier: ^8.1.5
|
||||
version: 8.1.5
|
||||
devDependencies:
|
||||
'@eslint/eslintrc':
|
||||
specifier: ^3.3.1
|
||||
@ -94,8 +90,8 @@ importers:
|
||||
specifier: ^6.14.0
|
||||
version: 6.16.0(prisma@6.16.0(typescript@5.9.2))(typescript@5.9.2)
|
||||
'@sendgrid/mail':
|
||||
specifier: ^8.1.3
|
||||
version: 8.1.5
|
||||
specifier: ^8.1.6
|
||||
version: 8.1.6
|
||||
bcrypt:
|
||||
specifier: ^6.0.0
|
||||
version: 6.0.0
|
||||
@ -112,8 +108,8 @@ importers:
|
||||
specifier: ^1.4.7
|
||||
version: 1.4.7
|
||||
express:
|
||||
specifier: ^4.21.2
|
||||
version: 4.21.2
|
||||
specifier: ^5.1.0
|
||||
version: 5.1.0
|
||||
helmet:
|
||||
specifier: ^8.1.0
|
||||
version: 8.1.0
|
||||
@ -132,6 +128,9 @@ importers:
|
||||
nestjs-zod:
|
||||
specifier: ^5.0.1
|
||||
version: 5.0.1(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/swagger@11.2.0(@nestjs/common@11.1.6(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2)(rxjs@7.8.2))(@nestjs/core@11.1.6)(class-transformer@0.5.1)(class-validator@0.14.2)(reflect-metadata@0.2.2))(rxjs@7.8.2)(zod@4.1.9)
|
||||
p-queue:
|
||||
specifier: ^7.4.1
|
||||
version: 7.4.1
|
||||
passport:
|
||||
specifier: ^0.7.0
|
||||
version: 0.7.0
|
||||
@ -163,8 +162,8 @@ importers:
|
||||
specifier: ^2.0.0
|
||||
version: 2.0.0
|
||||
uuid:
|
||||
specifier: ^11.1.0
|
||||
version: 11.1.0
|
||||
specifier: ^13.0.0
|
||||
version: 13.0.0
|
||||
zod:
|
||||
specifier: ^4.1.9
|
||||
version: 4.1.9
|
||||
@ -209,8 +208,8 @@ importers:
|
||||
specifier: ^6.0.3
|
||||
version: 6.0.3
|
||||
'@types/uuid':
|
||||
specifier: ^10.0.0
|
||||
version: 10.0.0
|
||||
specifier: ^11.0.0
|
||||
version: 11.0.0
|
||||
jest:
|
||||
specifier: ^30.0.5
|
||||
version: 30.1.3(@types/node@24.3.1)(ts-node@10.9.2(@types/node@24.3.1)(typescript@5.9.2))
|
||||
@ -1552,8 +1551,8 @@ packages:
|
||||
resolution: {integrity: sha512-Ze7WuW2Xzy5GT5WRx+yEv89fsg/pgy3T1E3FS0QEx0/VvRmigMZ5qyVGhJz4SxomegDkzXv/i0aFPpHKN8qdAA==}
|
||||
engines: {node: '>= 12.0.0'}
|
||||
|
||||
'@sendgrid/mail@8.1.5':
|
||||
resolution: {integrity: sha512-W+YuMnkVs4+HA/bgfto4VHKcPKLc7NiZ50/NH2pzO6UHCCFuq8/GNB98YJlLEr/ESDyzAaDr7lVE7hoBwFTT3Q==}
|
||||
'@sendgrid/mail@8.1.6':
|
||||
resolution: {integrity: sha512-/ZqxUvKeEztU9drOoPC/8opEPOk+jLlB2q4+xpx6HVLq6aFu3pMpalkTpAQz8XfRfpLp8O25bh6pGPcHDCYpqg==}
|
||||
engines: {node: '>=12.*'}
|
||||
|
||||
'@sinclair/typebox@0.34.41':
|
||||
@ -1832,8 +1831,9 @@ packages:
|
||||
'@types/supertest@6.0.3':
|
||||
resolution: {integrity: sha512-8WzXq62EXFhJ7QsH3Ocb/iKQ/Ty9ZVWnVzoTKc9tyyFRRF3a74Tk2+TLFgaFFw364Ere+npzHKEJ6ga2LzIL7w==}
|
||||
|
||||
'@types/uuid@10.0.0':
|
||||
resolution: {integrity: sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ==}
|
||||
'@types/uuid@11.0.0':
|
||||
resolution: {integrity: sha512-HVyk8nj2m+jcFRNazzqyVKiZezyhDKrGUA3jlEcg/nZ6Ms+qHwocba1Y/AaVaznJTAM9xpdFSh+ptbNrhOGvZA==}
|
||||
deprecated: This is a stub types definition. uuid provides its own type definitions, so you do not need this installed.
|
||||
|
||||
'@types/validator@13.15.3':
|
||||
resolution: {integrity: sha512-7bcUmDyS6PN3EuD9SlGGOxM77F8WLVsrwkxyWxKnxzmXoequ6c7741QBrANq6htVRGOITJ7z72mTP6Z4XyuG+Q==}
|
||||
@ -2052,10 +2052,6 @@ packages:
|
||||
'@xtuc/long@4.2.2':
|
||||
resolution: {integrity: sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==}
|
||||
|
||||
accepts@1.3.8:
|
||||
resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
accepts@2.0.0:
|
||||
resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==}
|
||||
engines: {node: '>= 0.6'}
|
||||
@ -2172,9 +2168,6 @@ packages:
|
||||
resolution: {integrity: sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==}
|
||||
engines: {node: '>= 0.4'}
|
||||
|
||||
array-flatten@1.1.1:
|
||||
resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==}
|
||||
|
||||
array-includes@3.1.9:
|
||||
resolution: {integrity: sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@ -2286,10 +2279,6 @@ packages:
|
||||
bl@4.1.0:
|
||||
resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==}
|
||||
|
||||
body-parser@1.20.3:
|
||||
resolution: {integrity: sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==}
|
||||
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
|
||||
|
||||
body-parser@2.2.0:
|
||||
resolution: {integrity: sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==}
|
||||
engines: {node: '>=18'}
|
||||
@ -2508,10 +2497,6 @@ packages:
|
||||
resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==}
|
||||
engines: {node: ^14.18.0 || >=16.10.0}
|
||||
|
||||
content-disposition@0.5.4:
|
||||
resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
content-disposition@1.0.0:
|
||||
resolution: {integrity: sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
@ -2534,10 +2519,6 @@ packages:
|
||||
resolution: {integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==}
|
||||
engines: {node: '>=6.6.0'}
|
||||
|
||||
cookie@0.7.1:
|
||||
resolution: {integrity: sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
cookie@0.7.2:
|
||||
resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==}
|
||||
engines: {node: '>= 0.6'}
|
||||
@ -2612,14 +2593,6 @@ packages:
|
||||
dateformat@4.6.3:
|
||||
resolution: {integrity: sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==}
|
||||
|
||||
debug@2.6.9:
|
||||
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
|
||||
peerDependencies:
|
||||
supports-color: '*'
|
||||
peerDependenciesMeta:
|
||||
supports-color:
|
||||
optional: true
|
||||
|
||||
debug@3.2.7:
|
||||
resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==}
|
||||
peerDependencies:
|
||||
@ -2685,10 +2658,6 @@ packages:
|
||||
destr@2.0.5:
|
||||
resolution: {integrity: sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==}
|
||||
|
||||
destroy@1.2.0:
|
||||
resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
|
||||
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
|
||||
|
||||
detect-libc@2.0.4:
|
||||
resolution: {integrity: sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==}
|
||||
engines: {node: '>=8'}
|
||||
@ -2753,10 +2722,6 @@ packages:
|
||||
resolution: {integrity: sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==}
|
||||
engines: {node: '>=14'}
|
||||
|
||||
encodeurl@1.0.2:
|
||||
resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}
|
||||
engines: {node: '>= 0.8'}
|
||||
|
||||
encodeurl@2.0.0:
|
||||
resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==}
|
||||
engines: {node: '>= 0.8'}
|
||||
@ -2977,6 +2942,9 @@ packages:
|
||||
resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
eventemitter3@5.0.1:
|
||||
resolution: {integrity: sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==}
|
||||
|
||||
events@3.3.0:
|
||||
resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==}
|
||||
engines: {node: '>=0.8.x'}
|
||||
@ -2993,10 +2961,6 @@ packages:
|
||||
resolution: {integrity: sha512-xvHszRavo28ejws8FpemjhwswGj4w/BetHIL8cU49u4sGyXDw2+p3YbeDbj6xzlxi6kWTjIRSTJ+9sNXPnF0Zg==}
|
||||
engines: {node: ^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0}
|
||||
|
||||
express@4.21.2:
|
||||
resolution: {integrity: sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==}
|
||||
engines: {node: '>= 0.10.0'}
|
||||
|
||||
express@5.1.0:
|
||||
resolution: {integrity: sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==}
|
||||
engines: {node: '>= 18'}
|
||||
@ -3083,10 +3047,6 @@ packages:
|
||||
resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
finalhandler@1.3.1:
|
||||
resolution: {integrity: sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==}
|
||||
engines: {node: '>= 0.8'}
|
||||
|
||||
finalhandler@2.1.0:
|
||||
resolution: {integrity: sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==}
|
||||
engines: {node: '>= 0.8'}
|
||||
@ -3142,10 +3102,6 @@ packages:
|
||||
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
fresh@0.5.2:
|
||||
resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
fresh@2.0.0:
|
||||
resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==}
|
||||
engines: {node: '>= 0.8'}
|
||||
@ -3322,10 +3278,6 @@ packages:
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
iconv-lite@0.4.24:
|
||||
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
iconv-lite@0.6.3:
|
||||
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
@ -3973,9 +3925,6 @@ packages:
|
||||
resolution: {integrity: sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==}
|
||||
engines: {node: '>= 4.0.0'}
|
||||
|
||||
merge-descriptors@1.0.3:
|
||||
resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==}
|
||||
|
||||
merge-descriptors@2.0.0:
|
||||
resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==}
|
||||
engines: {node: '>=18'}
|
||||
@ -4011,11 +3960,6 @@ packages:
|
||||
resolution: {integrity: sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
mime@1.6.0:
|
||||
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
|
||||
engines: {node: '>=4'}
|
||||
hasBin: true
|
||||
|
||||
mime@2.6.0:
|
||||
resolution: {integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg==}
|
||||
engines: {node: '>=4.0.0'}
|
||||
@ -4056,9 +4000,6 @@ packages:
|
||||
engines: {node: '>=10'}
|
||||
hasBin: true
|
||||
|
||||
ms@2.0.0:
|
||||
resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
|
||||
|
||||
ms@2.1.3:
|
||||
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
|
||||
|
||||
@ -4096,10 +4037,6 @@ packages:
|
||||
natural-compare@1.4.0:
|
||||
resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==}
|
||||
|
||||
negotiator@0.6.3:
|
||||
resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
negotiator@1.0.0:
|
||||
resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
@ -4285,6 +4222,14 @@ packages:
|
||||
resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==}
|
||||
engines: {node: '>=10'}
|
||||
|
||||
p-queue@7.4.1:
|
||||
resolution: {integrity: sha512-vRpMXmIkYF2/1hLBKisKeVYJZ8S2tZ0zEAmIJgdVKP2nq0nh4qCdf8bgw+ZgKrkh71AOCaqzwbJJk1WtdcF3VA==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
p-timeout@5.1.0:
|
||||
resolution: {integrity: sha512-auFDyzzzGZZZdHz3BtET9VEz0SE/uMEAx7uWfGPucfzEwwe/xH0iVeZibQmANYE/hp9T2+UUZT5m+BKyrDp3Ew==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
p-try@2.2.0:
|
||||
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
|
||||
engines: {node: '>=6'}
|
||||
@ -4342,9 +4287,6 @@ packages:
|
||||
resolution: {integrity: sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==}
|
||||
engines: {node: 20 || >=22}
|
||||
|
||||
path-to-regexp@0.1.12:
|
||||
resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==}
|
||||
|
||||
path-to-regexp@8.2.0:
|
||||
resolution: {integrity: sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==}
|
||||
engines: {node: '>=16'}
|
||||
@ -4478,10 +4420,6 @@ packages:
|
||||
pure-rand@7.0.1:
|
||||
resolution: {integrity: sha512-oTUZM/NAZS8p7ANR3SHh30kXB+zK2r2BPcEn/awJIbOvq82WoMN4p62AWWp3Hhw50G0xMsw1mhIBLqHw64EcNQ==}
|
||||
|
||||
qs@6.13.0:
|
||||
resolution: {integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==}
|
||||
engines: {node: '>=0.6'}
|
||||
|
||||
qs@6.14.0:
|
||||
resolution: {integrity: sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==}
|
||||
engines: {node: '>=0.6'}
|
||||
@ -4499,10 +4437,6 @@ packages:
|
||||
resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
|
||||
engines: {node: '>= 0.6'}
|
||||
|
||||
raw-body@2.5.2:
|
||||
resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==}
|
||||
engines: {node: '>= 0.8'}
|
||||
|
||||
raw-body@3.0.1:
|
||||
resolution: {integrity: sha512-9G8cA+tuMS75+6G/TzW8OtLzmBDMo8p1JRxN5AZ+LAp8uxGA8V8GZm4GQ4/N5QNQEnLmg6SS7wyuSmbKepiKqA==}
|
||||
engines: {node: '>= 0.10'}
|
||||
@ -4674,10 +4608,6 @@ packages:
|
||||
engines: {node: '>=10'}
|
||||
hasBin: true
|
||||
|
||||
send@0.19.0:
|
||||
resolution: {integrity: sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
send@1.2.0:
|
||||
resolution: {integrity: sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==}
|
||||
engines: {node: '>= 18'}
|
||||
@ -4689,10 +4619,6 @@ packages:
|
||||
serialize-javascript@6.0.2:
|
||||
resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==}
|
||||
|
||||
serve-static@1.16.2:
|
||||
resolution: {integrity: sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
|
||||
serve-static@2.2.0:
|
||||
resolution: {integrity: sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==}
|
||||
engines: {node: '>= 18'}
|
||||
@ -5197,8 +5123,8 @@ packages:
|
||||
resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
|
||||
engines: {node: '>= 0.4.0'}
|
||||
|
||||
uuid@11.1.0:
|
||||
resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==}
|
||||
uuid@13.0.0:
|
||||
resolution: {integrity: sha512-XQegIaBTVUjSHliKqcnFqYypAd4S+WCYt5NIeRs6w/UAry7z8Y9j5ZwRRL4kzq9U3sD6v+85er9FvkEaBpji2w==}
|
||||
hasBin: true
|
||||
|
||||
uuid@9.0.1:
|
||||
@ -6575,7 +6501,7 @@ snapshots:
|
||||
dependencies:
|
||||
deepmerge: 4.3.1
|
||||
|
||||
'@sendgrid/mail@8.1.5':
|
||||
'@sendgrid/mail@8.1.6':
|
||||
dependencies:
|
||||
'@sendgrid/client': 8.1.5
|
||||
'@sendgrid/helpers': 8.0.0
|
||||
@ -6878,7 +6804,9 @@ snapshots:
|
||||
'@types/methods': 1.1.4
|
||||
'@types/superagent': 8.1.9
|
||||
|
||||
'@types/uuid@10.0.0': {}
|
||||
'@types/uuid@11.0.0':
|
||||
dependencies:
|
||||
uuid: 13.0.0
|
||||
|
||||
'@types/validator@13.15.3': {}
|
||||
|
||||
@ -7122,11 +7050,6 @@ snapshots:
|
||||
|
||||
'@xtuc/long@4.2.2': {}
|
||||
|
||||
accepts@1.3.8:
|
||||
dependencies:
|
||||
mime-types: 2.1.35
|
||||
negotiator: 0.6.3
|
||||
|
||||
accepts@2.0.0:
|
||||
dependencies:
|
||||
mime-types: 3.0.1
|
||||
@ -7225,8 +7148,6 @@ snapshots:
|
||||
call-bound: 1.0.4
|
||||
is-array-buffer: 3.0.5
|
||||
|
||||
array-flatten@1.1.1: {}
|
||||
|
||||
array-includes@3.1.9:
|
||||
dependencies:
|
||||
call-bind: 1.0.8
|
||||
@ -7394,23 +7315,6 @@ snapshots:
|
||||
inherits: 2.0.4
|
||||
readable-stream: 3.6.2
|
||||
|
||||
body-parser@1.20.3:
|
||||
dependencies:
|
||||
bytes: 3.1.2
|
||||
content-type: 1.0.5
|
||||
debug: 2.6.9
|
||||
depd: 2.0.0
|
||||
destroy: 1.2.0
|
||||
http-errors: 2.0.0
|
||||
iconv-lite: 0.4.24
|
||||
on-finished: 2.4.1
|
||||
qs: 6.13.0
|
||||
raw-body: 2.5.2
|
||||
type-is: 1.6.18
|
||||
unpipe: 1.0.0
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
body-parser@2.2.0:
|
||||
dependencies:
|
||||
bytes: 3.1.2
|
||||
@ -7640,10 +7544,6 @@ snapshots:
|
||||
|
||||
consola@3.4.2: {}
|
||||
|
||||
content-disposition@0.5.4:
|
||||
dependencies:
|
||||
safe-buffer: 5.2.1
|
||||
|
||||
content-disposition@1.0.0:
|
||||
dependencies:
|
||||
safe-buffer: 5.2.1
|
||||
@ -7661,8 +7561,6 @@ snapshots:
|
||||
|
||||
cookie-signature@1.2.2: {}
|
||||
|
||||
cookie@0.7.1: {}
|
||||
|
||||
cookie@0.7.2: {}
|
||||
|
||||
cookiejar@2.1.4: {}
|
||||
@ -7733,10 +7631,6 @@ snapshots:
|
||||
|
||||
dateformat@4.6.3: {}
|
||||
|
||||
debug@2.6.9:
|
||||
dependencies:
|
||||
ms: 2.0.0
|
||||
|
||||
debug@3.2.7:
|
||||
dependencies:
|
||||
ms: 2.1.3
|
||||
@ -7779,8 +7673,6 @@ snapshots:
|
||||
|
||||
destr@2.0.5: {}
|
||||
|
||||
destroy@1.2.0: {}
|
||||
|
||||
detect-libc@2.0.4: {}
|
||||
|
||||
detect-newline@3.1.0: {}
|
||||
@ -7833,8 +7725,6 @@ snapshots:
|
||||
|
||||
empathic@2.0.0: {}
|
||||
|
||||
encodeurl@1.0.2: {}
|
||||
|
||||
encodeurl@2.0.0: {}
|
||||
|
||||
end-of-stream@1.4.5:
|
||||
@ -8209,6 +8099,8 @@ snapshots:
|
||||
|
||||
etag@1.8.1: {}
|
||||
|
||||
eventemitter3@5.0.1: {}
|
||||
|
||||
events@3.3.0: {}
|
||||
|
||||
execa@5.1.1:
|
||||
@ -8234,42 +8126,6 @@ snapshots:
|
||||
jest-mock: 30.0.5
|
||||
jest-util: 30.0.5
|
||||
|
||||
express@4.21.2:
|
||||
dependencies:
|
||||
accepts: 1.3.8
|
||||
array-flatten: 1.1.1
|
||||
body-parser: 1.20.3
|
||||
content-disposition: 0.5.4
|
||||
content-type: 1.0.5
|
||||
cookie: 0.7.1
|
||||
cookie-signature: 1.0.6
|
||||
debug: 2.6.9
|
||||
depd: 2.0.0
|
||||
encodeurl: 2.0.0
|
||||
escape-html: 1.0.3
|
||||
etag: 1.8.1
|
||||
finalhandler: 1.3.1
|
||||
fresh: 0.5.2
|
||||
http-errors: 2.0.0
|
||||
merge-descriptors: 1.0.3
|
||||
methods: 1.1.2
|
||||
on-finished: 2.4.1
|
||||
parseurl: 1.3.3
|
||||
path-to-regexp: 0.1.12
|
||||
proxy-addr: 2.0.7
|
||||
qs: 6.13.0
|
||||
range-parser: 1.2.1
|
||||
safe-buffer: 5.2.1
|
||||
send: 0.19.0
|
||||
serve-static: 1.16.2
|
||||
setprototypeof: 1.2.0
|
||||
statuses: 2.0.1
|
||||
type-is: 1.6.18
|
||||
utils-merge: 1.0.1
|
||||
vary: 1.1.2
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
express@5.1.0:
|
||||
dependencies:
|
||||
accepts: 2.0.0
|
||||
@ -8388,18 +8244,6 @@ snapshots:
|
||||
dependencies:
|
||||
to-regex-range: 5.0.1
|
||||
|
||||
finalhandler@1.3.1:
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
encodeurl: 2.0.0
|
||||
escape-html: 1.0.3
|
||||
on-finished: 2.4.1
|
||||
parseurl: 1.3.3
|
||||
statuses: 2.0.1
|
||||
unpipe: 1.0.0
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
finalhandler@2.1.0:
|
||||
dependencies:
|
||||
debug: 4.4.1
|
||||
@ -8472,8 +8316,6 @@ snapshots:
|
||||
|
||||
forwarded@0.2.0: {}
|
||||
|
||||
fresh@0.5.2: {}
|
||||
|
||||
fresh@2.0.0: {}
|
||||
|
||||
fs-extra@10.1.0:
|
||||
@ -8659,10 +8501,6 @@ snapshots:
|
||||
|
||||
husky@9.1.7: {}
|
||||
|
||||
iconv-lite@0.4.24:
|
||||
dependencies:
|
||||
safer-buffer: 2.1.2
|
||||
|
||||
iconv-lite@0.6.3:
|
||||
dependencies:
|
||||
safer-buffer: 2.1.2
|
||||
@ -9497,8 +9335,6 @@ snapshots:
|
||||
dependencies:
|
||||
fs-monkey: 1.1.0
|
||||
|
||||
merge-descriptors@1.0.3: {}
|
||||
|
||||
merge-descriptors@2.0.0: {}
|
||||
|
||||
merge-stream@2.0.0: {}
|
||||
@ -9524,8 +9360,6 @@ snapshots:
|
||||
dependencies:
|
||||
mime-db: 1.54.0
|
||||
|
||||
mime@1.6.0: {}
|
||||
|
||||
mime@2.6.0: {}
|
||||
|
||||
mimic-fn@2.1.0: {}
|
||||
@ -9556,8 +9390,6 @@ snapshots:
|
||||
|
||||
mkdirp@3.0.1: {}
|
||||
|
||||
ms@2.0.0: {}
|
||||
|
||||
ms@2.1.3: {}
|
||||
|
||||
msgpackr-extract@3.0.3:
|
||||
@ -9601,8 +9433,6 @@ snapshots:
|
||||
|
||||
natural-compare@1.4.0: {}
|
||||
|
||||
negotiator@0.6.3: {}
|
||||
|
||||
negotiator@1.0.0: {}
|
||||
|
||||
neo-async@2.6.2: {}
|
||||
@ -9797,6 +9627,13 @@ snapshots:
|
||||
dependencies:
|
||||
p-limit: 3.1.0
|
||||
|
||||
p-queue@7.4.1:
|
||||
dependencies:
|
||||
eventemitter3: 5.0.1
|
||||
p-timeout: 5.1.0
|
||||
|
||||
p-timeout@5.1.0: {}
|
||||
|
||||
p-try@2.2.0: {}
|
||||
|
||||
package-json-from-dist@1.0.1: {}
|
||||
@ -9849,8 +9686,6 @@ snapshots:
|
||||
lru-cache: 11.2.1
|
||||
minipass: 7.1.2
|
||||
|
||||
path-to-regexp@0.1.12: {}
|
||||
|
||||
path-to-regexp@8.2.0: {}
|
||||
|
||||
path-type@4.0.0: {}
|
||||
@ -10004,10 +9839,6 @@ snapshots:
|
||||
|
||||
pure-rand@7.0.1: {}
|
||||
|
||||
qs@6.13.0:
|
||||
dependencies:
|
||||
side-channel: 1.1.0
|
||||
|
||||
qs@6.14.0:
|
||||
dependencies:
|
||||
side-channel: 1.1.0
|
||||
@ -10022,13 +9853,6 @@ snapshots:
|
||||
|
||||
range-parser@1.2.1: {}
|
||||
|
||||
raw-body@2.5.2:
|
||||
dependencies:
|
||||
bytes: 3.1.2
|
||||
http-errors: 2.0.0
|
||||
iconv-lite: 0.4.24
|
||||
unpipe: 1.0.0
|
||||
|
||||
raw-body@3.0.1:
|
||||
dependencies:
|
||||
bytes: 3.1.2
|
||||
@ -10213,24 +10037,6 @@ snapshots:
|
||||
|
||||
semver@7.7.2: {}
|
||||
|
||||
send@0.19.0:
|
||||
dependencies:
|
||||
debug: 2.6.9
|
||||
depd: 2.0.0
|
||||
destroy: 1.2.0
|
||||
encodeurl: 1.0.2
|
||||
escape-html: 1.0.3
|
||||
etag: 1.8.1
|
||||
fresh: 0.5.2
|
||||
http-errors: 2.0.0
|
||||
mime: 1.6.0
|
||||
ms: 2.1.3
|
||||
on-finished: 2.4.1
|
||||
range-parser: 1.2.1
|
||||
statuses: 2.0.1
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
send@1.2.0:
|
||||
dependencies:
|
||||
debug: 4.4.1
|
||||
@ -10253,15 +10059,6 @@ snapshots:
|
||||
dependencies:
|
||||
randombytes: 2.1.0
|
||||
|
||||
serve-static@1.16.2:
|
||||
dependencies:
|
||||
encodeurl: 2.0.0
|
||||
escape-html: 1.0.3
|
||||
parseurl: 1.3.3
|
||||
send: 0.19.0
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
serve-static@2.2.0:
|
||||
dependencies:
|
||||
encodeurl: 2.0.0
|
||||
@ -10844,7 +10641,7 @@ snapshots:
|
||||
|
||||
utils-merge@1.0.1: {}
|
||||
|
||||
uuid@11.1.0: {}
|
||||
uuid@13.0.0: {}
|
||||
|
||||
uuid@9.0.1: {}
|
||||
|
||||
|
||||
14
scripts/bundle-analyze.sh
Executable file
14
scripts/bundle-analyze.sh
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
echo "📊 Analyzing bundle sizes..."
|
||||
|
||||
# Frontend bundle analysis
|
||||
echo "🎯 Frontend bundle analysis..."
|
||||
cd apps/portal
|
||||
pnpm run build:analyze
|
||||
echo "✅ Frontend analysis complete - check browser for results"
|
||||
|
||||
cd ../..
|
||||
|
||||
echo "🎉 Bundle analysis complete!"
|
||||
27
scripts/migrate-field-map.sh
Executable file
27
scripts/migrate-field-map.sh
Executable file
@ -0,0 +1,27 @@
|
||||
#!/bin/bash
|
||||
# Script to migrate remaining getSalesforceFieldMap usages to SalesforceFieldMapService
|
||||
|
||||
echo "🔄 Migrating remaining field map usages..."
|
||||
|
||||
# Files that still need migration
|
||||
FILES=(
|
||||
"apps/bff/src/modules/orders/services/order-fulfillment-validator.service.ts"
|
||||
"apps/bff/src/modules/orders/services/order-builder.service.ts"
|
||||
"apps/bff/src/integrations/salesforce/salesforce.service.ts"
|
||||
"apps/bff/src/modules/orders/services/order-fulfillment-orchestrator.service.ts"
|
||||
"apps/bff/src/modules/orders/services/order-pricebook.service.ts"
|
||||
)
|
||||
|
||||
echo "⚠️ The following files still need manual migration:"
|
||||
for file in "${FILES[@]}"; do
|
||||
echo " - $file"
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "📝 These files use getSalesforceFieldMap() and need to be updated to:"
|
||||
echo " 1. Inject SalesforceFieldMapService in constructor"
|
||||
echo " 2. Call this.fieldMapService.getFieldMap() instead"
|
||||
echo " 3. Import CoreConfigModule in their respective modules"
|
||||
|
||||
echo ""
|
||||
echo "🎯 After manual migration, the deprecated functions can be completely removed."
|
||||
119
scripts/validate-deps.sh
Executable file
119
scripts/validate-deps.sh
Executable file
@ -0,0 +1,119 @@
|
||||
#!/bin/bash
|
||||
# 🔍 Dependency Validation Script
|
||||
# Validates dependency integrity, checks for version drift, and security issues
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
echo "🔍 Validating dependencies..."
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
# Check if pnpm is available
|
||||
if ! command -v pnpm &> /dev/null; then
|
||||
echo -e "${RED}❌ pnpm is not installed${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 1. Validate lockfile integrity
|
||||
echo "📋 Checking lockfile integrity..."
|
||||
if pnpm install --frozen-lockfile --ignore-scripts; then
|
||||
echo -e "${GREEN}✅ Lockfile integrity validated${NC}"
|
||||
else
|
||||
echo -e "${RED}❌ Lockfile integrity check failed${NC}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# 2. Check for dependency version drift
|
||||
echo "🔍 Checking for dependency version drift..."
|
||||
pnpm list --recursive --depth=0 --json > /tmp/deps.json
|
||||
|
||||
node -e "
|
||||
const fs = require('fs');
|
||||
const deps = JSON.parse(fs.readFileSync('/tmp/deps.json', 'utf8'));
|
||||
const allDeps = new Map();
|
||||
|
||||
deps.forEach(pkg => {
|
||||
if (pkg.dependencies) {
|
||||
Object.entries(pkg.dependencies).forEach(([name, info]) => {
|
||||
const version = info.version;
|
||||
if (!allDeps.has(name)) {
|
||||
allDeps.set(name, new Set());
|
||||
}
|
||||
allDeps.get(name).add(\`\${pkg.name}@\${version}\`);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
let hasDrift = false;
|
||||
allDeps.forEach((versions, depName) => {
|
||||
if (versions.size > 1) {
|
||||
console.log(\`❌ Version drift detected for \${depName}:\`);
|
||||
versions.forEach(v => console.log(\` - \${v}\`));
|
||||
hasDrift = true;
|
||||
}
|
||||
});
|
||||
|
||||
if (hasDrift) {
|
||||
console.log('\\n💡 Fix version drift by aligning dependency versions across workspaces.');
|
||||
process.exit(1);
|
||||
} else {
|
||||
console.log('✅ No dependency version drift detected.');
|
||||
}
|
||||
"
|
||||
|
||||
# 3. Security audit
|
||||
echo "🔒 Running security audit..."
|
||||
if pnpm audit --audit-level moderate; then
|
||||
echo -e "${GREEN}✅ Security audit passed${NC}"
|
||||
else
|
||||
echo -e "${YELLOW}⚠️ Security vulnerabilities found. Review and update dependencies.${NC}"
|
||||
fi
|
||||
|
||||
# 4. Check for outdated dependencies
|
||||
echo "📅 Checking for outdated dependencies..."
|
||||
pnpm outdated --recursive || echo -e "${YELLOW}⚠️ Some dependencies are outdated${NC}"
|
||||
|
||||
# 5. Validate workspace configuration
|
||||
echo "⚙️ Validating workspace configuration..."
|
||||
node -e "
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
try {
|
||||
const rootPkg = JSON.parse(fs.readFileSync('package.json', 'utf8'));
|
||||
console.log('✅ Root package.json is valid');
|
||||
|
||||
const workspaceConfig = fs.readFileSync('pnpm-workspace.yaml', 'utf8');
|
||||
console.log('✅ pnpm-workspace.yaml is readable');
|
||||
|
||||
// Check that all workspace packages exist
|
||||
const workspaces = ['apps/*', 'packages/*'];
|
||||
let allValid = true;
|
||||
|
||||
workspaces.forEach(workspace => {
|
||||
const workspacePath = workspace.replace('/*', '');
|
||||
if (!fs.existsSync(workspacePath)) {
|
||||
console.log(\`❌ Workspace path does not exist: \${workspacePath}\`);
|
||||
allValid = false;
|
||||
}
|
||||
});
|
||||
|
||||
if (!allValid) {
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('✅ All workspace paths are valid');
|
||||
} catch (error) {
|
||||
console.log(\`❌ Workspace validation failed: \${error.message}\`);
|
||||
process.exit(1);
|
||||
}
|
||||
"
|
||||
|
||||
# Cleanup
|
||||
rm -f /tmp/deps.json
|
||||
|
||||
echo -e "${GREEN}🎉 Dependency validation completed successfully!${NC}"
|
||||
Loading…
x
Reference in New Issue
Block a user