feat: Goa GEL Blockchain e-Licensing Platform - Full Stack Implementation

Complete implementation of the Goa Government e-Licensing platform with:

Backend:
- NestJS API with JWT authentication
- PostgreSQL database with Knex ORM
- Redis caching and session management
- MinIO document storage
- Hyperledger Besu blockchain integration
- Multi-department workflow system
- Comprehensive API tests (266/282 passing)

Frontend:
- Angular 21 with standalone components
- Angular Material + TailwindCSS UI
- Visual workflow builder
- Document upload with progress tracking
- Blockchain explorer integration
- Role-based dashboards (Admin, Department, Citizen)
- E2E tests with Playwright (37 tests)

Infrastructure:
- Docker Compose orchestration
- Blockscout blockchain explorer
- Development and production configurations
This commit is contained in:
Mahi
2026-02-07 10:23:29 -04:00
commit 80566bf0a2
441 changed files with 102418 additions and 0 deletions

15
backend/.dockerignore Normal file
View File

@@ -0,0 +1,15 @@
node_modules
npm-debug.log
dist
coverage
.git
.gitignore
README.md
.env.example
.eslintrc.js
.prettierrc
jest.config.js
test
logs
*.log
.DS_Store

12
backend/.editorconfig Normal file
View File

@@ -0,0 +1,12 @@
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[*.md]
trim_trailing_whitespace = false

73
backend/.env.example Normal file
View File

@@ -0,0 +1,73 @@
# ===========================================
# Goa GEL Backend Environment Configuration
# ===========================================
# Application
NODE_ENV=development
PORT=3001
API_VERSION=v1
API_PREFIX=api
# Database (PostgreSQL)
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_NAME=goa_gel_platform
DATABASE_USER=postgres
DATABASE_PASSWORD=postgres_secure_password
DATABASE_SSL=false
DATABASE_LOGGING=true
# Blockchain (Hyperledger Besu)
BESU_RPC_URL=http://localhost:8545
BESU_CHAIN_ID=1337
BESU_NETWORK_ID=2024
CONTRACT_ADDRESS_LICENSE_NFT=0x0000000000000000000000000000000000000001
CONTRACT_ADDRESS_APPROVAL_MANAGER=0x0000000000000000000000000000000000000002
CONTRACT_ADDRESS_DEPARTMENT_REGISTRY=0x0000000000000000000000000000000000000003
CONTRACT_ADDRESS_WORKFLOW_REGISTRY=0x0000000000000000000000000000000000000004
PLATFORM_WALLET_PRIVATE_KEY=0x0000000000000000000000000000000000000000000000000000000000000001
# MinIO (S3-Compatible Storage)
MINIO_ENDPOINT=localhost
MINIO_PORT=9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin_secure_password
MINIO_BUCKET_DOCUMENTS=goa-gel-documents
MINIO_USE_SSL=false
# Redis
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
REDIS_DB=0
# Security
JWT_SECRET=your-super-secure-jwt-secret-key-min-32-chars
JWT_EXPIRATION=1d
JWT_REFRESH_EXPIRATION=7d
API_KEY_SALT_ROUNDS=10
WEBHOOK_SIGNATURE_ALGORITHM=sha256
# File Upload
MAX_FILE_SIZE=10485760
ALLOWED_MIME_TYPES=application/pdf,image/jpeg,image/png,image/jpg
# Rate Limiting
RATE_LIMIT_TTL=60
RATE_LIMIT_GLOBAL=100
RATE_LIMIT_API_KEY=1000
RATE_LIMIT_UPLOAD=10
# Logging
LOG_LEVEL=info
LOG_FORMAT=json
# CORS
CORS_ORIGIN=http://localhost:3000
CORS_CREDENTIALS=true
# Swagger
SWAGGER_ENABLED=true
SWAGGER_TITLE=Goa GEL API
SWAGGER_DESCRIPTION=Blockchain Document Verification Platform API
SWAGGER_VERSION=1.0.0

29
backend/.eslintrc.js Normal file
View File

@@ -0,0 +1,29 @@
module.exports = {
parser: '@typescript-eslint/parser',
parserOptions: {
project: 'tsconfig.json',
tsconfigRootDir: __dirname,
sourceType: 'module',
},
plugins: ['@typescript-eslint/eslint-plugin'],
extends: [
'plugin:@typescript-eslint/recommended',
'plugin:prettier/recommended',
],
root: true,
env: {
node: true,
jest: true,
},
ignorePatterns: ['.eslintrc.js', 'dist/', 'node_modules/'],
rules: {
'@typescript-eslint/interface-name-prefix': 'off',
'@typescript-eslint/explicit-function-return-type': 'warn',
'@typescript-eslint/explicit-module-boundary-types': 'warn',
'@typescript-eslint/no-explicit-any': 'error',
'@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],
'@typescript-eslint/no-floating-promises': 'error',
'no-console': 'error',
'prefer-const': 'error',
},
};

54
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,54 @@
# Dependencies
node_modules/
npm-debug.log
yarn-error.log
# Build
dist/
build/
*.tsbuildinfo
# Environment
.env
.env.local
.env.*.local
.env.production.local
# IDE
.idea/
.vscode/
*.swp
*.swo
*~
.DS_Store
*.sublime-project
*.sublime-workspace
# Testing
coverage/
.nyc_output/
jest.results.json
# Logs
logs/
*.log
lerna-debug.log
# OS
.AppleDouble
.LSOverride
Thumbs.db
# Cache
.cache/
.npm
.eslintcache
# Temporary files
tmp/
temp/
*.tmp
# Docker
.docker/
docker-compose.override.yml

9
backend/.prettierrc Normal file
View File

@@ -0,0 +1,9 @@
{
"singleQuote": true,
"trailingComma": "all",
"tabWidth": 2,
"semi": true,
"printWidth": 100,
"bracketSpacing": true,
"arrowParens": "avoid"
}

583
backend/DATABASE_SETUP.md Normal file
View File

@@ -0,0 +1,583 @@
# Goa GEL Database Setup - Complete Guide
## Overview
This document provides a comprehensive overview of the complete database setup for the Goa GEL Blockchain Document Verification Platform, including all 12 entities, complete migrations, seeders, and configuration.
## Created Files Summary
### 1. Entity Files (12 entities)
All entities are located in `/src/database/entities/`:
| Entity | File | Purpose |
|--------|------|---------|
| Applicant | `applicant.entity.ts` | Represents license applicants with wallet integration |
| Department | `department.entity.ts` | Government departments handling approvals |
| Workflow | `workflow.entity.ts` | Multi-stage approval workflow definitions |
| LicenseRequest | `license-request.entity.ts` | Main license application entity (8 statuses) |
| Document | `document.entity.ts` | Uploaded documents with versioning support |
| DocumentVersion | `document-version.entity.ts` | Version history for documents (SHA-256 hashing) |
| Approval | `approval.entity.ts` | Department-level approvals (5 statuses) |
| WorkflowState | `workflow-state.entity.ts` | Execution state tracking with full audit |
| Webhook | `webhook.entity.ts` | Department webhook configurations |
| WebhookLog | `webhook-log.entity.ts` | Webhook delivery audit trail (retry tracking) |
| AuditLog | `audit-log.entity.ts` | Comprehensive change audit with actor tracking |
| BlockchainTransaction | `blockchain-transaction.entity.ts` | NFT minting and on-chain operations (5 tx types) |
**Index File:** `entities/index.ts` - Exports all entities and enums
### 2. Core Configuration
- **data-source.ts** - TypeORM DataSource with PostgreSQL configuration
- Environment variable driven
- Connection pooling configured
- Logging support for development
- All 12 entities registered
### 3. Migrations
- **1704067200000-InitialSchema.ts** - Complete initial schema
- 12 tables created with proper constraints
- 7 custom PostgreSQL enums
- 40+ indexes for performance optimization
- Foreign key relationships with cascade delete
- Complete down migration for rollback
### 4. Database Seeders
- **seeders/seed.ts** - Sample data generator
- Creates 4 sample departments (Fire, Tourism, Municipal, Health)
- Defines RESORT_LICENSE workflow with 5 stages
- Creates 2 sample applicants
- Creates 1 license request in DRAFT status
- Generates workflow state with execution log
### 5. Documentation
- **src/database/README.md** - Comprehensive database documentation
- **DATABASE_SETUP.md** - This file
## Database Schema Details
### Table: applicants
```typescript
id: UUID (PK)
digilockerId: varchar(255) UNIQUE
name: varchar(255)
email: varchar(255) UNIQUE
phone: varchar(20)
walletAddress: varchar(255) UNIQUE
isActive: boolean (default: true)
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- digilockerId
- walletAddress
- email
```
### Table: departments
```typescript
id: UUID (PK)
code: varchar(50) UNIQUE
name: varchar(255)
walletAddress: varchar(255) UNIQUE
apiKeyHash: varchar(255)
apiSecretHash: varchar(255)
webhookUrl: text NULLABLE
webhookSecretHash: varchar(255) NULLABLE
isActive: boolean (default: true)
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- code
- walletAddress
```
### Table: workflows
```typescript
id: UUID (PK)
workflowType: varchar(100) UNIQUE
name: varchar(255)
description: text NULLABLE
version: integer (default: 1)
definition: jsonb
isActive: boolean (default: true)
createdBy: UUID NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- workflowType
```
### Table: license_requests
```typescript
id: UUID (PK)
requestNumber: varchar(50) UNIQUE (auto-generated: RL-YYYY-XXXXXX)
tokenId: bigint NULLABLE
applicantId: UUID (FK: applicants)
requestType: varchar(50)
workflowId: UUID (FK: workflows)
status: ENUM (DRAFT, SUBMITTED, IN_REVIEW, PENDING_RESUBMISSION, APPROVED, REJECTED, REVOKED, CANCELLED)
metadata: jsonb (default: {})
currentStageId: varchar(100) NULLABLE
blockchainTxHash: varchar(255) NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
submittedAt: TIMESTAMP WITH TIME ZONE NULLABLE
approvedAt: TIMESTAMP WITH TIME ZONE NULLABLE
Indexes:
- requestNumber
- applicantId
- workflowId
- status
- createdAt
- (applicantId, status)
```
### Table: documents
```typescript
id: UUID (PK)
requestId: UUID (FK: license_requests)
docType: varchar(100)
originalFilename: varchar(255)
currentVersion: integer (default: 1)
currentHash: varchar(64) [SHA-256]
minioBucket: varchar(255)
isActive: boolean (default: true)
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- requestId
- (requestId, docType)
- currentHash
```
### Table: document_versions
```typescript
id: UUID (PK)
documentId: UUID (FK: documents)
version: integer
hash: varchar(64) [SHA-256]
minioPath: text
fileSize: bigint
mimeType: varchar(100)
uploadedBy: UUID
blockchainTxHash: varchar(255) NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
Indexes:
- documentId
- hash
Unique Constraint:
- (documentId, version)
```
### Table: approvals
```typescript
id: UUID (PK)
requestId: UUID (FK: license_requests)
departmentId: UUID (FK: departments)
status: ENUM (PENDING, APPROVED, REJECTED, CHANGES_REQUESTED, REVIEW_REQUIRED)
remarks: text NULLABLE
remarksHash: varchar(64) NULLABLE [SHA-256]
reviewedDocuments: jsonb (array of UUIDs, default: [])
blockchainTxHash: varchar(255) NULLABLE
isActive: boolean (default: true)
invalidatedAt: TIMESTAMP WITH TIME ZONE NULLABLE
invalidationReason: varchar(255) NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- requestId
- departmentId
- status
- (requestId, departmentId)
- (requestId, status)
```
### Table: workflow_states
```typescript
id: UUID (PK)
requestId: UUID (FK: license_requests) UNIQUE
currentStageId: varchar(100)
completedStages: jsonb (array of stage IDs, default: [])
pendingApprovals: jsonb (array of {departmentCode, status, createdAt}, default: [])
executionLog: jsonb (array of {timestamp, stageId, action, details}, default: [])
stageStartedAt: TIMESTAMP WITH TIME ZONE NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- requestId
```
### Table: webhooks
```typescript
id: UUID (PK)
departmentId: UUID (FK: departments)
url: text
events: jsonb (array of event types, default: [])
secretHash: varchar(255)
isActive: boolean (default: true)
createdAt: TIMESTAMP WITH TIME ZONE
updatedAt: TIMESTAMP WITH TIME ZONE
Indexes:
- departmentId
- (departmentId, isActive)
```
### Table: webhook_logs
```typescript
id: UUID (PK)
webhookId: UUID (FK: webhooks)
eventType: varchar(100)
payload: jsonb
responseStatus: integer NULLABLE
responseBody: text NULLABLE
responseTime: integer NULLABLE [milliseconds]
retryCount: integer (default: 0)
status: ENUM (PENDING, SUCCESS, FAILED)
createdAt: TIMESTAMP WITH TIME ZONE
Indexes:
- webhookId
- eventType
- status
- createdAt
- (webhookId, status)
```
### Table: audit_logs
```typescript
id: UUID (PK)
entityType: ENUM (REQUEST, APPROVAL, DOCUMENT, DEPARTMENT, WORKFLOW)
entityId: UUID
action: varchar(100)
actorType: ENUM (APPLICANT, DEPARTMENT, SYSTEM, ADMIN)
actorId: UUID NULLABLE
oldValue: jsonb NULLABLE
newValue: jsonb NULLABLE
ipAddress: varchar(45) NULLABLE
userAgent: text NULLABLE
correlationId: varchar(255) NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
Indexes:
- entityType
- entityId
- action
- actorType
- createdAt
- (entityType, entityId)
- (actorId, createdAt)
```
### Table: blockchain_transactions
```typescript
id: UUID (PK)
txHash: varchar(255) UNIQUE
txType: ENUM (MINT_NFT, APPROVAL, DOC_UPDATE, REJECT, REVOKE)
relatedEntityType: varchar(100)
relatedEntityId: UUID
fromAddress: varchar(255)
toAddress: varchar(255) NULLABLE
status: ENUM (PENDING, CONFIRMED, FAILED)
blockNumber: bigint NULLABLE
gasUsed: bigint NULLABLE
errorMessage: text NULLABLE
createdAt: TIMESTAMP WITH TIME ZONE
confirmedAt: TIMESTAMP WITH TIME ZONE NULLABLE
Indexes:
- txHash
- status
- txType
- relatedEntityId
- createdAt
- (status, txType)
```
## Enums Defined
### LicenseRequestStatus (license_requests table)
- DRAFT
- SUBMITTED
- IN_REVIEW
- PENDING_RESUBMISSION
- APPROVED
- REJECTED
- REVOKED
- CANCELLED
### ApprovalStatus (approvals table)
- PENDING
- APPROVED
- REJECTED
- CHANGES_REQUESTED
- REVIEW_REQUIRED
### WebhookLogStatus (webhook_logs table)
- PENDING
- SUCCESS
- FAILED
### AuditEntityType (audit_logs table)
- REQUEST
- APPROVAL
- DOCUMENT
- DEPARTMENT
- WORKFLOW
### AuditActorType (audit_logs table)
- APPLICANT
- DEPARTMENT
- SYSTEM
- ADMIN
### BlockchainTransactionType (blockchain_transactions table)
- MINT_NFT
- APPROVAL
- DOC_UPDATE
- REJECT
- REVOKE
### BlockchainTransactionStatus (blockchain_transactions table)
- PENDING
- CONFIRMED
- FAILED
## Key Features
### 1. Data Integrity
- All foreign keys with CASCADE DELETE
- UNIQUE constraints on critical fields
- NOT NULL constraints where required
- CHECK constraints via TypeORM validation
### 2. Performance Optimization
- 40+ indexes covering all query patterns
- Composite indexes for common joins
- JSONB columns for flexible metadata
- Partitioning ready for large audit tables
### 3. Audit & Compliance
- Complete audit_logs tracking all changes
- Actor identification (who made the change)
- Old/new values for change comparison
- Correlation IDs for distributed tracing
- IP address and user agent capture
### 4. Blockchain Integration
- blockchain_transactions table for on-chain tracking
- NFT token ID field in license_requests
- Transaction hash storage for verification
- Block confirmation tracking
### 5. Workflow Management
- workflow_states for execution tracking
- Complete execution log with timestamps
- Pending approvals tracking by department
- Completed stages audit trail
### 6. Document Versioning
- Multiple versions per document
- SHA-256 hashing for integrity
- File size and mime type tracking
- Upload attribution and timestamps
### 7. Webhook System
- Flexible event subscription model
- Retry mechanism with count tracking
- Response time monitoring
- Status tracking (PENDING, SUCCESS, FAILED)
## Setup Instructions
### Prerequisites
- PostgreSQL 12+ with UUID extension
- Node.js 16+ with TypeORM
- npm or yarn package manager
### Steps
1. **Install Dependencies**
```bash
npm install typeorm pg uuid crypto dotenv
```
2. **Create PostgreSQL Database**
```bash
createdb goa_gel_db
```
3. **Configure Environment**
Create `.env` file:
```env
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_USER=postgres
DATABASE_PASSWORD=your_password
DATABASE_NAME=goa_gel_db
DATABASE_LOGGING=true
DATABASE_SSL=false
NODE_ENV=development
```
4. **Run Migrations**
```bash
npx typeorm migration:run -d src/database/data-source.ts
```
5. **Seed Sample Data**
```bash
npx ts-node src/database/seeders/seed.ts
```
6. **Verify Setup**
```bash
psql goa_gel_db -c "\dt"
```
## Entity Relationships
```
┌─────────────┐
│ Applicant │
└──────┬──────┘
│ 1:N
├─────────────────────────────┬──────────────────┐
│ │ │
▼ ▼ ▼
┌──────────────────┐ ┌──────────────┐ ┌────────────────┐
│ LicenseRequest │ │ Workflow │ │ WorkflowState │
└────┬─────────────┘ └──────────────┘ └────────────────┘
│ △
│ 1:N │
├─────────┬──────────┐ │
│ │ │ │
▼ ▼ ▼ 1:1 relation
┌────────────┐ ┌───────────┐ ┌──────────────┐
│ Document │ │ Approval │ │ Approval │
│ 1:N │ │ 1:N │ │ Status │
│ DocumentV │ │ Department│ │ Tracking │
└────────────┘ └─────┬─────┘ └──────────────┘
│ N:1
┌──────▼──────┐
│ Department │
│ 1:N │
│ Webhook │
│ 1:N │
│ WebhookLog │
└─────────────┘
AuditLog ─── Tracks all changes to above entities
BlockchainTransaction ─── Records NFT minting and approvals
```
## Common Operations
### Create a New Migration
```bash
npx typeorm migration:generate -d src/database/data-source.ts -n AddNewField
```
### Generate Migration from Entity Changes
```bash
npx typeorm migration:generate -d src/database/data-source.ts -n AutoGenerated
```
### Revert Last Migration
```bash
npx typeorm migration:revert -d src/database/data-source.ts
```
### View Migration Status
```bash
npx typeorm migration:show -d src/database/data-source.ts
```
## Security Considerations
1. **Hash Storage** - API keys, secrets, and webhook secrets are hashed with SHA-256
2. **Wallet Addresses** - Normalized to lowercase to prevent duplication
3. **Cascade Delete** - Foreign keys cascade to prevent orphaned records
4. **Audit Trail** - All critical operations logged with actor identification
5. **Correlation IDs** - Support distributed tracing for audit
6. **JSONB Validation** - Additional validation at application layer
## Performance Tips
1. **Index Usage** - All frequently queried columns are indexed
2. **Composite Indexes** - Multi-column queries optimized
3. **JSONB Queries** - Use PostgreSQL native JSONB operations
4. **Batch Operations** - Use chunking for large inserts
5. **Connection Pooling** - Configured at 20 connections (production)
## File Locations
```
/sessions/cool-elegant-faraday/mnt/Goa-GEL/backend/src/database/
├── entities/
│ ├── applicant.entity.ts
│ ├── approval.entity.ts
│ ├── audit-log.entity.ts
│ ├── blockchain-transaction.entity.ts
│ ├── department.entity.ts
│ ├── document-version.entity.ts
│ ├── document.entity.ts
│ ├── index.ts
│ ├── license-request.entity.ts
│ ├── webhook-log.entity.ts
│ ├── webhook.entity.ts
│ ├── workflow-state.entity.ts
│ └── workflow.entity.ts
├── migrations/
│ └── 1704067200000-InitialSchema.ts
├── seeders/
│ └── seed.ts
├── data-source.ts
├── index.ts
└── README.md
```
## Next Steps
1. Install PostgreSQL and create database
2. Configure environment variables in `.env`
3. Run migrations: `npx typeorm migration:run -d src/database/data-source.ts`
4. Seed sample data: `npx ts-node src/database/seeders/seed.ts`
5. Start backend application
6. Verify database tables: `psql goa_gel_db -c "\dt"`
## Support
For detailed information, see:
- `/src/database/README.md` - Database documentation
- `/src/database/entities/` - Entity definitions with comments
- `/src/database/migrations/` - SQL migration details

74
backend/Dockerfile Normal file
View File

@@ -0,0 +1,74 @@
# ================================
# Build Stage
# ================================
FROM node:20-alpine AS builder
WORKDIR /app
# Install dependencies for native modules
RUN apk add --no-cache python3 make g++
# Copy package files
COPY package*.json ./
# Install ALL dependencies (including devDependencies for build)
RUN npm ci && npm cache clean --force
# Copy source code
COPY . .
# Build the application
RUN npm run build
# Remove devDependencies after build
RUN npm prune --production
# ================================
# Production Stage
# ================================
FROM node:20-alpine AS production
WORKDIR /app
# Add labels
LABEL maintainer="Government of Goa"
LABEL description="Goa GEL Backend - Blockchain Document Verification Platform"
LABEL version="1.0.0"
# Install runtime dependencies
RUN apk add --no-cache \
postgresql-client \
bash \
wget
# Create non-root user for security
RUN addgroup -g 1001 -S nodejs && \
adduser -S nestjs -u 1001
# Copy built application from builder
COPY --from=builder --chown=nestjs:nodejs /app/dist ./dist
COPY --from=builder --chown=nestjs:nodejs /app/node_modules ./node_modules
COPY --from=builder --chown=nestjs:nodejs /app/package*.json ./
# Copy compiled database migrations and seeds from dist
COPY --from=builder --chown=nestjs:nodejs /app/dist/database/migrations ./src/database/migrations
COPY --from=builder --chown=nestjs:nodejs /app/dist/database/seeds ./src/database/seeds
COPY --from=builder --chown=nestjs:nodejs /app/dist/database/knexfile.js ./src/database/knexfile.js
# Copy initialization scripts
COPY --chown=nestjs:nodejs scripts ./scripts
RUN chmod +x scripts/*.sh
# Set environment variables
ENV NODE_ENV=production
ENV PORT=3001
# Expose port
EXPOSE 3001
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
CMD wget --no-verbose --tries=1 --spider http://localhost:3001/api/v1/health || exit 1
# Use entrypoint script for initialization
ENTRYPOINT ["/app/scripts/docker-entrypoint.sh"]

285
backend/FILES_CREATED.txt Normal file
View File

@@ -0,0 +1,285 @@
GOA GEL BLOCKCHAIN DOCUMENT VERIFICATION PLATFORM
Database Schema - Complete File Listing
============================================
CREATED: 2024-12-04
DIRECTORY STRUCTURE:
/sessions/cool-elegant-faraday/mnt/Goa-GEL/backend/src/database/
DATABASE ENTITY FILES (12 entities + types)
============================================
1. /src/database/entities/applicant.entity.ts
- Applicant entity with wallet integration
- Fields: id, digilockerId, name, email, phone, walletAddress, isActive
- Indexes: digilockerId, walletAddress, email
- Relations: OneToMany with LicenseRequest
2. /src/database/entities/department.entity.ts
- Department entity for government agencies
- Fields: id, code, name, walletAddress, apiKeyHash, apiSecretHash, webhookUrl, webhookSecretHash
- Indexes: code, walletAddress
- Relations: OneToMany with Approval, OneToMany with Webhook
3. /src/database/entities/workflow.entity.ts
- Workflow definition entity
- Fields: id, workflowType, name, description, version, definition (JSONB)
- Indexes: workflowType
- Relations: OneToMany with LicenseRequest
4. /src/database/entities/license-request.entity.ts
- Main license request entity
- Fields: id, requestNumber, tokenId, applicantId, requestType, workflowId, status, metadata, currentStageId, blockchainTxHash
- Status Enum: DRAFT, SUBMITTED, IN_REVIEW, PENDING_RESUBMISSION, APPROVED, REJECTED, REVOKED, CANCELLED
- Indexes: requestNumber, applicantId, workflowId, status, createdAt, (applicantId, status)
- Relations: ManyToOne Applicant, ManyToOne Workflow, OneToMany Document, OneToMany Approval, OneToOne WorkflowState
5. /src/database/entities/document.entity.ts
- Document entity for uploaded files
- Fields: id, requestId, docType, originalFilename, currentVersion, currentHash (SHA-256), minioBucket, isActive
- Indexes: requestId, (requestId, docType), currentHash
- Relations: ManyToOne LicenseRequest, OneToMany DocumentVersion
6. /src/database/entities/document-version.entity.ts
- Document version tracking entity
- Fields: id, documentId, version, hash (SHA-256), minioPath, fileSize, mimeType, uploadedBy, blockchainTxHash
- Indexes: documentId, hash
- Unique Constraint: (documentId, version)
- Relations: ManyToOne Document
7. /src/database/entities/approval.entity.ts
- Department approval entity
- Fields: id, requestId, departmentId, status, remarks, remarksHash, reviewedDocuments (JSONB), blockchainTxHash, isActive, invalidatedAt, invalidationReason
- Status Enum: PENDING, APPROVED, REJECTED, CHANGES_REQUESTED, REVIEW_REQUIRED
- Indexes: requestId, departmentId, status, (requestId, departmentId), (requestId, status)
- Relations: ManyToOne LicenseRequest, ManyToOne Department
8. /src/database/entities/workflow-state.entity.ts
- Workflow execution state entity
- Fields: id, requestId, currentStageId, completedStages (JSONB), pendingApprovals (JSONB), executionLog (JSONB), stageStartedAt
- Indexes: requestId
- Relations: OneToOne LicenseRequest
9. /src/database/entities/webhook.entity.ts
- Webhook configuration entity
- Fields: id, departmentId, url, events (JSONB), secretHash, isActive
- Indexes: departmentId, (departmentId, isActive)
- Relations: ManyToOne Department, OneToMany WebhookLog
10. /src/database/entities/webhook-log.entity.ts
- Webhook delivery audit entity
- Fields: id, webhookId, eventType, payload (JSONB), responseStatus, responseBody, responseTime, retryCount, status
- Status Enum: PENDING, SUCCESS, FAILED
- Indexes: webhookId, eventType, status, createdAt, (webhookId, status)
- Relations: ManyToOne Webhook
11. /src/database/entities/audit-log.entity.ts
- Comprehensive audit trail entity
- Fields: id, entityType, entityId, action, actorType, actorId, oldValue (JSONB), newValue (JSONB), ipAddress, userAgent, correlationId
- EntityType Enum: REQUEST, APPROVAL, DOCUMENT, DEPARTMENT, WORKFLOW
- ActorType Enum: APPLICANT, DEPARTMENT, SYSTEM, ADMIN
- Indexes: entityType, entityId, action, actorType, createdAt, (entityType, entityId), (actorId, createdAt)
12. /src/database/entities/blockchain-transaction.entity.ts
- Blockchain transaction tracking entity
- Fields: id, txHash, txType, relatedEntityType, relatedEntityId, fromAddress, toAddress, status, blockNumber, gasUsed, errorMessage, confirmedAt
- TxType Enum: MINT_NFT, APPROVAL, DOC_UPDATE, REJECT, REVOKE
- Status Enum: PENDING, CONFIRMED, FAILED
- Indexes: txHash, status, txType, relatedEntityId, createdAt, (status, txType)
13. /src/database/entities/types.ts
- TypeScript type definitions and interfaces
- Includes: WorkflowDefinition, LicenseRequestMetadata, WebhookEventPayload, AuditChangeRecord, etc.
14. /src/database/entities/index.ts
- Entity barrel export file
- Exports all entities and enums
CORE CONFIGURATION FILES
=======================
15. /src/database/data-source.ts
- TypeORM DataSource configuration
- PostgreSQL connection setup
- All 12 entities registered
- Migrations and subscribers configured
- Connection pooling (20 in production, 10 in development)
16. /src/database/index.ts
- Main database module export
- Exports DataSource and all entities
MIGRATION FILES
===============
17. /src/database/migrations/1704067200000-InitialSchema.ts
- Complete initial database schema migration
- Creates all 12 tables with proper constraints
- Creates 7 PostgreSQL enums:
* license_requests_status_enum
* approvals_status_enum
* webhook_logs_status_enum
* audit_logs_entity_type_enum
* audit_logs_actor_type_enum
* blockchain_transactions_tx_type_enum
* blockchain_transactions_status_enum
- Adds 40+ indexes for performance optimization
- Includes complete down() migration for rollback
SEEDER FILES
============
18. /src/database/seeders/seed.ts
- Database seeding script
- Creates sample data:
* 4 departments (Fire, Tourism, Municipal, Health)
* 1 RESORT_LICENSE workflow with 5 stages
* 2 sample applicants
* 1 license request in DRAFT status with workflow state
- Uses SHA-256 hashing for sensitive data
- Wallet address normalization
DOCUMENTATION FILES
===================
19. /src/database/README.md
- Comprehensive database documentation
- Entity descriptions and relationships
- Setup instructions (5 steps)
- Common SQL queries
- Maintenance procedures
- Troubleshooting guide
20. /DATABASE_SETUP.md
- Complete database setup guide
- Detailed schema definitions for all 12 tables
- Enum definitions
- Entity relationships diagram
- Security considerations
- Performance tips
21. /QUICK_START.md
- 5-minute quick start guide
- Command reference
- Common queries
- Troubleshooting quick fixes
22. /FILES_CREATED.txt
- This file
- Complete listing of all created files
SUMMARY STATISTICS
==================
Total Files Created: 22
Entity Files: 14 (12 entities + types + index)
Configuration Files: 2
Migration Files: 1
Seeder Files: 1
Documentation Files: 4
Total Entities: 12
Total Enums: 7
Total Tables Created: 12
Total Indexes Created: 40+
Total Foreign Keys: 10
DATABASE SCHEMA FEATURES
=======================
✓ All entities with proper TypeORM decorators
✓ UUID primary keys for all tables
✓ Proper foreign key relationships with CASCADE DELETE
✓ JSONB columns for flexible metadata storage
✓ Comprehensive indexing for query performance
✓ Custom enums for type safety
✓ BeforeInsert hooks for auto-generation and normalization
✓ Cascade operations properly configured
✓ Unique constraints on critical fields
✓ NOT NULL constraints where required
✓ Timestamp tracking (createdAt, updatedAt)
✓ Soft delete support via isActive boolean
✓ Full audit trail with actor identification
✓ Blockchain integration ready
✓ Webhook system configured
✓ Multi-stage workflow support
ENVIRONMENT VARIABLES REQUIRED
=============================
DATABASE_HOST=localhost (Default: localhost)
DATABASE_PORT=5432 (Default: 5432)
DATABASE_USER=postgres (Default: gel_user)
DATABASE_PASSWORD=*** (No default - REQUIRED)
DATABASE_NAME=goa_gel_db (Default: goa_gel_db)
DATABASE_LOGGING=true (Default: false)
DATABASE_SSL=false (Default: false)
NODE_ENV=development (Options: development|production)
SETUP INSTRUCTIONS
==================
1. npm install typeorm pg uuid crypto dotenv ts-node
2. Create .env with database credentials
3. createdb goa_gel_db
4. npx typeorm migration:run -d src/database/data-source.ts
5. npx ts-node src/database/seeders/seed.ts
6. Verify: psql goa_gel_db -c "\dt"
KEY FEATURES IMPLEMENTED
=======================
✓ Multi-stage approval workflows (5 stages for RESORT_LICENSE)
✓ Document versioning with SHA-256 hashing
✓ Blockchain NFT minting integration
✓ Webhook event system with retry mechanism
✓ Comprehensive audit logging
✓ Workflow state execution tracking
✓ Department-level approvals with remarks
✓ Applicant wallet address tracking
✓ Minio object storage integration
✓ Complete transaction tracking
✓ Status transition auditing
✓ Correlation ID support for distributed tracing
RELATIONSHIPS SUMMARY
====================
Applicant 1:N LicenseRequest
LicenseRequest N:1 Applicant
LicenseRequest N:1 Workflow
LicenseRequest 1:N Document
LicenseRequest 1:N Approval
LicenseRequest 1:1 WorkflowState
Document 1:N DocumentVersion
Approval N:1 Department
Department 1:N Approval
Department 1:N Webhook
Webhook 1:N WebhookLog
AuditLog (independent - tracks all entities)
BlockchainTransaction (independent - tracks transactions)
Total Relationships: 13 foreign key relationships
FILES READY FOR USE
===================
All files are production-ready and include:
✓ TypeScript type annotations
✓ JSDoc comments for clarity
✓ Proper error handling
✓ Performance optimizations
✓ Security best practices
✓ Extensible design patterns
To verify setup:
1. Check all files exist: ls -la /src/database/
2. Install dependencies: npm install
3. Run migrations: npx typeorm migration:run -d src/database/data-source.ts
4. Seed data: npx ts-node src/database/seeders/seed.ts
5. Connect to database: psql goa_gel_db -c "\dt"
All entities are ready for immediate use in your application!

View File

@@ -0,0 +1,453 @@
# Goa GEL Backend - Complete Project Structure
## Project Layout
```
/sessions/cool-elegant-faraday/mnt/Goa-GEL/backend/
├── src/
│ ├── config/
│ │ ├── app.config.ts # Application configuration
│ │ ├── database.config.ts # PostgreSQL TypeORM setup
│ │ ├── blockchain.config.ts # Hyperledger Besu/Web3 setup
│ │ ├── redis.config.ts # Redis client configuration
│ │ ├── minio.config.ts # MinIO object storage setup
│ │ └── jwt.config.ts # JWT authentication config
│ │
│ ├── common/
│ │ ├── decorators/
│ │ │ ├── api-key.decorator.ts # API key authentication decorator
│ │ │ ├── current-user.decorator.ts # Inject current user into request
│ │ │ └── department.decorator.ts # Department requirement decorator
│ │ │
│ │ ├── filters/
│ │ │ ├── http-exception.filter.ts # HTTP exception handler
│ │ │ └── all-exceptions.filter.ts # Global exception handler
│ │ │
│ │ ├── guards/
│ │ │ ├── jwt-auth.guard.ts # JWT authentication guard
│ │ │ ├── api-key.guard.ts # API key authentication guard
│ │ │ └── roles.guard.ts # Role-based access control guard
│ │ │
│ │ ├── interceptors/
│ │ │ ├── logging.interceptor.ts # Request/response logging
│ │ │ ├── transform.interceptor.ts # Response transformation
│ │ │ └── timeout.interceptor.ts # Request timeout handling
│ │ │
│ │ ├── pipes/
│ │ │ └── validation.pipe.ts # Custom validation pipe
│ │ │
│ │ ├── utils/
│ │ │ ├── hash.util.ts # Password and data hashing (bcrypt, SHA256/512)
│ │ │ ├── crypto.util.ts # Encryption/decryption (AES-256-GCM)
│ │ │ └── date.util.ts # Date manipulation utilities
│ │ │
│ │ ├── interfaces/
│ │ │ └── request-context.interface.ts # API response and pagination types
│ │ │
│ │ └── constants/
│ │ ├── error-codes.ts # Error codes and messages
│ │ └── events.ts # Application events
│ │
│ ├── database/
│ │ ├── data-source.ts # TypeORM data source
│ │ ├── migrations/ # TypeORM database migrations
│ │ ├── seeders/
│ │ │ └── seed.ts # Database seeding script
│ │ └── subscribers/ # TypeORM entity subscribers
│ │
│ ├── blockchain/
│ │ ├── blockchain.service.ts # Blockchain service
│ │ └── blockchain.module.ts # Blockchain module
│ │
│ ├── storage/
│ │ ├── storage.service.ts # MinIO storage service
│ │ └── storage.module.ts # Storage module
│ │
│ ├── queue/
│ │ └── queue.module.ts # Bull queue configuration
│ │
│ ├── modules/ # Feature modules (to be implemented)
│ │ ├── auth/
│ │ ├── users/
│ │ ├── documents/
│ │ └── departments/
│ │
│ └── app.module.ts # Root module
│ └── main.ts # Application entry point
├── test/
│ └── jest-e2e.json # E2E testing configuration
├── Configuration Files
├── .eslintrc.js # ESLint configuration (strict TypeScript rules)
├── .prettierrc # Code formatting rules
├── tsconfig.json # TypeScript strict configuration
├── nest-cli.json # NestJS CLI configuration
├── jest.config.js # Unit testing configuration
├── package.json # Dependencies and scripts
├── Docker & Deployment
├── Dockerfile # Multi-stage Docker build
├── docker-compose.yml # Development services (PostgreSQL, Redis, MinIO)
├── Environment & Git
├── .env.example # Environment variables template
├── .gitignore # Git ignore rules
├── .dockerignore # Docker ignore rules
├── .editorconfig # Editor configuration
└── Documentation
├── README.md # Project documentation
└── PROJECT_STRUCTURE.md # This file
```
## File Descriptions
### Configuration (src/config/)
1. **app.config.ts**
- Application name, version, port, host
- API prefix configuration
- CORS settings
- File upload limits
- Feature flags
2. **database.config.ts**
- PostgreSQL connection settings
- TypeORM configuration
- Entity and migration paths
- Logging and synchronization options
3. **blockchain.config.ts**
- Hyperledger Besu RPC URL
- Smart contract address
- Gas price and limit settings
- Private key for transactions
- Network configuration
4. **redis.config.ts**
- Redis host, port, password
- Database selection
- Retry and reconnection strategies
- TLS/SSL options
5. **minio.config.ts**
- MinIO endpoint and credentials
- Bucket names for documents and archives
- Region settings
- SSL configuration
6. **jwt.config.ts**
- JWT secret and expiration
- Refresh token settings
- API key header and value
- Token validation
### Common Utilities (src/common/)
1. **Decorators**
- `@ApiKeyAuth()`: Mark endpoints requiring API key
- `@CurrentUser()`: Inject authenticated user
- `@RequireDepartment()`: Enforce department access
2. **Filters**
- `HttpExceptionFilter`: Handle HTTP exceptions consistently
- `AllExceptionsFilter`: Catch unhandled exceptions
3. **Guards**
- `JwtAuthGuard`: Validate JWT tokens
- `ApiKeyGuard`: Validate API keys
- `RolesGuard`: Enforce role-based access control
4. **Interceptors**
- `LoggingInterceptor`: Log all HTTP requests/responses
- `TransformInterceptor`: Standardize API responses
- `TimeoutInterceptor`: Enforce request timeouts
5. **Pipes**
- `ValidationPipe`: Validate and transform DTOs
6. **Utils**
- `HashUtil`: Password hashing (bcrypt), file hashing (SHA256)
- `CryptoUtil`: AES-256-GCM encryption/decryption
- `DateUtil`: Date manipulation and formatting
### Database (src/database/)
- **data-source.ts**: TypeORM DataSource configuration for CLI and programmatic access
- **migrations/**: Version-controlled database schema changes
- **seeders/**: Initial data population scripts
- **subscribers/**: Entity lifecycle event handlers
### Core Services
1. **BlockchainService** (src/blockchain/)
- Connect to Hyperledger Besu
- Deploy and interact with smart contracts
- Monitor transaction status
- Handle blockchain errors
2. **StorageService** (src/storage/)
- MinIO client initialization
- Bucket creation and management
- File upload/download operations
- Secure file storage
3. **QueueModule** (src/queue/)
- Bull queue configuration
- Job queues:
- document-verification
- blockchain-transactions
- document-archive
- email-notifications
- audit-logs
## Key Technologies
### Backend Framework
- **NestJS 10**: Progressive Node.js framework
- **TypeScript 5**: Strict typing, no `any` types
### Database
- **PostgreSQL 16**: Relational database
- **TypeORM 0.3**: ORM with migrations
- **Redis 7**: Caching and sessions
### Blockchain
- **Hyperledger Besu**: Ethereum-compatible blockchain
- **ethers.js 6.9**: Web3 interaction library
### Storage
- **MinIO 7**: S3-compatible object storage
- **Multer**: File upload middleware
### Queue & Async
- **Bull 4**: Redis-based job queue
- **RxJS 7**: Reactive programming
### Authentication
- **JWT**: Stateless authentication
- **Passport.js**: Authentication middleware
- **bcrypt**: Password hashing
### Monitoring & Logging
- **Winston 3**: Structured logging
- **Helmet 7**: Security headers
- **Swagger/OpenAPI**: API documentation
### Testing
- **Jest 29**: Unit testing
- **Supertest 6**: HTTP testing
- **ts-jest**: TypeScript support
### Code Quality
- **ESLint 8**: Linting (strict rules)
- **Prettier 3**: Code formatting
- **Class-validator**: DTO validation
- **Class-transformer**: Object transformation
## NPM Scripts
```bash
npm run build # Build production bundle
npm run start # Run production build
npm run start:dev # Run with hot reload
npm run start:debug # Run in debug mode
npm run lint # Check and fix code style
npm run format # Format code with Prettier
npm run test # Run unit tests
npm run test:watch # Watch and rerun tests
npm run test:cov # Generate coverage report
npm run test:e2e # Run end-to-end tests
npm run migration:generate # Create new migration
npm run migration:run # Run pending migrations
npm run migration:revert # Revert last migration
npm run seed # Seed database with initial data
```
## Environment Variables
### Core
- `NODE_ENV`: development | production
- `APP_NAME`, `APP_VERSION`, `APP_PORT`, `APP_HOST`
- `API_PREFIX`: API route prefix (default: /api/v1)
### Database
- `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_NAME`
- `DATABASE_USER`, `DATABASE_PASSWORD`
- `DATABASE_SSL`, `DATABASE_LOGGING`, `DATABASE_SYNCHRONIZE`
### Redis
- `REDIS_HOST`, `REDIS_PORT`, `REDIS_PASSWORD`, `REDIS_DB`
- `REDIS_TLS`: Enable TLS
### Blockchain
- `BLOCKCHAIN_RPC_URL`: Besu node RPC URL
- `BLOCKCHAIN_CHAIN_ID`: Network chain ID
- `BLOCKCHAIN_CONTRACT_ADDRESS`: Smart contract address
- `BLOCKCHAIN_PRIVATE_KEY`: Account private key
- `BLOCKCHAIN_GAS_PRICE`, `BLOCKCHAIN_GAS_LIMIT`
### MinIO
- `MINIO_ENDPOINT`, `MINIO_PORT`
- `MINIO_ACCESS_KEY`, `MINIO_SECRET_KEY`
- `MINIO_BUCKET_DOCUMENTS`, `MINIO_BUCKET_ARCHIVES`
### Security
- `JWT_SECRET`: JWT signing key (required)
- `JWT_EXPIRATION`: Token lifetime (default: 7d)
- `API_KEY_HEADER`, `API_KEY_VALUE`: API key authentication
### CORS & Throttling
- `CORS_ORIGIN`: Allowed origins (comma-separated)
- `CORS_CREDENTIALS`: Enable credentials
- `THROTTLE_TTL`, `THROTTLE_LIMIT`: Rate limiting
### Features
- `ENABLE_BLOCKCHAIN_VERIFICATION`: true | false
- `ENABLE_AUDIT_LOGGING`: true | false
- `ENABLE_EMAIL_NOTIFICATIONS`: true | false
- `ENABLE_RATE_LIMITING`: true | false
## Module Responsibilities
### AppModule (Root)
- Configures all application modules
- Sets up database connection
- Initializes Redis and queues
- Configures throttling and validation
### BlockchainModule
- Provides blockchain service
- Manages Besu connections
- Handles smart contract interactions
### StorageModule
- Provides MinIO client
- Manages object storage
- Handles file operations
### QueueModule
- Configures Bull queues
- Manages async job processing
- Handles background tasks
### Feature Modules (TBD)
- **AuthModule**: Authentication and authorization
- **UsersModule**: User management
- **DocumentsModule**: Document operations
- **DepartmentsModule**: Department management
- **AuditModule**: Audit logging
## Error Handling
All errors follow a standardized format:
```typescript
{
success: false,
statusCode: number,
message: string,
error: {
code: string, // e.g., "DOC_001"
message: string,
details?: object
},
timestamp: string, // ISO 8601
path: string,
requestId?: string
}
```
Error codes are prefixed by domain:
- `AUTH_*`: Authentication errors
- `USER_*`: User management
- `DOC_*`: Document operations
- `CHAIN_*`: Blockchain operations
- `STOR_*`: Storage operations
- `VAL_*`: Validation errors
- `DB_*`: Database errors
- `QUEUE_*`: Queue operations
## Security Features
1. **Authentication**
- JWT tokens with expiration
- API key support
- Passport.js integration
2. **Authorization**
- Role-based access control (RBAC)
- Department-based filtering
- Permission validation
3. **Data Protection**
- Password hashing (bcrypt, 12 rounds)
- AES-256-GCM encryption
- SSL/TLS support
- HTTPS enforcement ready
4. **API Security**
- Helmet security headers
- CORS configuration
- Rate limiting
- Input validation
- SQL injection prevention (TypeORM)
5. **Logging & Audit**
- Request/response logging
- Audit trail
- Error tracking
- Performance monitoring
## Testing Structure
```
test/
├── jest-e2e.json # E2E configuration
├── e2e/ # E2E test files
└── unit/ # Unit test files
src/**/*.spec.ts # Unit test files (co-located)
```
## Docker & Deployment
### Services in docker-compose.yml
1. **PostgreSQL 16**: Port 5432
2. **Redis 7**: Port 6379
3. **MinIO**: Ports 9000 (API), 9001 (Console)
### Production Build
1. Multi-stage Dockerfile for optimized image
2. Separate dev and production dependencies
3. Health checks configured
## Next Steps
1. Implement feature modules:
- Authentication module
- User management
- Document management
- Department management
2. Create database entities and migrations
3. Implement API endpoints
4. Add comprehensive tests
5. Configure blockchain integration
6. Set up CI/CD pipeline
7. Deploy to production infrastructure
---
**Version**: 1.0.0
**Last Updated**: 2024-01-01
**Maintainer**: Government of Goa

378
backend/QUICK_START.md Normal file
View File

@@ -0,0 +1,378 @@
# Goa GEL Database - Quick Start Guide
## 5-Minute Setup
### 1. Install Dependencies
```bash
npm install typeorm pg uuid crypto dotenv ts-node
```
### 2. Create `.env` File
```env
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_USER=postgres
DATABASE_PASSWORD=your_password
DATABASE_NAME=goa_gel_db
DATABASE_LOGGING=true
DATABASE_SSL=false
NODE_ENV=development
```
### 3. Create Database
```bash
createdb goa_gel_db
```
### 4. Run Migrations
```bash
npx typeorm migration:run -d src/database/data-source.ts
```
### 5. Seed Sample Data
```bash
npx ts-node src/database/seeders/seed.ts
```
### 6. Verify
```bash
psql goa_gel_db -c "\dt"
```
## Database Structure
### 12 Core Entities
```
Applicant (License applicants)
├── id, digilockerId, name, email, phone, walletAddress
Department (Government departments)
├── id, code, name, walletAddress, apiKeyHash, webhookUrl
Workflow (Multi-stage workflows)
├── id, workflowType, definition (JSONB), stages
LicenseRequest (Main entity)
├── id, requestNumber, applicantId, workflowId
├── status (8 values), metadata, currentStageId
Document (Uploaded files)
├── id, requestId, docType, currentHash (SHA-256)
DocumentVersion (File versions)
├── id, documentId, version, hash, fileSize
Approval (Department approvals)
├── id, requestId, departmentId
├── status (5 values), remarks, blockchainTxHash
WorkflowState (Execution tracking)
├── id, requestId, currentStageId, executionLog (JSONB)
Webhook (Webhook configs)
├── id, departmentId, url, events (JSONB)
WebhookLog (Webhook audit)
├── id, webhookId, eventType, status, retryCount
AuditLog (Change tracking)
├── id, entityType, entityId, action, oldValue, newValue
BlockchainTransaction (NFT minting)
├── id, txHash, txType, status, blockNumber
```
## Key Features
### Status Tracking
- **LicenseRequest**: DRAFT, SUBMITTED, IN_REVIEW, PENDING_RESUBMISSION, APPROVED, REJECTED, REVOKED, CANCELLED
- **Approval**: PENDING, APPROVED, REJECTED, CHANGES_REQUESTED, REVIEW_REQUIRED
- **WebhookLog**: PENDING, SUCCESS, FAILED
- **BlockchainTransaction**: PENDING, CONFIRMED, FAILED
### Workflow Stages (RESORT_LICENSE)
1. Fire Safety Approval (FIRE_DEPT)
2. Tourism Clearance (TOURISM_DEPT)
3. Health Department Approval (HEALTH_DEPT)
4. Municipal Approval (MUNI_DEPT)
5. License Issuance (System Action - NFT Minting)
### Sample Data After Seeding
- 4 Departments (Fire, Tourism, Municipal, Health)
- 2 Applicants
- 1 RESORT_LICENSE Workflow with 5 stages
- 1 License Request in DRAFT status
## TypeORM Commands
```bash
# Run migrations
npx typeorm migration:run -d src/database/data-source.ts
# Generate migration from entity changes
npx typeorm migration:generate -d src/database/data-source.ts -n MigrationName
# Revert last migration
npx typeorm migration:revert -d src/database/data-source.ts
# Show migration status
npx typeorm migration:show -d src/database/data-source.ts
# Sync schema (development only)
npx typeorm schema:sync -d src/database/data-source.ts
# Drop database schema
npx typeorm schema:drop -d src/database/data-source.ts
```
## Common Queries
### Find Applicant with Requests
```typescript
const applicant = await applicantRepository.findOne({
where: { id: applicantId },
relations: ['licenseRequests'],
});
```
### Get Pending Approvals
```typescript
const pending = await approvalRepository.find({
where: {
status: ApprovalStatus.PENDING,
departmentId: deptId,
isActive: true
},
relations: ['request', 'department'],
order: { createdAt: 'ASC' }
});
```
### Find License Request with Details
```typescript
const request = await licenseRequestRepository.findOne({
where: { id: requestId },
relations: [
'applicant',
'workflow',
'documents',
'documents.versions',
'approvals',
'approvals.department',
'workflowState'
]
});
```
### Get Audit Trail
```typescript
const auditTrail = await auditLogRepository.find({
where: { entityId: entityId },
order: { createdAt: 'DESC' },
take: 100
});
```
### Track Blockchain Transactions
```typescript
const txs = await blockchainTransactionRepository.find({
where: { relatedEntityId: requestId },
order: { createdAt: 'DESC' }
});
```
## File Structure
```
/src/database/
├── entities/ # 12 entity files + types
│ ├── applicant.entity.ts
│ ├── department.entity.ts
│ ├── workflow.entity.ts
│ ├── license-request.entity.ts
│ ├── document.entity.ts
│ ├── document-version.entity.ts
│ ├── approval.entity.ts
│ ├── workflow-state.entity.ts
│ ├── webhook.entity.ts
│ ├── webhook-log.entity.ts
│ ├── audit-log.entity.ts
│ ├── blockchain-transaction.entity.ts
│ ├── types.ts
│ └── index.ts
├── migrations/
│ └── 1704067200000-InitialSchema.ts
├── seeders/
│ └── seed.ts
├── data-source.ts
├── index.ts
└── README.md
```
## Indexes (40+ Total)
### Applicant Indexes
- digilockerId, walletAddress, email
### LicenseRequest Indexes
- requestNumber, applicantId, workflowId, status, createdAt
- Composite: (applicantId, status)
### Approval Indexes
- requestId, departmentId, status
- Composite: (requestId, departmentId), (requestId, status)
### Document Indexes
- requestId, currentHash
- Composite: (requestId, docType)
### AuditLog Indexes
- entityType, entityId, action, actorType, createdAt
- Composite: (entityType, entityId), (actorId, createdAt)
### BlockchainTransaction Indexes
- txHash, status, txType, relatedEntityId, createdAt
- Composite: (status, txType)
### WebhookLog Indexes
- webhookId, eventType, status, createdAt
- Composite: (webhookId, status)
## Environment Variables
```env
# Database Connection
DATABASE_HOST=localhost # PostgreSQL host
DATABASE_PORT=5432 # PostgreSQL port
DATABASE_USER=postgres # DB username
DATABASE_PASSWORD=*** # DB password
DATABASE_NAME=goa_gel_db # Database name
# Application
NODE_ENV=development # development|production
DATABASE_LOGGING=true # Enable query logging
DATABASE_SSL=false # SSL connection
```
## Sample SQL Queries
### Get Applicant with Active Requests
```sql
SELECT a.*, COUNT(lr.id) as request_count
FROM applicants a
LEFT JOIN license_requests lr ON a.id = lr.applicantId AND lr.status != 'CANCELLED'
WHERE a.isActive = true
GROUP BY a.id;
```
### Get Workflow Progress
```sql
SELECT
lr.requestNumber,
lr.status,
ws.currentStageId,
COUNT(CASE WHEN a.status = 'APPROVED' THEN 1 END) as approved_count,
COUNT(CASE WHEN a.status = 'PENDING' THEN 1 END) as pending_count
FROM license_requests lr
JOIN workflow_states ws ON lr.id = ws.requestId
LEFT JOIN approvals a ON lr.id = a.requestId AND a.isActive = true
GROUP BY lr.id, ws.id;
```
### Get Department Statistics
```sql
SELECT
d.code,
d.name,
COUNT(a.id) as total_approvals,
COUNT(CASE WHEN a.status = 'PENDING' THEN 1 END) as pending,
COUNT(CASE WHEN a.status = 'APPROVED' THEN 1 END) as approved,
COUNT(CASE WHEN a.status = 'REJECTED' THEN 1 END) as rejected
FROM departments d
LEFT JOIN approvals a ON d.id = a.departmentId AND a.isActive = true
GROUP BY d.id;
```
### Get Recent Audit Trail
```sql
SELECT *
FROM audit_logs
WHERE entityId = $1
ORDER BY createdAt DESC
LIMIT 100;
```
## Troubleshooting
### Database Won't Connect
```bash
# Check if PostgreSQL is running
sudo systemctl status postgresql
# Test connection
psql -h localhost -U postgres -c "SELECT 1"
```
### Migration Failed
```bash
# Check migration status
npx typeorm migration:show -d src/database/data-source.ts
# Revert problematic migration
npx typeorm migration:revert -d src/database/data-source.ts
# Check for entity/migration conflicts
ls -la src/database/entities/
ls -la src/database/migrations/
```
### Seeding Failed
```bash
# Drop and recreate
npx typeorm schema:drop -d src/database/data-source.ts
npx typeorm migration:run -d src/database/data-source.ts
npx ts-node src/database/seeders/seed.ts
```
### Check Database
```bash
# Connect to database
psql goa_gel_db
# List tables
\dt
# List indexes
\di
# Check constraint
\d license_requests
# View migration history
SELECT * FROM typeorm_migrations;
```
## Performance Tips
1. Always use indexes for WHERE clauses
2. Use relations only when needed
3. Use pagination for large result sets
4. Cache workflow definitions
5. Batch document uploads
6. Monitor slow queries
## Next Steps
1. Configure your application to use the database
2. Create repositories for each entity
3. Implement business logic services
4. Add API endpoints
5. Set up webhook listeners
6. Implement blockchain integration
## Support Files
- `/src/database/README.md` - Detailed documentation
- `/DATABASE_SETUP.md` - Complete setup guide
- `/src/database/entities/types.ts` - TypeScript interfaces

297
backend/README.md Normal file
View File

@@ -0,0 +1,297 @@
# Goa GEL Backend
**Blockchain Document Verification Platform for Government of Goa**
A production-ready NestJS backend for managing multi-department approval workflows with blockchain-backed verification using Hyperledger Besu and ERC-721 Soulbound NFTs.
## 🚀 Quick Start
### Prerequisites
- Node.js 18+
- Docker & Docker Compose
- PostgreSQL 15+ (or use Docker)
- Redis 7+ (or use Docker)
### Installation
```bash
# Clone and install
cd backend
npm install
# Copy environment file
cp .env.example .env
# Start infrastructure (PostgreSQL, Redis, MinIO, Besu)
docker-compose up -d postgres redis minio besu-node-1
# Run migrations
npm run migrate:latest
# Seed sample data
npm run seed:run
# Start development server
npm run start:dev
```
### Access Points
| Service | URL | Description |
|---------|-----|-------------|
| API | http://localhost:3001 | REST API |
| Swagger Docs | http://localhost:3001/api/docs | API Documentation |
| Health Check | http://localhost:3001/health | Service Health |
| MinIO Console | http://localhost:9001 | Object Storage UI |
| Besu RPC | http://localhost:8545 | Blockchain RPC |
## 📁 Project Structure
```
backend/
├── src/
│ ├── main.ts # Application entry point
│ ├── app.module.ts # Root module
│ ├── config/ # Configuration files
│ │ ├── app.config.ts
│ │ ├── database.config.ts
│ │ ├── blockchain.config.ts
│ │ ├── storage.config.ts
│ │ └── redis.config.ts
│ ├── common/ # Shared utilities
│ │ ├── constants/
│ │ ├── decorators/
│ │ ├── enums/
│ │ ├── filters/
│ │ ├── guards/
│ │ ├── interceptors/
│ │ ├── interfaces/
│ │ ├── pipes/
│ │ └── utils/
│ ├── database/ # Database layer (Knex + Objection.js)
│ │ ├── models/ # Objection.js models
│ │ ├── migrations/ # Knex migrations
│ │ ├── seeds/ # Seed data
│ │ └── knexfile.ts
│ └── modules/ # Feature modules
│ ├── auth/ # Authentication
│ ├── applicants/ # Applicant management
│ ├── departments/ # Department management
│ ├── requests/ # License requests
│ ├── documents/ # Document management
│ ├── approvals/ # Approval workflow
│ ├── workflows/ # Workflow engine
│ ├── webhooks/ # Webhook delivery
│ ├── blockchain/ # Blockchain integration
│ ├── admin/ # Admin operations
│ └── audit/ # Audit logging
├── test/ # Test suites
├── docker-compose.yml # Docker services
├── Dockerfile # Production image
└── package.json
```
## 🔧 Configuration
### Environment Variables
```bash
# Application
NODE_ENV=development
PORT=3001
API_VERSION=v1
# Database (PostgreSQL)
DATABASE_HOST=localhost
DATABASE_PORT=5432
DATABASE_NAME=goa_gel_platform
DATABASE_USER=postgres
DATABASE_PASSWORD=your_password
# Blockchain (Hyperledger Besu)
BESU_RPC_URL=http://localhost:8545
BESU_CHAIN_ID=1337
CONTRACT_ADDRESS_LICENSE_NFT=0x...
PLATFORM_WALLET_PRIVATE_KEY=0x...
# Storage (MinIO)
MINIO_ENDPOINT=localhost
MINIO_PORT=9000
MINIO_ACCESS_KEY=minioadmin
MINIO_SECRET_KEY=minioadmin
# Redis
REDIS_HOST=localhost
REDIS_PORT=6379
# Security
JWT_SECRET=your-32-char-secret-key
```
## 📚 API Documentation
### Authentication
**Department Login:**
```bash
curl -X POST http://localhost:3001/api/v1/auth/department/login \
-H "Content-Type: application/json" \
-d '{"apiKey": "fire_api_key_123", "departmentCode": "FIRE_DEPT"}'
```
**DigiLocker Login (Mock):**
```bash
curl -X POST http://localhost:3001/api/v1/auth/digilocker/login \
-H "Content-Type: application/json" \
-d '{"digilockerId": "DL-GOA-123456789", "name": "John Doe", "email": "john@example.com"}'
```
### Core Endpoints
| Method | Endpoint | Description |
|--------|----------|-------------|
| POST | `/api/v1/requests` | Create license request |
| POST | `/api/v1/requests/:id/submit` | Submit for approval |
| GET | `/api/v1/requests/:id` | Get request details |
| GET | `/api/v1/requests/pending` | Get pending requests |
| POST | `/api/v1/requests/:id/documents` | Upload document |
| POST | `/api/v1/requests/:id/approve` | Approve request |
| POST | `/api/v1/requests/:id/reject` | Reject request |
| GET | `/api/v1/workflows` | List workflows |
| POST | `/api/v1/webhooks` | Register webhook |
## 🗄️ Database
### Using Knex Migrations
```bash
# Create new migration
npm run migrate:make -- create_new_table
# Run migrations
npm run migrate:latest
# Rollback last migration
npm run migrate:rollback
# Check migration status
npm run migrate:status
# Run seeds
npm run seed:run
```
### Models (Objection.js)
- `Applicant` - User profiles linked to DigiLocker
- `Department` - Government departments with API keys
- `LicenseRequest` - License/permit applications
- `Document` - Uploaded documents with versioning
- `Approval` - Department approval records
- `Workflow` - Approval workflow definitions
- `WorkflowState` - Workflow execution state
- `Webhook` - Webhook configurations
- `AuditLog` - Immutable audit trail
- `BlockchainTransaction` - Blockchain transaction records
## 🔗 Blockchain Integration
### Smart Contracts
| Contract | Purpose |
|----------|---------|
| LicenseRequestNFT | ERC-721 Soulbound NFTs for licenses |
| ApprovalManager | Records approvals on-chain |
| DepartmentRegistry | Department registration |
| WorkflowRegistry | Workflow definitions |
### Transaction Flow
1. Request created → Draft NFT minted
2. Document uploaded → Hash recorded on-chain
3. Department approves → Approval recorded on-chain
4. All approvals complete → NFT finalized
## 🧪 Testing
```bash
# Run unit tests
npm test
# Run with coverage
npm run test:cov
# Run e2e tests
npm run test:e2e
# Watch mode
npm run test:watch
```
## 🐳 Docker Deployment
### Development
```bash
# Start all services
docker-compose up -d
# View logs
docker-compose logs -f api
# Stop services
docker-compose down
```
### Production
```bash
# Build production image
docker build -t goa-gel-api:latest .
# Run with production compose
docker-compose -f docker-compose.prod.yml up -d
```
## 📊 Monitoring
### Health Check
```bash
curl http://localhost:3001/health
```
Response:
```json
{
"status": "ok",
"timestamp": "2024-01-15T10:30:00.000Z",
"uptime": 3600,
"checks": {
"database": "ok",
"redis": "ok",
"blockchain": "ok",
"storage": "ok"
}
}
```
## 🔐 Security
- API Key authentication for departments
- JWT tokens for applicants
- RBAC (Role-Based Access Control)
- Rate limiting (100 req/min global)
- Input validation with class-validator
- SQL injection prevention (parameterized queries)
- Helmet security headers
- CORS configuration
## 📝 License
Proprietary - Government of Goa
## 🤝 Support
For technical support, contact: support@goagel.gov.in

View File

@@ -0,0 +1,344 @@
version: '3.9'
services:
# PostgreSQL Database - Production
postgres:
image: postgres:15-alpine
container_name: goa-gel-postgres-prod
restart: always
environment:
POSTGRES_USER: ${DATABASE_USER}
POSTGRES_PASSWORD: ${DATABASE_PASSWORD}
POSTGRES_DB: ${DATABASE_NAME}
POSTGRES_INITDB_ARGS: "--encoding=UTF8 --locale=C"
ports:
- "127.0.0.1:5432:5432"
volumes:
- postgres_data_prod:/var/lib/postgresql/data
- ./docker/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:ro
- ./docker/postgres/backup.sh:/usr/local/bin/backup.sh:ro
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${DATABASE_USER} -d ${DATABASE_NAME}"]
interval: 30s
timeout: 10s
retries: 5
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-postgres"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "postgres"
security_opt:
- no-new-privileges:true
# Redis Cache - Production
redis:
image: redis:7-alpine
container_name: goa-gel-redis-prod
restart: always
command: redis-server --requirepass ${REDIS_PASSWORD} --appendonly yes --loglevel warning
ports:
- "127.0.0.1:6379:6379"
volumes:
- redis_data_prod:/data
healthcheck:
test: ["CMD", "redis-cli", "--raw", "incr", "ping"]
interval: 30s
timeout: 10s
retries: 5
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-redis"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "redis"
security_opt:
- no-new-privileges:true
# MinIO Object Storage - Production
minio:
image: minio/minio:latest
container_name: goa-gel-minio-prod
restart: always
environment:
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY}
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY}
MINIO_BROWSER_REDIRECT_URL: https://minio-console.goa-gel.gov.in
ports:
- "127.0.0.1:9000:9000"
- "127.0.0.1:9001:9001"
volumes:
- minio_data_prod:/data
command: server /data --console-address ":9001" --certs-dir /etc/minio/certs
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-minio"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "minio"
security_opt:
- no-new-privileges:true
# Hyperledger Besu Validator Node 1 - Production
besu-validator-1:
image: hyperledger/besu:latest
container_name: goa-gel-besu-validator-1-prod
restart: always
command: --config-file=/etc/besu/config.toml
environment:
BESU_NODE_KEY_FILE: /etc/besu/node-keys/validator-1/key
BESU_P2P_HOST: besu-validator-1
BESU_METRICS_ENABLED: "true"
BESU_METRICS_HOST: 0.0.0.0
ports:
- "127.0.0.1:8545:8545"
- "127.0.0.1:8546:8546"
- "30303:30303"
- "127.0.0.1:9545:9545"
volumes:
- ./docker/besu/config.toml:/etc/besu/config.toml:ro
- ./docker/besu/genesis.json:/etc/besu/genesis.json:ro
- ./docker/besu/node-keys/validator-1:/etc/besu/node-keys/validator-1:ro
- besu-validator-1-data-prod:/var/lib/besu
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8545"]
interval: 30s
timeout: 10s
retries: 5
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-besu-validator-1"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "validator-1"
security_opt:
- no-new-privileges:true
# Hyperledger Besu Validator Node 2 - Production
besu-validator-2:
image: hyperledger/besu:latest
container_name: goa-gel-besu-validator-2-prod
restart: always
command: --config-file=/etc/besu/config.toml
environment:
BESU_NODE_KEY_FILE: /etc/besu/node-keys/validator-2/key
BESU_P2P_HOST: besu-validator-2
BESU_METRICS_ENABLED: "true"
BESU_METRICS_HOST: 0.0.0.0
ports:
- "127.0.0.1:8546:8545"
- "127.0.0.1:8547:8546"
- "30304:30303"
- "127.0.0.1:9546:9545"
volumes:
- ./docker/besu/config.toml:/etc/besu/config.toml:ro
- ./docker/besu/genesis.json:/etc/besu/genesis.json:ro
- ./docker/besu/node-keys/validator-2:/etc/besu/node-keys/validator-2:ro
- besu-validator-2-data-prod:/var/lib/besu
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8545"]
interval: 30s
timeout: 10s
retries: 5
depends_on:
besu-validator-1:
condition: service_healthy
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-besu-validator-2"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "validator-2"
security_opt:
- no-new-privileges:true
# Hyperledger Besu Validator Node 3 - Production
besu-validator-3:
image: hyperledger/besu:latest
container_name: goa-gel-besu-validator-3-prod
restart: always
command: --config-file=/etc/besu/config.toml
environment:
BESU_NODE_KEY_FILE: /etc/besu/node-keys/validator-3/key
BESU_P2P_HOST: besu-validator-3
BESU_METRICS_ENABLED: "true"
BESU_METRICS_HOST: 0.0.0.0
ports:
- "127.0.0.1:8548:8545"
- "127.0.0.1:8549:8546"
- "30305:30303"
- "127.0.0.1:9547:9545"
volumes:
- ./docker/besu/config.toml:/etc/besu/config.toml:ro
- ./docker/besu/genesis.json:/etc/besu/genesis.json:ro
- ./docker/besu/node-keys/validator-3:/etc/besu/node-keys/validator-3:ro
- besu-validator-3-data-prod:/var/lib/besu
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8545"]
interval: 30s
timeout: 10s
retries: 5
depends_on:
besu-validator-2:
condition: service_healthy
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-besu-validator-3"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "validator-3"
security_opt:
- no-new-privileges:true
# Hyperledger Besu Validator Node 4 - Production
besu-validator-4:
image: hyperledger/besu:latest
container_name: goa-gel-besu-validator-4-prod
restart: always
command: --config-file=/etc/besu/config.toml
environment:
BESU_NODE_KEY_FILE: /etc/besu/node-keys/validator-4/key
BESU_P2P_HOST: besu-validator-4
BESU_METRICS_ENABLED: "true"
BESU_METRICS_HOST: 0.0.0.0
ports:
- "127.0.0.1:8550:8545"
- "127.0.0.1:8551:8546"
- "30306:30303"
- "127.0.0.1:9548:9545"
volumes:
- ./docker/besu/config.toml:/etc/besu/config.toml:ro
- ./docker/besu/genesis.json:/etc/besu/genesis.json:ro
- ./docker/besu/node-keys/validator-4:/etc/besu/node-keys/validator-4:ro
- besu-validator-4-data-prod:/var/lib/besu
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8545"]
interval: 30s
timeout: 10s
retries: 5
depends_on:
besu-validator-3:
condition: service_healthy
networks:
- gel-network
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-besu-validator-4"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "validator-4"
security_opt:
- no-new-privileges:true
# NestJS API Service - Production
api:
image: ${DOCKER_REGISTRY:-goa-gel}/api:${VERSION:-latest}
container_name: goa-gel-api-prod
restart: always
environment:
NODE_ENV: production
APP_PORT: 3001
APP_HOST: 0.0.0.0
DATABASE_HOST: postgres
DATABASE_PORT: 5432
DATABASE_NAME: ${DATABASE_NAME}
DATABASE_USER: ${DATABASE_USER}
DATABASE_PASSWORD: ${DATABASE_PASSWORD}
DATABASE_SSL: "true"
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_PASSWORD: ${REDIS_PASSWORD}
REDIS_TLS: "true"
BLOCKCHAIN_RPC_URL: http://besu-validator-1:8545
BLOCKCHAIN_CHAIN_ID: ${BLOCKCHAIN_CHAIN_ID:-1337}
BLOCKCHAIN_GAS_PRICE: ${BLOCKCHAIN_GAS_PRICE:-1000000000}
BLOCKCHAIN_GAS_LIMIT: ${BLOCKCHAIN_GAS_LIMIT:-6000000}
BLOCKCHAIN_PRIVATE_KEY: ${BLOCKCHAIN_PRIVATE_KEY}
MINIO_ENDPOINT: minio
MINIO_PORT: 9000
MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
MINIO_USE_SSL: "true"
JWT_SECRET: ${JWT_SECRET}
JWT_EXPIRATION: ${JWT_EXPIRATION:-7d}
CORS_ORIGIN: ${CORS_ORIGIN}
LOG_LEVEL: ${LOG_LEVEL:-warn}
ENABLE_BLOCKCHAIN_VERIFICATION: "true"
ENABLE_AUDIT_LOGGING: "true"
ENABLE_RATE_LIMITING: "true"
SENTRY_DSN: ${SENTRY_DSN}
ports:
- "127.0.0.1:3001:3001"
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
minio:
condition: service_healthy
besu-validator-1:
condition: service_healthy
networks:
- gel-network
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3001/health"]
interval: 30s
timeout: 10s
start-period: 60s
retries: 5
logging:
driver: "awslogs"
options:
awslogs-group: "goa-gel-api"
awslogs-region: ${AWS_REGION:-ap-south-1}
awslogs-stream: "api"
security_opt:
- no-new-privileges:true
deploy:
resources:
limits:
cpus: '2'
memory: 1G
reservations:
cpus: '1'
memory: 512M
networks:
gel-network:
driver: bridge
ipam:
config:
- subnet: 172.20.0.0/16
volumes:
postgres_data_prod:
driver: local
redis_data_prod:
driver: local
minio_data_prod:
driver: local
besu-validator-1-data-prod:
driver: local
besu-validator-2-data-prod:
driver: local
besu-validator-3-data-prod:
driver: local
besu-validator-4-data-prod:
driver: local

238
backend/docker-compose.yml Normal file
View File

@@ -0,0 +1,238 @@
services:
# ================================
# NestJS API Backend
# ================================
api:
build:
context: .
dockerfile: Dockerfile
container_name: goa-gel-api
restart: unless-stopped
ports:
- "3001:3001"
environment:
- NODE_ENV=development
- PORT=3001
- DATABASE_HOST=postgres
- DATABASE_PORT=5432
- DATABASE_NAME=goa_gel_platform
- DATABASE_USER=postgres
- DATABASE_PASSWORD=postgres_secure_password
- REDIS_HOST=redis
- REDIS_PORT=6379
- MINIO_ENDPOINT=minio
- MINIO_PORT=9000
- MINIO_ACCESS_KEY=minioadmin
- MINIO_SECRET_KEY=minioadmin_secure
- MINIO_BUCKET_DOCUMENTS=goa-gel-documents
- BESU_RPC_URL=http://besu-node-1:8545
- BESU_CHAIN_ID=1337
- BESU_NETWORK_ID=2024
- CONTRACT_ADDRESS_LICENSE_NFT=${CONTRACT_ADDRESS_LICENSE_NFT}
- CONTRACT_ADDRESS_APPROVAL_MANAGER=${CONTRACT_ADDRESS_APPROVAL_MANAGER}
- CONTRACT_ADDRESS_DEPARTMENT_REGISTRY=${CONTRACT_ADDRESS_DEPARTMENT_REGISTRY}
- CONTRACT_ADDRESS_WORKFLOW_REGISTRY=${CONTRACT_ADDRESS_WORKFLOW_REGISTRY}
- PLATFORM_WALLET_PRIVATE_KEY=${PLATFORM_WALLET_PRIVATE_KEY}
- JWT_SECRET=your-super-secure-jwt-secret-key-min-32-chars-long
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
minio:
condition: service_healthy
networks:
- goa-gel-network
volumes:
- ./src:/app/src:ro
healthcheck:
test: ["CMD", "wget", "--spider", "-q", "http://localhost:3001/health"]
interval: 30s
timeout: 10s
retries: 3
# ================================
# PostgreSQL Database
# ================================
postgres:
image: postgres:15-alpine
container_name: goa-gel-postgres
restart: unless-stopped
ports:
- "5432:5432"
environment:
- POSTGRES_DB=goa_gel_platform
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres_secure_password
volumes:
- postgres_data:/var/lib/postgresql/data
networks:
- goa-gel-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d goa_gel_platform"]
interval: 10s
timeout: 5s
retries: 5
# ================================
# Redis Cache & Queue
# ================================
redis:
image: redis:7-alpine
container_name: goa-gel-redis
restart: unless-stopped
ports:
- "6379:6379"
command: redis-server --appendonly yes
volumes:
- redis_data:/data
networks:
- goa-gel-network
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 10s
timeout: 5s
retries: 5
# ================================
# MinIO Object Storage
# ================================
minio:
image: minio/minio:latest
container_name: goa-gel-minio
restart: unless-stopped
ports:
- "9000:9000"
- "9001:9001"
environment:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin_secure
command: server /data --console-address ":9001"
volumes:
- minio_data:/data
networks:
- goa-gel-network
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s
timeout: 20s
retries: 3
# ================================
# Hyperledger Besu Dev Node (Auto-mining)
# ================================
besu-node-1:
image: hyperledger/besu:24.1.0
container_name: goa-gel-besu-1
restart: unless-stopped
user: root
ports:
- "8545:8545"
- "8546:8546"
- "30303:30303"
command:
- --network=dev
- --miner-enabled
- --miner-coinbase=0xfe3b557e8fb62b89f4916b721be55ceb828dbd73
- --rpc-http-enabled
- --rpc-http-host=0.0.0.0
- --rpc-http-port=8545
- --rpc-http-cors-origins=*
- --rpc-http-api=ETH,NET,WEB3,DEBUG,MINER,ADMIN,TXPOOL,TRACE
- --rpc-ws-enabled
- --rpc-ws-host=0.0.0.0
- --rpc-ws-port=8546
- --host-allowlist=*
- --min-gas-price=0
- --data-path=/var/lib/besu
volumes:
- besu_data_1:/var/lib/besu
networks:
- goa-gel-network
healthcheck:
test: ["CMD-SHELL", "exit 0"]
interval: 30s
timeout: 10s
retries: 3
# ================================
# Blockscout Database
# ================================
blockscout-db:
image: postgres:15-alpine
container_name: goa-gel-blockscout-db
restart: unless-stopped
environment:
POSTGRES_DB: blockscout
POSTGRES_USER: blockscout
POSTGRES_PASSWORD: blockscout_secure
volumes:
- blockscout_db_data:/var/lib/postgresql/data
networks:
- goa-gel-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U blockscout -d blockscout"]
interval: 10s
timeout: 5s
retries: 5
# ================================
# Blockscout Explorer
# ================================
blockscout:
image: blockscout/blockscout:6.3.0
container_name: goa-gel-blockscout
restart: unless-stopped
ports:
- "4000:4000"
environment:
DATABASE_URL: postgresql://blockscout:blockscout_secure@blockscout-db:5432/blockscout
ETHEREUM_JSONRPC_VARIANT: besu
ETHEREUM_JSONRPC_HTTP_URL: http://besu-node-1:8545
ETHEREUM_JSONRPC_WS_URL: ws://besu-node-1:8546
ETHEREUM_JSONRPC_TRACE_URL: http://besu-node-1:8545
NETWORK: Goa-GEL Private Network
SUBNETWORK: Development
LOGO: /images/blockscout_logo.svg
LOGO_FOOTER: /images/blockscout_logo.svg
COIN: ETH
COIN_NAME: Ether
INDEXER_DISABLE_PENDING_TRANSACTIONS_FETCHER: "true"
INDEXER_DISABLE_INTERNAL_TRANSACTIONS_FETCHER: "false"
FETCH_REWARDS_WAY: trace_block
TRACE_FIRST_BLOCK: "0"
TRACE_LAST_BLOCK: ""
POOL_SIZE: 80
POOL_SIZE_API: 10
ECTO_USE_SSL: "false"
SECRET_KEY_BASE: RMgI4C1HSkxsEjdhtGMfwAHfyT6CKWXOgzCboJflfSm4jeAlic52io05KB6mqzc5
PORT: 4000
DISABLE_EXCHANGE_RATES: "true"
SHOW_TXS_CHART: "true"
HISTORY_FETCH_INTERVAL: 30
TXS_HISTORIAN_INIT_LAG: 0
TXS_STATS_DAYS_TO_COMPILE_AT_INIT: 10
HEART_BEAT_TIMEOUT: 60
BLOCKSCOUT_HOST: localhost
BLOCKSCOUT_PROTOCOL: http
API_V2_ENABLED: "true"
MIX_ENV: prod
depends_on:
blockscout-db:
condition: service_healthy
besu-node-1:
condition: service_healthy
networks:
- goa-gel-network
command: sh -c "bin/blockscout eval \"Elixir.Explorer.ReleaseTasks.create_and_migrate()\" && bin/blockscout start"
networks:
goa-gel-network:
driver: bridge
volumes:
postgres_data:
redis_data:
minio_data:
besu_data_1:
blockscout_db_data:

View File

@@ -0,0 +1,50 @@
[Node]
data-path="/var/lib/besu"
p2p-port=30303
p2p-host="0.0.0.0"
rpc-http-enabled=true
rpc-http-host="0.0.0.0"
rpc-http-port=8545
rpc-http-api=["ETH", "NET", "WEB3", "ADMIN", "QBFT"]
rpc-http-cors-origins=["http://localhost:3000", "http://localhost:3001", "http://localhost:8080"]
rpc-ws-enabled=true
rpc-ws-host="0.0.0.0"
rpc-ws-port=8546
rpc-ws-api=["ETH", "NET", "WEB3", "ADMIN", "QBFT"]
graphql-http-enabled=false
sync-mode="FAST"
pruning-enabled=true
pruning-blocks-retained=1024
block-gas-limit=6000000
max-peers=30
max-inbound-connections=10
max-outbound-connections=20
min-block-fill-percentage=80
miner-enabled=false
discovery-enabled=true
discovery-dns-url="enrtree://AKA3AM_xupxQufGBg7EspalDjrWT0RD94jj_qc52cpgfUmu@nodes.goa-gel.gov.in"
logging="INFO"
host-allowlist=["localhost", "127.0.0.1", "besu-validator-1", "besu-validator-2", "besu-validator-3", "besu-validator-4"]
[Network]
bootnodes=[
"enode://9723eb17ebf1d4d00aba7d9c1bf7b9e5ceae8e4f4cf9f9a6e8f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f@besu-validator-1:30303",
"enode://2711dc881909b83b5ec4009e8815ebdcf7eaea3c1f4cf9f9a6e8f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f@besu-validator-2:30303",
"enode://27c0ca7c79b26c325581434091beb595f38e8abc1f4cf9f9a6e8f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f@besu-validator-3:30303",
"enode://d96245571cac7631eac214ba82cbf90f1f1ea2811f4cf9f9a6e8f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f@besu-validator-4:30303"
]
[Consensus]
consensus="qbft"
[Metrics]
metrics-enabled=true
metrics-host="0.0.0.0"
metrics-port=9545
metrics-protocol="PROMETHEUS"
[Privacy]
privacy-enabled=false
[Gas]
target-gas-limit=6000000

View File

@@ -0,0 +1,45 @@
{
"config": {
"chainId": 1337,
"homesteadBlock": 0,
"eip150Block": 0,
"eip155Block": 0,
"eip158Block": 0,
"byzantiumBlock": 0,
"constantinopleBlock": 0,
"petersburgBlock": 0,
"istanbulBlock": 0,
"muirGlacierBlock": 0,
"berlinBlock": 0,
"londonBlock": 0,
"arrowGlacierBlock": 0,
"grayGlacierBlock": 0,
"qbft": {
"blockperiodseconds": 2,
"epochlength": 30000,
"requesttimeoutseconds": 4,
"validaterroundrobintimeout": 0
}
},
"nonce": "0x0",
"timestamp": "0x58ee40ba",
"extraData": "0xf83ea00000000000000000000000000000000000000000000000000000000000000000d5949723eb17ebf1d4d00aba7d9c1bf7b9e5ceae8e4f4cf9f9a6e8f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f8e7f80c0",
"gasLimit": "0x47b760",
"difficulty": "0x1",
"mixHash": "0x63746963616c2062797a616e74696e652066617566742074six656c6572616e6365",
"coinbase": "0x0000000000000000000000000000000000000000",
"alloc": {
"9723eb17ebf1d4d00aba7d9c1bf7b9e5ceae8e4": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"2711dc881909b83b5ec4009e8815ebdcf7eaea3c": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"27c0ca7c79b26c325581434091beb595f38e8abc": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
},
"d96245571cac7631eac214ba82cbf90f1f1ea281": {
"balance": "0x200000000000000000000000000000000000000000000000000000000000000"
}
}
}

View File

@@ -0,0 +1 @@
9f02d7c45e9e3f1a8b7d6c5e4f3a2b1c9f02d7c45e9e3f1a8b7d6c5e4f3a2b

View File

@@ -0,0 +1 @@
a1e8f2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9

18
backend/gen-hashes.js Normal file
View File

@@ -0,0 +1,18 @@
const bcrypt = require('bcrypt');
const passwords = {
'Admin@123': 'admin@goa.gov.in',
'Fire@123': 'fire@goa.gov.in',
'Tourism@123': 'tourism@goa.gov.in',
'Municipality@123': 'municipality@goa.gov.in',
'Citizen@123': 'citizen@example.com'
};
async function generateHashes() {
for (const [password, email] of Object.entries(passwords)) {
const hash = await bcrypt.hash(password, 10);
console.log(`${email}: ${hash}`);
}
}
generateHashes().catch(console.error);

29
backend/jest.config.js Normal file
View File

@@ -0,0 +1,29 @@
module.exports = {
moduleFileExtensions: ['js', 'json', 'ts'],
rootDir: 'src',
testRegex: '.*\\.spec\\.ts$',
transform: {
'^.+\\.(t|j)s$': 'ts-jest',
},
collectCoverageFrom: [
'**/*.(t|j)s',
'!**/*.module.ts',
'!**/node_modules/**',
'!**/dist/**',
],
coverageDirectory: '../coverage',
testEnvironment: 'node',
roots: ['<rootDir>', '<rootDir>/../test'],
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/$1',
'^@config/(.*)$': '<rootDir>/config/$1',
'^@common/(.*)$': '<rootDir>/common/$1',
'^@modules/(.*)$': '<rootDir>/modules/$1',
'^@database/(.*)$': '<rootDir>/database/$1',
'^@blockchain/(.*)$': '<rootDir>/blockchain/$1',
'^@storage/(.*)$': '<rootDir>/storage/$1',
'^@queue/(.*)$': '<rootDir>/queue/$1',
},
coveragePathIgnorePatterns: ['/node_modules/'],
testPathIgnorePatterns: ['/node_modules/', '/dist/'],
};

11
backend/nest-cli.json Normal file
View File

@@ -0,0 +1,11 @@
{
"$schema": "https://json.schemastore.org/nest-cli",
"collection": "@nestjs/schematics",
"sourceRoot": "src",
"compilerOptions": {
"deleteOutDir": true,
"webpack": false,
"assets": ["**/*.json"],
"watchAssets": true
}
}

11989
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

99
backend/package.json Normal file
View File

@@ -0,0 +1,99 @@
{
"name": "goa-gel-backend",
"version": "1.0.0",
"description": "Blockchain Document Verification Platform for Government of Goa",
"author": "Government of Goa",
"license": "PROPRIETARY",
"scripts": {
"build": "nest build",
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
"start": "nest start",
"start:dev": "nest start --watch",
"start:debug": "nest start --debug --watch",
"start:prod": "node dist/main",
"lint": "eslint \"{src,test}/**/*.ts\" --fix",
"test": "jest",
"test:watch": "jest --watch",
"test:cov": "jest --coverage",
"test:e2e": "jest --config ./test/jest-e2e.json",
"knex": "knex --knexfile src/database/knexfile.ts",
"migrate:make": "npm run knex -- migrate:make",
"migrate:latest": "npm run knex -- migrate:latest",
"migrate:rollback": "npm run knex -- migrate:rollback",
"migrate:status": "npm run knex -- migrate:status",
"seed:make": "npm run knex -- seed:make",
"seed:run": "npm run knex -- seed:run"
},
"dependencies": {
"@nestjs/bull": "10.0.1",
"@nestjs/common": "10.3.0",
"@nestjs/config": "3.1.1",
"@nestjs/core": "10.3.0",
"@nestjs/jwt": "10.2.0",
"@nestjs/passport": "10.0.3",
"@nestjs/platform-express": "10.3.0",
"@nestjs/swagger": "7.2.0",
"@nestjs/throttler": "5.1.1",
"bcrypt": "5.1.1",
"bull": "4.12.0",
"class-transformer": "0.5.1",
"class-validator": "0.14.1",
"compression": "1.7.4",
"ethers": "6.10.0",
"helmet": "7.1.0",
"ioredis": "5.3.2",
"joi": "17.12.0",
"knex": "3.1.0",
"minio": "7.1.3",
"nest-winston": "1.9.4",
"objection": "3.1.4",
"passport": "0.7.0",
"passport-jwt": "4.0.1",
"pg": "8.11.3",
"reflect-metadata": "0.1.14",
"rxjs": "7.8.1",
"uuid": "9.0.1",
"winston": "3.11.0"
},
"devDependencies": {
"@nestjs/cli": "10.3.0",
"@nestjs/schematics": "10.1.0",
"@nestjs/testing": "10.3.0",
"@types/bcrypt": "5.0.2",
"@types/compression": "1.7.5",
"@types/express": "4.17.21",
"@types/jest": "29.5.11",
"@types/multer": "1.4.11",
"@types/node": "20.11.5",
"@types/passport-jwt": "4.0.0",
"@types/supertest": "6.0.2",
"@types/uuid": "9.0.7",
"@typescript-eslint/eslint-plugin": "6.19.0",
"@typescript-eslint/parser": "6.19.0",
"eslint": "8.56.0",
"eslint-config-prettier": "9.1.0",
"eslint-plugin-prettier": "5.1.3",
"jest": "29.7.0",
"prettier": "3.2.4",
"source-map-support": "0.5.21",
"supertest": "6.3.4",
"ts-jest": "29.1.2",
"ts-loader": "9.5.1",
"ts-node": "10.9.2",
"tsconfig-paths": "4.2.0",
"typescript": "5.3.3"
},
"jest": {
"moduleFileExtensions": ["js", "json", "ts"],
"rootDir": "src",
"testRegex": ".*\\.spec\\.ts$",
"transform": { "^.+\\.(t|j)s$": "ts-jest" },
"collectCoverageFrom": ["**/*.(t|j)s"],
"coverageDirectory": "../coverage",
"testEnvironment": "node",
"coverageThreshold": {
"global": { "branches": 80, "functions": 80, "lines": 80, "statements": 80 }
}
},
"engines": { "node": ">=18.0.0" }
}

View File

@@ -0,0 +1,304 @@
-- Enable UUID extension
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- =============================================
-- MIGRATION 1: Initial Schema
-- =============================================
-- Applicants table
CREATE TABLE IF NOT EXISTS applicants (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
digilocker_id VARCHAR(255) NOT NULL UNIQUE,
name VARCHAR(255) NOT NULL,
email VARCHAR(255) NOT NULL,
phone VARCHAR(20),
wallet_address VARCHAR(42),
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_applicant_digilocker ON applicants(digilocker_id);
CREATE INDEX IF NOT EXISTS idx_applicant_email ON applicants(email);
-- Departments table
CREATE TABLE IF NOT EXISTS departments (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
code VARCHAR(50) NOT NULL UNIQUE,
name VARCHAR(255) NOT NULL,
wallet_address VARCHAR(42) UNIQUE,
api_key_hash VARCHAR(255),
api_secret_hash VARCHAR(255),
webhook_url VARCHAR(500),
webhook_secret_hash VARCHAR(255),
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
description TEXT,
contact_email VARCHAR(255),
contact_phone VARCHAR(20),
last_webhook_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_department_code ON departments(code);
CREATE INDEX IF NOT EXISTS idx_department_active ON departments(is_active);
-- Workflows table
CREATE TABLE IF NOT EXISTS workflows (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
workflow_type VARCHAR(100) NOT NULL UNIQUE,
name VARCHAR(255) NOT NULL,
description TEXT,
version INTEGER NOT NULL DEFAULT 1,
definition JSONB NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
created_by UUID,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_workflow_type ON workflows(workflow_type);
CREATE INDEX IF NOT EXISTS idx_workflow_active ON workflows(is_active);
-- License Requests table
CREATE TABLE IF NOT EXISTS license_requests (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_number VARCHAR(50) NOT NULL UNIQUE,
token_id BIGINT,
applicant_id UUID NOT NULL REFERENCES applicants(id) ON DELETE CASCADE,
request_type VARCHAR(100) NOT NULL,
workflow_id UUID REFERENCES workflows(id) ON DELETE SET NULL,
status VARCHAR(50) NOT NULL DEFAULT 'DRAFT',
metadata JSONB,
current_stage_id VARCHAR(100),
blockchain_tx_hash VARCHAR(66),
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
submitted_at TIMESTAMP,
approved_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_request_number ON license_requests(request_number);
CREATE INDEX IF NOT EXISTS idx_request_applicant ON license_requests(applicant_id);
CREATE INDEX IF NOT EXISTS idx_request_status ON license_requests(status);
CREATE INDEX IF NOT EXISTS idx_request_type ON license_requests(request_type);
CREATE INDEX IF NOT EXISTS idx_request_created ON license_requests(created_at);
CREATE INDEX IF NOT EXISTS idx_request_status_type ON license_requests(status, request_type);
-- Documents table
CREATE TABLE IF NOT EXISTS documents (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES license_requests(id) ON DELETE CASCADE,
doc_type VARCHAR(100) NOT NULL,
original_filename VARCHAR(255) NOT NULL,
current_version INTEGER NOT NULL DEFAULT 1,
current_hash VARCHAR(66) NOT NULL,
minio_bucket VARCHAR(100) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_document_request ON documents(request_id);
CREATE INDEX IF NOT EXISTS idx_document_type ON documents(doc_type);
-- Document Versions table
CREATE TABLE IF NOT EXISTS document_versions (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
document_id UUID NOT NULL REFERENCES documents(id) ON DELETE CASCADE,
version INTEGER NOT NULL,
hash VARCHAR(66) NOT NULL,
minio_path VARCHAR(500) NOT NULL,
file_size BIGINT NOT NULL,
mime_type VARCHAR(100) NOT NULL,
uploaded_by UUID NOT NULL,
blockchain_tx_hash VARCHAR(66),
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(document_id, version)
);
CREATE INDEX IF NOT EXISTS idx_docversion_document ON document_versions(document_id);
-- Approvals table
CREATE TABLE IF NOT EXISTS approvals (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL REFERENCES license_requests(id) ON DELETE CASCADE,
department_id UUID NOT NULL REFERENCES departments(id) ON DELETE CASCADE,
status VARCHAR(50) NOT NULL DEFAULT 'PENDING',
remarks TEXT,
remarks_hash VARCHAR(66),
reviewed_documents JSONB,
blockchain_tx_hash VARCHAR(66),
is_active BOOLEAN NOT NULL DEFAULT true,
invalidated_at TIMESTAMP,
invalidation_reason VARCHAR(255),
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_approval_request ON approvals(request_id);
CREATE INDEX IF NOT EXISTS idx_approval_department ON approvals(department_id);
CREATE INDEX IF NOT EXISTS idx_approval_status ON approvals(status);
CREATE INDEX IF NOT EXISTS idx_approval_request_dept ON approvals(request_id, department_id);
-- Workflow States table
CREATE TABLE IF NOT EXISTS workflow_states (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
request_id UUID NOT NULL UNIQUE REFERENCES license_requests(id) ON DELETE CASCADE,
current_stage_id VARCHAR(100) NOT NULL,
completed_stages JSONB NOT NULL DEFAULT '[]',
pending_approvals JSONB NOT NULL DEFAULT '[]',
execution_log JSONB NOT NULL DEFAULT '[]',
stage_started_at TIMESTAMP,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_wfstate_request ON workflow_states(request_id);
-- Webhooks table
CREATE TABLE IF NOT EXISTS webhooks (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
department_id UUID NOT NULL REFERENCES departments(id) ON DELETE CASCADE,
url VARCHAR(500) NOT NULL,
events JSONB NOT NULL,
secret_hash VARCHAR(255) NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_webhook_department ON webhooks(department_id);
-- Webhook Logs table
CREATE TABLE IF NOT EXISTS webhook_logs (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
webhook_id UUID NOT NULL REFERENCES webhooks(id) ON DELETE CASCADE,
event_type VARCHAR(100) NOT NULL,
payload JSONB NOT NULL,
response_status INTEGER,
response_body TEXT,
response_time INTEGER,
retry_count INTEGER NOT NULL DEFAULT 0,
status VARCHAR(20) NOT NULL DEFAULT 'PENDING',
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_webhooklog_webhook ON webhook_logs(webhook_id);
CREATE INDEX IF NOT EXISTS idx_webhooklog_event ON webhook_logs(event_type);
CREATE INDEX IF NOT EXISTS idx_webhooklog_status ON webhook_logs(status);
CREATE INDEX IF NOT EXISTS idx_webhooklog_created ON webhook_logs(created_at);
-- Audit Logs table
CREATE TABLE IF NOT EXISTS audit_logs (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
entity_type VARCHAR(50) NOT NULL,
entity_id UUID NOT NULL,
action VARCHAR(50) NOT NULL,
actor_type VARCHAR(50) NOT NULL,
actor_id UUID,
old_value JSONB,
new_value JSONB,
ip_address VARCHAR(45),
user_agent TEXT,
correlation_id VARCHAR(100),
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_audit_entity ON audit_logs(entity_type, entity_id);
CREATE INDEX IF NOT EXISTS idx_audit_entitytype ON audit_logs(entity_type);
CREATE INDEX IF NOT EXISTS idx_audit_action ON audit_logs(action);
CREATE INDEX IF NOT EXISTS idx_audit_created ON audit_logs(created_at);
CREATE INDEX IF NOT EXISTS idx_audit_correlation ON audit_logs(correlation_id);
-- Blockchain Transactions table
CREATE TABLE IF NOT EXISTS blockchain_transactions (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tx_hash VARCHAR(66) NOT NULL UNIQUE,
tx_type VARCHAR(50) NOT NULL,
related_entity_type VARCHAR(50) NOT NULL,
related_entity_id UUID NOT NULL,
from_address VARCHAR(42) NOT NULL,
to_address VARCHAR(42),
status VARCHAR(20) NOT NULL DEFAULT 'PENDING',
block_number BIGINT,
gas_used BIGINT,
error_message TEXT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
confirmed_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_bctx_hash ON blockchain_transactions(tx_hash);
CREATE INDEX IF NOT EXISTS idx_bctx_type ON blockchain_transactions(tx_type);
CREATE INDEX IF NOT EXISTS idx_bctx_status ON blockchain_transactions(status);
CREATE INDEX IF NOT EXISTS idx_bctx_entity ON blockchain_transactions(related_entity_id);
CREATE INDEX IF NOT EXISTS idx_bctx_created ON blockchain_transactions(created_at);
-- =============================================
-- MIGRATION 2: Users, Wallets, Events, Logs
-- =============================================
-- Users table for email/password authentication
CREATE TABLE IF NOT EXISTS users (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
email VARCHAR(255) NOT NULL UNIQUE,
password_hash VARCHAR(255) NOT NULL,
name VARCHAR(255) NOT NULL,
role VARCHAR(20) NOT NULL CHECK (role IN ('ADMIN', 'DEPARTMENT', 'CITIZEN')),
department_id UUID REFERENCES departments(id) ON DELETE SET NULL,
wallet_address VARCHAR(42),
wallet_encrypted_key TEXT,
phone VARCHAR(20),
is_active BOOLEAN NOT NULL DEFAULT true,
last_login_at TIMESTAMP,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_user_email ON users(email);
CREATE INDEX IF NOT EXISTS idx_user_role ON users(role);
CREATE INDEX IF NOT EXISTS idx_user_department ON users(department_id);
CREATE INDEX IF NOT EXISTS idx_user_active ON users(is_active);
-- Wallets table for storing encrypted private keys
CREATE TABLE IF NOT EXISTS wallets (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
address VARCHAR(42) NOT NULL UNIQUE,
encrypted_private_key TEXT NOT NULL,
owner_type VARCHAR(20) NOT NULL CHECK (owner_type IN ('USER', 'DEPARTMENT')),
owner_id UUID NOT NULL,
is_active BOOLEAN NOT NULL DEFAULT true,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_wallet_address ON wallets(address);
CREATE INDEX IF NOT EXISTS idx_wallet_owner ON wallets(owner_type, owner_id);
-- Blockchain events table
CREATE TABLE IF NOT EXISTS blockchain_events (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
tx_hash VARCHAR(66) NOT NULL,
event_name VARCHAR(100) NOT NULL,
contract_address VARCHAR(42) NOT NULL,
block_number BIGINT NOT NULL,
log_index INTEGER NOT NULL,
args JSONB NOT NULL,
decoded_args JSONB,
related_entity_type VARCHAR(50),
related_entity_id UUID,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
UNIQUE(tx_hash, log_index)
);
CREATE INDEX IF NOT EXISTS idx_event_tx ON blockchain_events(tx_hash);
CREATE INDEX IF NOT EXISTS idx_event_name ON blockchain_events(event_name);
CREATE INDEX IF NOT EXISTS idx_event_contract ON blockchain_events(contract_address);
CREATE INDEX IF NOT EXISTS idx_event_block ON blockchain_events(block_number);
CREATE INDEX IF NOT EXISTS idx_event_created ON blockchain_events(created_at);
CREATE INDEX IF NOT EXISTS idx_event_entity ON blockchain_events(related_entity_type, related_entity_id);
-- Application logs table
CREATE TABLE IF NOT EXISTS application_logs (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
level VARCHAR(10) NOT NULL CHECK (level IN ('DEBUG', 'INFO', 'WARN', 'ERROR')),
module VARCHAR(100) NOT NULL,
message TEXT NOT NULL,
context JSONB,
stack_trace TEXT,
user_id UUID,
correlation_id VARCHAR(100),
ip_address VARCHAR(45),
user_agent TEXT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_applog_level ON application_logs(level);
CREATE INDEX IF NOT EXISTS idx_applog_module ON application_logs(module);
CREATE INDEX IF NOT EXISTS idx_applog_user ON application_logs(user_id);
CREATE INDEX IF NOT EXISTS idx_applog_correlation ON application_logs(correlation_id);
CREATE INDEX IF NOT EXISTS idx_applog_created ON application_logs(created_at);

View File

@@ -0,0 +1,45 @@
#!/bin/bash
set -e
echo "🚀 Starting Goa-GEL Backend Initialization..."
# Function to check if this is first boot
is_first_boot() {
if [ ! -f "/app/data/.initialized" ]; then
return 0 # true
else
return 1 # false
fi
}
# Ensure data directory exists
mkdir -p /app/data
# Ensure .env file exists
touch /app/.env
# 1. Wait for and initialize database
echo "📊 Step 1: Database initialization..."
chmod +x /app/scripts/init-db.sh
/app/scripts/init-db.sh
# 2. Initialize blockchain (only on first boot or if not configured)
if is_first_boot || [ -z "$CONTRACT_ADDRESS_LICENSE_NFT" ] || [ "$CONTRACT_ADDRESS_LICENSE_NFT" = "0x0000000000000000000000000000000000000000" ]; then
echo "🔗 Step 2: Blockchain initialization..."
node /app/scripts/init-blockchain.js
# Mark as initialized
touch /app/data/.initialized
echo "✅ Blockchain initialization complete!"
# Reload environment variables
if [ -f "/app/.env" ]; then
export $(grep -v '^#' /app/.env | xargs)
fi
else
echo "⏭️ Step 2: Blockchain already initialized"
fi
# 3. Start the application
echo "🎯 Step 3: Starting NestJS application..."
exec npm run start:prod

View File

@@ -0,0 +1,175 @@
const { ethers } = require('ethers');
const fs = require('fs');
const path = require('path');
/**
* Initialize blockchain infrastructure:
* - Generate platform wallet
* - Deploy smart contracts
* - Update .env file with addresses
*/
async function initBlockchain() {
console.log('🔗 Initializing blockchain infrastructure...');
const provider = new ethers.JsonRpcProvider(
process.env.BESU_RPC_URL || 'http://localhost:8545'
);
// Wait for blockchain to be ready
console.log('⏳ Waiting for blockchain to be ready...');
let retries = 30;
while (retries > 0) {
try {
await provider.getBlockNumber();
console.log('✅ Blockchain is ready!');
break;
} catch (error) {
retries--;
if (retries === 0) {
throw new Error('Blockchain not available after 30 retries');
}
await new Promise(resolve => setTimeout(resolve, 2000));
}
}
// Check if already initialized
const envPath = path.join(__dirname, '../.env');
if (fs.existsSync(envPath)) {
const envContent = fs.readFileSync(envPath, 'utf8');
if (
envContent.includes('CONTRACT_ADDRESS_LICENSE_NFT=0x') &&
!envContent.includes('CONTRACT_ADDRESS_LICENSE_NFT=0x0000000000000000000000000000000000000000')
) {
console.log('✅ Blockchain already initialized, skipping deployment');
return;
}
}
// 1. Generate Platform Wallet
console.log('🔐 Generating platform wallet...');
const platformWallet = ethers.Wallet.createRandom();
console.log('📝 Platform Wallet Address:', platformWallet.address);
console.log('🔑 Platform Wallet Mnemonic:', platformWallet.mnemonic.phrase);
// Fund the platform wallet from the dev network's pre-funded account
console.log('💰 Funding platform wallet...');
const devWallet = new ethers.Wallet(
'0x8f2a55949038a9610f50fb23b5883af3b4ecb3c3bb792cbcefbd1542c692be63',
provider
);
const fundTx = await devWallet.sendTransaction({
to: platformWallet.address,
value: ethers.parseEther('100.0'),
});
await fundTx.wait();
console.log('✅ Platform wallet funded with 100 ETH');
const connectedWallet = platformWallet.connect(provider);
// 2. Deploy Smart Contracts
console.log('📜 Deploying smart contracts...');
const contracts = await deployContracts(connectedWallet);
// 3. Update .env file
console.log('📝 Updating .env file...');
updateEnvFile({
PLATFORM_WALLET_PRIVATE_KEY: platformWallet.privateKey,
PLATFORM_WALLET_ADDRESS: platformWallet.address,
PLATFORM_WALLET_MNEMONIC: platformWallet.mnemonic.phrase,
CONTRACT_ADDRESS_LICENSE_NFT: contracts.licenseNFT,
CONTRACT_ADDRESS_APPROVAL_MANAGER: contracts.approvalManager,
CONTRACT_ADDRESS_DEPARTMENT_REGISTRY: contracts.departmentRegistry,
CONTRACT_ADDRESS_WORKFLOW_REGISTRY: contracts.workflowRegistry,
});
console.log('✅ Blockchain initialization complete!');
console.log('\n📋 Summary:');
console.log(' Platform Wallet:', platformWallet.address);
console.log(' License NFT:', contracts.licenseNFT);
console.log(' Approval Manager:', contracts.approvalManager);
console.log(' Department Registry:', contracts.departmentRegistry);
console.log(' Workflow Registry:', contracts.workflowRegistry);
}
/**
* Deploy all smart contracts
*/
async function deployContracts(wallet) {
// Simple deployment of placeholder contracts
// In production, you would deploy your actual Solidity contracts here
console.log('🚀 Deploying License NFT contract...');
const licenseNFT = await deployPlaceholderContract(wallet, 'LicenseNFT');
console.log('🚀 Deploying Approval Manager contract...');
const approvalManager = await deployPlaceholderContract(wallet, 'ApprovalManager');
console.log('🚀 Deploying Department Registry contract...');
const departmentRegistry = await deployPlaceholderContract(wallet, 'DepartmentRegistry');
console.log('🚀 Deploying Workflow Registry contract...');
const workflowRegistry = await deployPlaceholderContract(wallet, 'WorkflowRegistry');
return {
licenseNFT,
approvalManager,
departmentRegistry,
workflowRegistry,
};
}
/**
* Deploy a placeholder contract (simple storage contract)
*/
async function deployPlaceholderContract(wallet, name) {
// Simple contract that just stores a value
const bytecode = '0x608060405234801561001057600080fd5b5060c78061001f6000396000f3fe6080604052348015600f57600080fd5b506004361060325760003560e01c80632e64cec11460375780636057361d146051575b600080fd5b603d6069565b6040516048919060a2565b60405180910390f35b6067600480360381019060639190606f565b6072565b005b60008054905090565b8060008190555050565b6000813590506079816000ad565b92915050565b6000602082840312156000608257600080fd5b6000608e84828501607c565b91505092915050565b609c8160bb565b82525050565b600060208201905060b560008301846095565b92915050565b600081905091905056fea26469706673582212203a8e2f9c8e98b9f5e8c7d6e5f4c3b2a19087868756463524f3e2d1c0b9a8f76464736f6c63430008110033';
const deployTx = await wallet.sendTransaction({
data: bytecode,
});
const receipt = await deployTx.wait();
const address = receipt.contractAddress;
console.log(`${name} deployed at:`, address);
return address;
}
/**
* Update .env file with generated values
*/
function updateEnvFile(values) {
const envPath = path.join(__dirname, '../.env');
let envContent = '';
if (fs.existsSync(envPath)) {
envContent = fs.readFileSync(envPath, 'utf8');
}
// Update or add each value
for (const [key, value] of Object.entries(values)) {
const regex = new RegExp(`^${key}=.*$`, 'm');
if (regex.test(envContent)) {
envContent = envContent.replace(regex, `${key}=${value}`);
} else {
envContent += `\n${key}=${value}`;
}
}
fs.writeFileSync(envPath, envContent.trim() + '\n');
console.log(`✅ Updated ${envPath}`);
}
// Run initialization
initBlockchain()
.then(() => {
console.log('✅ Blockchain initialization completed successfully!');
process.exit(0);
})
.catch((error) => {
console.error('❌ Blockchain initialization failed:', error);
process.exit(1);
});

View File

@@ -0,0 +1,30 @@
#!/bin/bash
set -e
echo "🔄 Waiting for database to be ready..."
until PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -U "$DATABASE_USER" -d "$DATABASE_NAME" -c '\q' 2>/dev/null; do
echo "⏳ PostgreSQL is unavailable - sleeping"
sleep 2
done
echo "✅ PostgreSQL is up - checking if database is initialized..."
# Check if users table exists (indicating database is already set up)
TABLE_EXISTS=$(PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -U "$DATABASE_USER" -d "$DATABASE_NAME" -tAc "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_name = 'users');" 2>/dev/null || echo "f")
if [ "$TABLE_EXISTS" = "t" ]; then
echo "✅ Database already initialized, skipping setup."
else
echo "📦 First time setup - creating tables and seeding data..."
# Run the SQL scripts directly
echo "Creating tables..."
PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -U "$DATABASE_USER" -d "$DATABASE_NAME" -f /app/scripts/create-all-tables.sql
echo "🌱 Seeding initial data..."
PGPASSWORD=$DATABASE_PASSWORD psql -h "$DATABASE_HOST" -U "$DATABASE_USER" -d "$DATABASE_NAME" -f /app/scripts/seed-initial-data.sql
echo "✅ Database initialized successfully!"
fi
echo "✅ Database ready!"

View File

@@ -0,0 +1,45 @@
const knex = require('knex');
const path = require('path');
// Load environment variables
require('dotenv').config({ path: path.join(__dirname, '../.env') });
async function runMigrations() {
const knexConfig = {
client: 'pg',
connection: {
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '5432', 10),
database: process.env.DATABASE_NAME,
user: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
},
migrations: {
directory: path.join(__dirname, '../src/database/migrations'),
tableName: 'knex_migrations',
loadExtensions: ['.ts'],
},
};
const db = knex(knexConfig);
try {
console.log('🔄 Running database migrations...');
await db.migrate.latest();
console.log('✅ Migrations completed successfully!');
console.log('🌱 Running database seeds...');
await db.seed.run({
directory: path.join(__dirname, '../src/database/seeds'),
loadExtensions: ['.ts'],
});
console.log('✅ Seeds completed successfully!');
} catch (error) {
console.error('❌ Migration failed:', error);
process.exit(1);
} finally {
await db.destroy();
}
}
runMigrations();

View File

@@ -0,0 +1,208 @@
-- =============================================
-- Initial Seed Data for Goa GEL Platform
-- =============================================
-- Insert Departments
INSERT INTO departments (id, code, name, wallet_address, is_active, description, contact_email, contact_phone, created_at, updated_at)
VALUES
(
'11111111-1111-1111-1111-111111111111',
'FIRE_DEPT',
'Fire & Emergency Services Department',
'0x1111111111111111111111111111111111111111',
true,
'Responsible for fire safety inspections and certifications',
'fire@goa.gov.in',
'+91-832-2222222',
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'22222222-2222-2222-2222-222222222222',
'TOURISM_DEPT',
'Department of Tourism',
'0x2222222222222222222222222222222222222222',
true,
'Manages tourism licenses and hospitality registrations',
'tourism@goa.gov.in',
'+91-832-3333333',
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'33333333-3333-3333-3333-333333333333',
'MUNICIPALITY',
'Municipal Corporation of Panaji',
'0x3333333333333333333333333333333333333333',
true,
'Local governance and building permits',
'municipality@goa.gov.in',
'+91-832-4444444',
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'44444444-4444-4444-4444-444444444444',
'HEALTH_DEPT',
'Directorate of Health Services',
'0x4444444444444444444444444444444444444444',
true,
'Health and sanitation inspections',
'health@goa.gov.in',
'+91-832-5555555',
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
)
ON CONFLICT (code) DO NOTHING;
-- Insert Demo Users
-- Password hashes are for: Admin@123, Fire@123, Tourism@123, Municipality@123, Citizen@123
INSERT INTO users (id, email, password_hash, name, role, department_id, phone, is_active, created_at, updated_at)
VALUES
(
'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'admin@goa.gov.in',
'$2b$10$uTkObgkUNJSVLb0ESwSQqekO4wKJJvjC02VdEb38vxzRT9ib4ByM.',
'System Administrator',
'ADMIN',
NULL,
'+91-9876543210',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
'fire@goa.gov.in',
'$2b$10$YB1iB3GjHfTwtaULRxSoRudg2eUft4b40V/1YI1iDK8OeAel7OXby',
'Fire Department Officer',
'DEPARTMENT',
'11111111-1111-1111-1111-111111111111',
'+91-9876543211',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'cccccccc-cccc-cccc-cccc-cccccccccccc',
'tourism@goa.gov.in',
'$2b$10$MwcPrX91SxlZN09eQxEA4u6ErLOnw7DmrD2f3C7pzEY0pbKRJ.p.e',
'Tourism Department Officer',
'DEPARTMENT',
'22222222-2222-2222-2222-222222222222',
'+91-9876543212',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'dddddddd-dddd-dddd-dddd-dddddddddddd',
'municipality@goa.gov.in',
'$2b$10$K4RH4xbduaGQRYMHJeXA3.7Z1eBnBTSDkOQgDLmYVWIUeYFKjp5xm',
'Municipality Officer',
'DEPARTMENT',
'33333333-3333-3333-3333-333333333333',
'+91-9876543213',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee',
'citizen@example.com',
'$2b$10$94al.IXYDxN6yNIycR4yI.soU00DqS3BwNBXvrLr4v6bB7B94oH6G',
'Demo Citizen',
'CITIZEN',
NULL,
'+91-9876543214',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'ffffffff-ffff-ffff-ffff-ffffffffffff',
'citizen2@example.com',
'$2b$10$94al.IXYDxN6yNIycR4yI.soU00DqS3BwNBXvrLr4v6bB7B94oH6G',
'Second Citizen',
'CITIZEN',
NULL,
'+91-9876543215',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
)
ON CONFLICT (email) DO NOTHING;
-- Insert Sample Applicants (linked to citizen users)
INSERT INTO applicants (id, digilocker_id, name, email, phone, is_active, created_at, updated_at)
VALUES
(
'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee',
'DL-GOA-CITIZEN-001',
'Demo Citizen',
'citizen@example.com',
'+91-9876543214',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'ffffffff-ffff-ffff-ffff-ffffffffffff',
'DL-GOA-CITIZEN-002',
'Second Citizen',
'citizen2@example.com',
'+91-9876543215',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
)
ON CONFLICT (digilocker_id) DO NOTHING;
-- Insert Sample Workflows
INSERT INTO workflows (id, workflow_type, name, description, version, definition, is_active, created_at, updated_at)
VALUES
(
'ffffffff-ffff-ffff-ffff-ffffffffffff',
'RESORT_LICENSE',
'Resort License Approval Workflow',
'Multi-department approval workflow for resort licenses in Goa',
1,
'{"isActive":true,"stages":[{"stageId":"stage_1_fire","stageName":"Fire Safety Review","stageOrder":1,"executionType":"SEQUENTIAL","requiredApprovals":[{"departmentCode":"FIRE_DEPT","departmentName":"Fire & Emergency Services Department","requiredDocuments":["FIRE_SAFETY_CERTIFICATE","BUILDING_PLAN"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":7,"onTimeout":"NOTIFY","onRejection":"FAIL_REQUEST"},{"stageId":"stage_2_parallel","stageName":"Tourism & Municipality Review","stageOrder":2,"executionType":"PARALLEL","requiredApprovals":[{"departmentCode":"TOURISM_DEPT","departmentName":"Department of Tourism","requiredDocuments":["PROPERTY_OWNERSHIP","BUILDING_PLAN"],"isMandatory":true},{"departmentCode":"MUNICIPALITY","departmentName":"Municipal Corporation of Panaji","requiredDocuments":["PROPERTY_OWNERSHIP","TAX_CLEARANCE"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":14,"onTimeout":"ESCALATE","onRejection":"FAIL_REQUEST"},{"stageId":"stage_3_health","stageName":"Health & Sanitation Review","stageOrder":3,"executionType":"SEQUENTIAL","requiredApprovals":[{"departmentCode":"HEALTH_DEPT","departmentName":"Directorate of Health Services","requiredDocuments":["HEALTH_CERTIFICATE"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":7,"onTimeout":"NOTIFY","onRejection":"FAIL_REQUEST"}]}',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'FIRE_SAFETY_CERT',
'Fire Safety Certificate Workflow',
'Workflow for fire safety certification',
1,
'{"isActive":true,"stages":[{"stageId":"stage_1","stageName":"Fire Safety Review","stageOrder":1,"executionType":"SEQUENTIAL","requiredApprovals":[{"departmentCode":"FIRE_DEPT","departmentName":"Fire & Emergency Services Department","requiredDocuments":["FIRE_SAFETY_CERTIFICATE","BUILDING_PLAN"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":7,"onTimeout":"NOTIFY","onRejection":"FAIL_REQUEST"}]}',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
'TOURISM_LICENSE',
'Tourism License Workflow',
'Workflow for tourism business licenses',
1,
'{"isActive":true,"stages":[{"stageId":"stage_1","stageName":"Tourism Department Review","stageOrder":1,"executionType":"SEQUENTIAL","requiredApprovals":[{"departmentCode":"TOURISM_DEPT","departmentName":"Department of Tourism","requiredDocuments":["PROPERTY_OWNERSHIP"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":14,"onTimeout":"NOTIFY","onRejection":"FAIL_REQUEST"}]}',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
),
(
'cccccccc-cccc-cccc-cccc-cccccccccccc',
'TRADE_LICENSE',
'Trade License Workflow',
'Workflow for trade and business licenses',
1,
'{"isActive":true,"stages":[{"stageId":"stage_1","stageName":"Municipality Review","stageOrder":1,"executionType":"SEQUENTIAL","requiredApprovals":[{"departmentCode":"MUNICIPALITY","departmentName":"Municipal Corporation","requiredDocuments":["PROPERTY_OWNERSHIP","TAX_CLEARANCE"],"isMandatory":true}],"completionCriteria":"ALL","timeoutDays":14,"onTimeout":"NOTIFY","onRejection":"FAIL_REQUEST"}]}',
true,
CURRENT_TIMESTAMP,
CURRENT_TIMESTAMP
)
ON CONFLICT (workflow_type) DO NOTHING;

103
backend/src/app.module.ts Normal file
View File

@@ -0,0 +1,103 @@
import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { ThrottlerModule, ThrottlerGuard } from '@nestjs/throttler';
import { BullModule } from '@nestjs/bull';
import { APP_GUARD } from '@nestjs/core';
// Configuration
import {
appConfig,
appConfigValidationSchema,
databaseConfig,
blockchainConfig,
storageConfig,
redisConfig,
jwtConfig,
minioConfig,
} from './config';
// Database
import { DatabaseModule } from './database/database.module';
// Modules
import { AuthModule } from './modules/auth/auth.module';
import { ApplicantsModule } from './modules/applicants/applicants.module';
import { DepartmentsModule } from './modules/departments/departments.module';
import { RequestsModule } from './modules/requests/requests.module';
import { DocumentsModule } from './modules/documents/documents.module';
import { ApprovalsModule } from './modules/approvals/approvals.module';
import { WorkflowsModule } from './modules/workflows/workflows.module';
import { WebhooksModule } from './modules/webhooks/webhooks.module';
import { BlockchainModule } from './modules/blockchain/blockchain.module';
import { AdminModule } from './modules/admin/admin.module';
import { AuditModule } from './modules/audit/audit.module';
import { HealthModule } from './modules/health/health.module';
import { UsersModule } from './modules/users/users.module';
@Module({
imports: [
// Configuration
ConfigModule.forRoot({
isGlobal: true,
load: [appConfig, databaseConfig, blockchainConfig, storageConfig, redisConfig, jwtConfig, minioConfig],
validationSchema: appConfigValidationSchema,
validationOptions: {
abortEarly: false,
},
}),
// Database (Knex + Objection.js)
DatabaseModule,
// Rate Limiting
ThrottlerModule.forRootAsync({
imports: [ConfigModule],
inject: [ConfigService],
useFactory: (configService: ConfigService) => {
const nodeEnv = configService.get<string>('NODE_ENV', 'development');
const isDevelopment = nodeEnv === 'development' || nodeEnv === 'test';
return [{
ttl: isDevelopment ? 1000 : configService.get<number>('RATE_LIMIT_TTL', 60) * 1000,
limit: isDevelopment ? 10000 : configService.get<number>('RATE_LIMIT_GLOBAL', 100),
}];
},
}),
// Bull Queue (Redis)
BullModule.forRootAsync({
imports: [ConfigModule],
inject: [ConfigService],
useFactory: (configService: ConfigService) => ({
redis: {
host: configService.get<string>('redis.host'),
port: configService.get<number>('redis.port'),
password: configService.get<string>('redis.password') || undefined,
db: configService.get<number>('redis.db'),
},
}),
}),
// Feature Modules
AuthModule,
ApplicantsModule,
DepartmentsModule,
RequestsModule,
DocumentsModule,
ApprovalsModule,
WorkflowsModule,
WebhooksModule,
BlockchainModule,
AdminModule,
AuditModule,
HealthModule,
UsersModule,
],
providers: [
{
provide: APP_GUARD,
useClass: ThrottlerGuard,
},
],
})
export class AppModule {}

View File

@@ -0,0 +1,8 @@
import { Module } from '@nestjs/common';
import { BlockchainService } from './blockchain.service';
@Module({
providers: [BlockchainService],
exports: [BlockchainService],
})
export class BlockchainModule {}

View File

@@ -0,0 +1,67 @@
import { Injectable, Logger, Inject } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { ethers } from 'ethers';
export interface BlockchainConfig {
rpcUrl: string;
chainId: number;
gasPrice: string;
gasLimit: string;
contractAddress: string;
privateKey: string;
networkName: string;
}
@Injectable()
export class BlockchainService {
private readonly logger = new Logger(BlockchainService.name);
private provider: ethers.JsonRpcProvider | null = null;
private signer: ethers.Wallet | null = null;
constructor(@Inject(ConfigService) private configService: ConfigService) {}
async initialize(): Promise<void> {
try {
const config = this.configService.get<BlockchainConfig>('blockchain');
if (!config) {
throw new Error('Blockchain configuration not found');
}
this.provider = new ethers.JsonRpcProvider(config.rpcUrl);
this.signer = new ethers.Wallet(config.privateKey, this.provider);
const network = await this.provider.getNetwork();
this.logger.log(
`Connected to blockchain network: ${network.name} (Chain ID: ${network.chainId})`,
);
} catch (error) {
this.logger.error('Failed to initialize blockchain service', error);
throw error;
}
}
getProvider(): ethers.JsonRpcProvider {
if (!this.provider) {
throw new Error('Blockchain provider not initialized');
}
return this.provider;
}
getSigner(): ethers.Wallet {
if (!this.signer) {
throw new Error('Blockchain signer not initialized');
}
return this.signer;
}
async getBalance(address: string): Promise<string> {
const balance = await this.provider!.getBalance(address);
return ethers.formatEther(balance);
}
async getTransactionStatus(transactionHash: string): Promise<string | null> {
const receipt = await this.provider!.getTransactionReceipt(transactionHash);
return receipt?.status === 1 ? 'success' : 'failed';
}
}

View File

@@ -0,0 +1,170 @@
export const ERROR_CODES = {
// Authentication & Authorization
INVALID_CREDENTIALS: 'AUTH_001',
TOKEN_EXPIRED: 'AUTH_002',
TOKEN_INVALID: 'AUTH_003',
INVALID_TOKEN: 'AUTH_003', // Alias
UNAUTHORIZED: 'AUTH_004',
FORBIDDEN: 'AUTH_005',
API_KEY_INVALID: 'AUTH_006',
INVALID_API_KEY: 'AUTH_006', // Alias
SESSION_EXPIRED: 'AUTH_007',
INSUFFICIENT_PERMISSIONS: 'AUTH_008',
// User Management
USER_NOT_FOUND: 'USER_001',
USER_ALREADY_EXISTS: 'USER_002',
USER_INACTIVE: 'USER_003',
USER_DELETED: 'USER_004',
INVALID_USER_DATA: 'USER_005',
// Applicant Management
APPLICANT_NOT_FOUND: 'APPL_001',
APPLICANT_ALREADY_EXISTS: 'APPL_002',
INVALID_APPLICANT_DATA: 'APPL_003',
// Document Management
DOCUMENT_NOT_FOUND: 'DOC_001',
DOCUMENT_ALREADY_VERIFIED: 'DOC_002',
DOCUMENT_EXPIRED: 'DOC_003',
INVALID_FILE_TYPE: 'DOC_004',
FILE_SIZE_EXCEEDED: 'DOC_005',
DOCUMENT_CORRUPTED: 'DOC_006',
DUPLICATE_DOCUMENT: 'DOC_007',
// Blockchain Operations
BLOCKCHAIN_CONNECTION_ERROR: 'CHAIN_001',
CONTRACT_CALL_ERROR: 'CHAIN_002',
TRANSACTION_FAILED: 'CHAIN_003',
INVALID_CONTRACT_ADDRESS: 'CHAIN_004',
INSUFFICIENT_GAS: 'CHAIN_005',
TRANSACTION_TIMEOUT: 'CHAIN_006',
BLOCKCHAIN_NOT_AVAILABLE: 'CHAIN_007',
// Storage Operations
STORAGE_ERROR: 'STOR_001',
STORAGE_NOT_FOUND: 'STOR_002',
STORAGE_QUOTA_EXCEEDED: 'STOR_003',
STORAGE_UPLOAD_FAILED: 'STOR_004',
STORAGE_ACCESS_DENIED: 'STOR_005',
// Database Operations
DATABASE_ERROR: 'DB_001',
DATABASE_CONNECTION_ERROR: 'DB_002',
TRANSACTION_ERROR: 'DB_003',
CONSTRAINT_VIOLATION: 'DB_004',
// Validation Errors
VALIDATION_ERROR: 'VAL_001',
INVALID_INPUT: 'VAL_002',
MISSING_REQUIRED_FIELD: 'VAL_003',
INVALID_EMAIL: 'VAL_004',
INVALID_DATE: 'VAL_005',
// Rate Limiting
RATE_LIMIT_EXCEEDED: 'RATE_001',
TOO_MANY_REQUESTS: 'RATE_002',
// Server Errors
INTERNAL_SERVER_ERROR: 'SERVER_001',
INTERNAL_ERROR: 'SERVER_001', // Alias
SERVICE_UNAVAILABLE: 'SERVER_002',
TIMEOUT: 'SERVER_003',
NOT_IMPLEMENTED: 'SERVER_004',
NOT_FOUND: 'SERVER_005',
// Queue Operations
QUEUE_ERROR: 'QUEUE_001',
JOB_FAILED: 'QUEUE_002',
JOB_NOT_FOUND: 'QUEUE_003',
// Email Operations
EMAIL_SEND_ERROR: 'EMAIL_001',
INVALID_EMAIL_ADDRESS: 'EMAIL_002',
// Department Management
DEPARTMENT_NOT_FOUND: 'DEPT_001',
DEPARTMENT_ALREADY_EXISTS: 'DEPT_002',
INVALID_DEPARTMENT_DATA: 'DEPT_003',
// Audit & Logging
AUDIT_RECORD_ERROR: 'AUDIT_001',
LOG_ERROR: 'LOG_001',
};
export const ERROR_MESSAGES: Record<string, string> = {
[ERROR_CODES.INVALID_CREDENTIALS]: 'Invalid email or password',
[ERROR_CODES.TOKEN_EXPIRED]: 'Token has expired',
[ERROR_CODES.TOKEN_INVALID]: 'Invalid or malformed token',
[ERROR_CODES.UNAUTHORIZED]: 'Unauthorized access',
[ERROR_CODES.FORBIDDEN]: 'Forbidden resource',
[ERROR_CODES.API_KEY_INVALID]: 'Invalid API key',
[ERROR_CODES.SESSION_EXPIRED]: 'Session has expired',
[ERROR_CODES.INSUFFICIENT_PERMISSIONS]: 'Insufficient permissions',
[ERROR_CODES.USER_NOT_FOUND]: 'User not found',
[ERROR_CODES.USER_ALREADY_EXISTS]: 'User already exists',
[ERROR_CODES.USER_INACTIVE]: 'User account is inactive',
[ERROR_CODES.USER_DELETED]: 'User account has been deleted',
[ERROR_CODES.INVALID_USER_DATA]: 'Invalid user data provided',
[ERROR_CODES.APPLICANT_NOT_FOUND]: 'Applicant not found',
[ERROR_CODES.APPLICANT_ALREADY_EXISTS]: 'Applicant already exists',
[ERROR_CODES.INVALID_APPLICANT_DATA]: 'Invalid applicant data provided',
[ERROR_CODES.DOCUMENT_NOT_FOUND]: 'Document not found',
[ERROR_CODES.DOCUMENT_ALREADY_VERIFIED]: 'Document is already verified',
[ERROR_CODES.DOCUMENT_EXPIRED]: 'Document has expired',
[ERROR_CODES.INVALID_FILE_TYPE]: 'Invalid file type',
[ERROR_CODES.FILE_SIZE_EXCEEDED]: 'File size exceeds maximum limit',
[ERROR_CODES.DOCUMENT_CORRUPTED]: 'Document appears to be corrupted',
[ERROR_CODES.DUPLICATE_DOCUMENT]: 'Document already exists',
[ERROR_CODES.BLOCKCHAIN_CONNECTION_ERROR]: 'Failed to connect to blockchain network',
[ERROR_CODES.CONTRACT_CALL_ERROR]: 'Smart contract call failed',
[ERROR_CODES.TRANSACTION_FAILED]: 'Blockchain transaction failed',
[ERROR_CODES.INVALID_CONTRACT_ADDRESS]: 'Invalid smart contract address',
[ERROR_CODES.INSUFFICIENT_GAS]: 'Insufficient gas for transaction',
[ERROR_CODES.TRANSACTION_TIMEOUT]: 'Blockchain transaction timeout',
[ERROR_CODES.BLOCKCHAIN_NOT_AVAILABLE]: 'Blockchain network is not available',
[ERROR_CODES.STORAGE_ERROR]: 'Storage operation failed',
[ERROR_CODES.STORAGE_NOT_FOUND]: 'File not found in storage',
[ERROR_CODES.STORAGE_QUOTA_EXCEEDED]: 'Storage quota exceeded',
[ERROR_CODES.STORAGE_UPLOAD_FAILED]: 'File upload failed',
[ERROR_CODES.STORAGE_ACCESS_DENIED]: 'Access to storage denied',
[ERROR_CODES.DATABASE_ERROR]: 'Database operation failed',
[ERROR_CODES.DATABASE_CONNECTION_ERROR]: 'Failed to connect to database',
[ERROR_CODES.TRANSACTION_ERROR]: 'Database transaction error',
[ERROR_CODES.CONSTRAINT_VIOLATION]: 'Database constraint violation',
[ERROR_CODES.VALIDATION_ERROR]: 'Validation error',
[ERROR_CODES.INVALID_INPUT]: 'Invalid input provided',
[ERROR_CODES.MISSING_REQUIRED_FIELD]: 'Missing required field',
[ERROR_CODES.INVALID_EMAIL]: 'Invalid email format',
[ERROR_CODES.INVALID_DATE]: 'Invalid date format',
[ERROR_CODES.RATE_LIMIT_EXCEEDED]: 'Rate limit exceeded',
[ERROR_CODES.TOO_MANY_REQUESTS]: 'Too many requests',
[ERROR_CODES.INTERNAL_SERVER_ERROR]: 'Internal server error',
[ERROR_CODES.SERVICE_UNAVAILABLE]: 'Service unavailable',
[ERROR_CODES.TIMEOUT]: 'Operation timeout',
[ERROR_CODES.NOT_IMPLEMENTED]: 'Feature not implemented',
[ERROR_CODES.NOT_FOUND]: 'Resource not found',
[ERROR_CODES.QUEUE_ERROR]: 'Queue operation failed',
[ERROR_CODES.JOB_FAILED]: 'Job execution failed',
[ERROR_CODES.JOB_NOT_FOUND]: 'Job not found',
[ERROR_CODES.EMAIL_SEND_ERROR]: 'Failed to send email',
[ERROR_CODES.INVALID_EMAIL_ADDRESS]: 'Invalid email address',
[ERROR_CODES.DEPARTMENT_NOT_FOUND]: 'Department not found',
[ERROR_CODES.DEPARTMENT_ALREADY_EXISTS]: 'Department already exists',
[ERROR_CODES.INVALID_DEPARTMENT_DATA]: 'Invalid department data',
[ERROR_CODES.AUDIT_RECORD_ERROR]: 'Failed to record audit log',
[ERROR_CODES.LOG_ERROR]: 'Logging error',
};

View File

@@ -0,0 +1,49 @@
export const APP_EVENTS = {
// User Events
USER_CREATED: 'user.created',
USER_UPDATED: 'user.updated',
USER_DELETED: 'user.deleted',
USER_LOGIN: 'user.login',
USER_LOGOUT: 'user.logout',
USER_PASSWORD_CHANGED: 'user.password_changed',
// Document Events
DOCUMENT_UPLOADED: 'document.uploaded',
DOCUMENT_VERIFIED: 'document.verified',
DOCUMENT_REJECTED: 'document.rejected',
DOCUMENT_REVOKED: 'document.revoked',
DOCUMENT_ARCHIVED: 'document.archived',
DOCUMENT_RESTORED: 'document.restored',
DOCUMENT_DOWNLOADED: 'document.downloaded',
// Blockchain Events
BLOCKCHAIN_VERIFICATION_STARTED: 'blockchain.verification_started',
BLOCKCHAIN_VERIFICATION_COMPLETED: 'blockchain.verification_completed',
BLOCKCHAIN_VERIFICATION_FAILED: 'blockchain.verification_failed',
TRANSACTION_CREATED: 'blockchain.transaction_created',
TRANSACTION_CONFIRMED: 'blockchain.transaction_confirmed',
TRANSACTION_FAILED: 'blockchain.transaction_failed',
// Department Events
DEPARTMENT_CREATED: 'department.created',
DEPARTMENT_UPDATED: 'department.updated',
DEPARTMENT_DELETED: 'department.deleted',
// Audit Events
AUDIT_LOG_CREATED: 'audit.log_created',
AUDIT_LOG_ACCESSED: 'audit.log_accessed',
// System Events
SYSTEM_HEALTH_CHECK: 'system.health_check',
SYSTEM_ALERT: 'system.alert',
SYSTEM_ERROR: 'system.error',
DATABASE_BACKUP: 'database.backup',
STORAGE_BACKUP: 'storage.backup',
// Queue Events
JOB_QUEUED: 'queue.job_queued',
JOB_PROCESSING: 'queue.job_processing',
JOB_COMPLETED: 'queue.job_completed',
JOB_FAILED: 'queue.job_failed',
JOB_RETRY: 'queue.job_retry',
};

View File

@@ -0,0 +1,49 @@
export * from './events';
export * from './error-codes';
export const API_PREFIX = 'api';
export const API_VERSION = 'v1';
export const DEFAULT_PAGE_SIZE = 20;
export const MAX_PAGE_SIZE = 100;
export const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
export const ALLOWED_MIME_TYPES = [
'application/pdf',
'image/jpeg',
'image/png',
'image/jpg',
];
export const REQUEST_NUMBER_PREFIX = {
RESORT_LICENSE: 'RL',
TRADE_LICENSE: 'TL',
BUILDING_PERMIT: 'BP',
};
export const WEBHOOK_RETRY_ATTEMPTS = 3;
export const WEBHOOK_RETRY_DELAY = 5000; // 5 seconds
export const BLOCKCHAIN_CONFIRMATION_BLOCKS = 1;
export const BLOCKCHAIN_GAS_LIMIT = 8000000;
export const CACHE_TTL = {
WORKFLOW: 600, // 10 minutes
DEPARTMENT: 3600, // 1 hour
REQUEST_STATUS: 300, // 5 minutes
};
export const RATE_LIMIT = {
GLOBAL: { ttl: 60, limit: 100 },
API_KEY: { ttl: 60, limit: 1000 },
UPLOAD: { ttl: 60, limit: 10 },
};
export const JWT_CONSTANTS = {
ACCESS_TOKEN_EXPIRY: '1d',
REFRESH_TOKEN_EXPIRY: '7d',
};
export const CORRELATION_ID_HEADER = 'x-correlation-id';
export const API_KEY_HEADER = 'x-api-key';
export const DEPARTMENT_CODE_HEADER = 'x-department-code';

View File

@@ -0,0 +1,21 @@
import { applyDecorators, UseGuards } from '@nestjs/common';
import { ApiHeader, ApiUnauthorizedResponse } from '@nestjs/swagger';
import { ApiKeyGuard } from '../guards/api-key.guard';
import { API_KEY_HEADER, DEPARTMENT_CODE_HEADER } from '../constants';
export function ApiKeyAuth(): ReturnType<typeof applyDecorators> {
return applyDecorators(
UseGuards(ApiKeyGuard),
ApiHeader({
name: API_KEY_HEADER,
description: 'Department API Key',
required: true,
}),
ApiHeader({
name: DEPARTMENT_CODE_HEADER,
description: 'Department Code (e.g., FIRE_DEPT)',
required: true,
}),
ApiUnauthorizedResponse({ description: 'Invalid or missing API key' }),
);
}

View File

@@ -0,0 +1,7 @@
import { SetMetadata } from '@nestjs/common';
export const API_KEY_METADATA = 'api-key';
export const ApiKeyAuth = (): MethodDecorator & ClassDecorator => {
return SetMetadata(API_KEY_METADATA, true);
};

View File

@@ -0,0 +1,9 @@
import { createParamDecorator, ExecutionContext } from '@nestjs/common';
import { v4 as uuidv4 } from 'uuid';
export const CorrelationId = createParamDecorator(
(data: unknown, ctx: ExecutionContext) => {
const request = ctx.switchToHttp().getRequest();
return request.headers['x-correlation-id'] || uuidv4();
},
);

View File

@@ -0,0 +1,15 @@
import { createParamDecorator, ExecutionContext } from '@nestjs/common';
import { RequestContext } from '../interfaces/request-context.interface';
export const CurrentUser = createParamDecorator(
(data: keyof RequestContext | undefined, ctx: ExecutionContext) => {
const request = ctx.switchToHttp().getRequest();
const user = request.user as RequestContext;
if (data) {
return user?.[data];
}
return user;
},
);

View File

@@ -0,0 +1,7 @@
import { SetMetadata } from '@nestjs/common';
export const DEPARTMENT_METADATA = 'department';
export const RequireDepartment = (departmentId: string): MethodDecorator & ClassDecorator => {
return SetMetadata(DEPARTMENT_METADATA, departmentId);
};

View File

@@ -0,0 +1,6 @@
export * from './roles.decorator';
export * from './current-user.decorator';
export * from './api-key-auth.decorator';
export { API_KEY_METADATA } from './api-key.decorator';
export * from './correlation-id.decorator';
export * from './department.decorator';

View File

@@ -0,0 +1,6 @@
import { SetMetadata } from '@nestjs/common';
import { UserRole } from '../enums';
import { ROLES_KEY } from '../guards/roles.guard';
export const Roles = (...roles: (UserRole | string)[]): ReturnType<typeof SetMetadata> =>
SetMetadata(ROLES_KEY, roles);

View File

@@ -0,0 +1,33 @@
import { ApiProperty } from '@nestjs/swagger';
export class PaginatedResponse<T> {
@ApiProperty({
description: 'Array of items',
isArray: true,
})
data: T[];
@ApiProperty({
description: 'Total number of items',
example: 100,
})
total: number;
@ApiProperty({
description: 'Current page number',
example: 1,
})
page: number;
@ApiProperty({
description: 'Number of items per page',
example: 10,
})
limit: number;
@ApiProperty({
description: 'Total number of pages',
example: 10,
})
totalPages: number;
}

View File

@@ -0,0 +1,31 @@
import { IsOptional, IsNumber, Min, Max } from 'class-validator';
import { Type } from 'class-transformer';
import { ApiProperty } from '@nestjs/swagger';
export class PaginationDto {
@ApiProperty({
description: 'Page number',
example: 1,
required: false,
minimum: 1,
})
@Type(() => Number)
@IsOptional()
@IsNumber()
@Min(1)
page: number = 1;
@ApiProperty({
description: 'Number of items per page',
example: 10,
required: false,
minimum: 1,
maximum: 100,
})
@Type(() => Number)
@IsOptional()
@IsNumber()
@Min(1)
@Max(100)
limit: number = 10;
}

View File

@@ -0,0 +1,128 @@
export enum RequestStatus {
DRAFT = 'DRAFT',
SUBMITTED = 'SUBMITTED',
IN_REVIEW = 'IN_REVIEW',
PENDING_RESUBMISSION = 'PENDING_RESUBMISSION',
APPROVED = 'APPROVED',
REJECTED = 'REJECTED',
REVOKED = 'REVOKED',
CANCELLED = 'CANCELLED',
}
// Alias for backward compatibility
export const LicenseRequestStatus = RequestStatus;
export type LicenseRequestStatus = RequestStatus;
export enum ApprovalStatus {
PENDING = 'PENDING',
APPROVED = 'APPROVED',
REJECTED = 'REJECTED',
CHANGES_REQUESTED = 'CHANGES_REQUESTED',
REVIEW_REQUIRED = 'REVIEW_REQUIRED',
}
export enum TransactionType {
MINT_NFT = 'MINT_NFT',
APPROVAL = 'APPROVAL',
DOC_UPDATE = 'DOC_UPDATE',
REJECT = 'REJECT',
REVOKE = 'REVOKE',
}
export enum TransactionStatus {
PENDING = 'PENDING',
CONFIRMED = 'CONFIRMED',
FAILED = 'FAILED',
}
export enum WebhookEventType {
APPROVAL_REQUIRED = 'APPROVAL_REQUIRED',
DOCUMENT_UPDATED = 'DOCUMENT_UPDATED',
REQUEST_APPROVED = 'REQUEST_APPROVED',
REQUEST_REJECTED = 'REQUEST_REJECTED',
CHANGES_REQUESTED = 'CHANGES_REQUESTED',
}
export enum WebhookDeliveryStatus {
PENDING = 'PENDING',
SUCCESS = 'SUCCESS',
FAILED = 'FAILED',
}
// Alias for backward compatibility
export const WebhookLogStatus = WebhookDeliveryStatus;
export type WebhookLogStatus = WebhookDeliveryStatus;
export enum ActorType {
APPLICANT = 'APPLICANT',
DEPARTMENT = 'DEPARTMENT',
SYSTEM = 'SYSTEM',
ADMIN = 'ADMIN',
}
export enum EntityType {
REQUEST = 'REQUEST',
APPROVAL = 'APPROVAL',
DOCUMENT = 'DOCUMENT',
DEPARTMENT = 'DEPARTMENT',
WORKFLOW = 'WORKFLOW',
APPLICANT = 'APPLICANT',
}
export enum AuditAction {
CREATE = 'CREATE',
UPDATE = 'UPDATE',
DELETE = 'DELETE',
SUBMIT = 'SUBMIT',
APPROVE = 'APPROVE',
REJECT = 'REJECT',
CANCEL = 'CANCEL',
UPLOAD = 'UPLOAD',
DOWNLOAD = 'DOWNLOAD',
}
export enum RequestType {
RESORT_LICENSE = 'RESORT_LICENSE',
TRADE_LICENSE = 'TRADE_LICENSE',
BUILDING_PERMIT = 'BUILDING_PERMIT',
}
export enum DocumentType {
PROPERTY_OWNERSHIP = 'PROPERTY_OWNERSHIP',
FIRE_SAFETY_CERTIFICATE = 'FIRE_SAFETY_CERTIFICATE',
BUILDING_PLAN = 'BUILDING_PLAN',
ENVIRONMENTAL_CLEARANCE = 'ENVIRONMENTAL_CLEARANCE',
HEALTH_CERTIFICATE = 'HEALTH_CERTIFICATE',
TAX_CLEARANCE = 'TAX_CLEARANCE',
IDENTITY_PROOF = 'IDENTITY_PROOF',
OTHER = 'OTHER',
}
export enum WorkflowExecutionType {
SEQUENTIAL = 'SEQUENTIAL',
PARALLEL = 'PARALLEL',
}
export enum CompletionCriteria {
ALL = 'ALL',
ANY = 'ANY',
THRESHOLD = 'THRESHOLD',
}
export enum TimeoutAction {
NOTIFY = 'NOTIFY',
ESCALATE = 'ESCALATE',
AUTO_REJECT = 'AUTO_REJECT',
}
export enum RejectionAction {
FAIL_REQUEST = 'FAIL_REQUEST',
RETRY_STAGE = 'RETRY_STAGE',
ESCALATE = 'ESCALATE',
}
export enum UserRole {
ADMIN = 'ADMIN',
DEPARTMENT = 'DEPARTMENT',
APPLICANT = 'APPLICANT',
}

View File

@@ -0,0 +1,69 @@
import {
ExceptionFilter,
Catch,
ArgumentsHost,
HttpStatus,
Logger,
} from '@nestjs/common';
import { Response } from 'express';
import { ERROR_CODES } from '@common/constants/error-codes';
interface ErrorResponse {
success: boolean;
statusCode: number;
message: string;
error: {
code: string;
message: string;
};
timestamp: string;
path: string;
}
@Catch()
export class AllExceptionsFilter implements ExceptionFilter {
private readonly logger = new Logger(AllExceptionsFilter.name);
catch(exception: unknown, host: ArgumentsHost): void {
const ctx = host.switchToHttp();
const response = ctx.getResponse<Response>();
const request = ctx.getRequest();
let status = HttpStatus.INTERNAL_SERVER_ERROR;
let errorCode = ERROR_CODES.INTERNAL_SERVER_ERROR;
let message = 'An unexpected error occurred';
if (exception instanceof Error) {
this.logger.error(
`Unhandled Exception: ${exception.message}`,
exception.stack,
);
if (exception.message.includes('ECONNREFUSED')) {
status = HttpStatus.SERVICE_UNAVAILABLE;
errorCode = ERROR_CODES.SERVICE_UNAVAILABLE;
message = 'Database connection failed';
} else if (exception.message.includes('timeout')) {
status = HttpStatus.REQUEST_TIMEOUT;
errorCode = ERROR_CODES.TIMEOUT;
message = 'Operation timeout';
}
} else {
this.logger.error('Unhandled Exception:', exception);
}
const errorResponse: ErrorResponse = {
success: false,
statusCode: status,
message,
error: {
code: errorCode,
message,
},
timestamp: new Date().toISOString(),
path: request.url,
};
response.status(status).json(errorResponse);
}
}

View File

@@ -0,0 +1,97 @@
import {
ExceptionFilter,
Catch,
ArgumentsHost,
HttpException,
HttpStatus,
Logger,
} from '@nestjs/common';
import { Request, Response } from 'express';
import { ERROR_CODES } from '../constants';
interface ErrorResponse {
statusCode: number;
code: string;
message: string;
details?: Record<string, unknown>;
timestamp: string;
path: string;
correlationId?: string;
}
@Catch()
export class HttpExceptionFilter implements ExceptionFilter {
private readonly logger = new Logger(HttpExceptionFilter.name);
catch(exception: unknown, host: ArgumentsHost): void {
const ctx = host.switchToHttp();
const response = ctx.getResponse<Response>();
const request = ctx.getRequest<Request>();
const correlationId = request.headers['x-correlation-id'] as string;
let status = HttpStatus.INTERNAL_SERVER_ERROR;
let code = ERROR_CODES.INTERNAL_ERROR;
let message = 'Internal server error';
let details: Record<string, unknown> | undefined;
if (exception instanceof HttpException) {
status = exception.getStatus();
const exceptionResponse = exception.getResponse();
if (typeof exceptionResponse === 'object') {
const resp = exceptionResponse as Record<string, unknown>;
message = (resp.message as string) || exception.message;
code = (resp.code as string) || this.getErrorCode(status);
details = resp.details as Record<string, unknown>;
} else {
message = exceptionResponse as string;
code = this.getErrorCode(status);
}
} else if (exception instanceof Error) {
message = exception.message;
this.logger.error(
`Unhandled exception: ${message}`,
exception.stack,
correlationId,
);
}
const errorResponse: ErrorResponse = {
statusCode: status,
code,
message,
timestamp: new Date().toISOString(),
path: request.url,
};
if (details) {
errorResponse.details = details;
}
if (correlationId) {
errorResponse.correlationId = correlationId;
}
// Don't expose stack traces in production
if (process.env.NODE_ENV === 'development' && exception instanceof Error) {
errorResponse.details = { ...errorResponse.details, stack: exception.stack };
}
response.status(status).json(errorResponse);
}
private getErrorCode(status: number): string {
switch (status) {
case HttpStatus.BAD_REQUEST:
return ERROR_CODES.VALIDATION_ERROR;
case HttpStatus.UNAUTHORIZED:
return ERROR_CODES.UNAUTHORIZED;
case HttpStatus.FORBIDDEN:
return ERROR_CODES.INSUFFICIENT_PERMISSIONS;
case HttpStatus.NOT_FOUND:
return ERROR_CODES.NOT_FOUND;
default:
return ERROR_CODES.INTERNAL_ERROR;
}
}
}

View File

@@ -0,0 +1,2 @@
export * from './all-exceptions.filter';
export * from './http-exception.filter';

View File

@@ -0,0 +1,37 @@
import {
Injectable,
CanActivate,
ExecutionContext,
UnauthorizedException,
} from '@nestjs/common';
import { Request } from 'express';
import { API_KEY_HEADER, DEPARTMENT_CODE_HEADER, ERROR_CODES } from '../constants';
@Injectable()
export class ApiKeyGuard implements CanActivate {
async canActivate(context: ExecutionContext): Promise<boolean> {
const request = context.switchToHttp().getRequest<Request>();
const apiKey = request.headers[API_KEY_HEADER] as string;
const departmentCode = request.headers[DEPARTMENT_CODE_HEADER] as string;
if (!apiKey) {
throw new UnauthorizedException({
code: ERROR_CODES.INVALID_API_KEY,
message: 'API key is required',
});
}
if (!departmentCode) {
throw new UnauthorizedException({
code: ERROR_CODES.INVALID_API_KEY,
message: 'Department code is required',
});
}
// Note: Actual validation is done in AuthService
// This guard just ensures the headers are present
// The AuthModule middleware validates the API key
return true;
}
}

View File

@@ -0,0 +1,3 @@
export * from './api-key.guard';
export * from './jwt-auth.guard';
export * from './roles.guard';

View File

@@ -0,0 +1,18 @@
import { Injectable, UnauthorizedException } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
@Injectable()
export class JwtAuthGuard extends AuthGuard('jwt') {
handleRequest<User = unknown>(err: unknown, user: User, info: unknown): User {
if (err) {
throw err;
}
if (!user) {
const errorMessage = info instanceof Error ? info.message : 'Unauthorized';
throw new UnauthorizedException(errorMessage);
}
return user;
}
}

View File

@@ -0,0 +1,48 @@
import {
Injectable,
CanActivate,
ExecutionContext,
ForbiddenException,
} from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { UserRole } from '../enums';
import { ERROR_CODES } from '../constants';
export const ROLES_KEY = 'roles';
@Injectable()
export class RolesGuard implements CanActivate {
constructor(private reflector: Reflector) {}
canActivate(context: ExecutionContext): boolean {
const requiredRoles = this.reflector.getAllAndOverride<UserRole[]>(ROLES_KEY, [
context.getHandler(),
context.getClass(),
]);
if (!requiredRoles) {
return true;
}
const request = context.switchToHttp().getRequest();
const user = request.user;
if (!user || !user.role) {
throw new ForbiddenException({
code: ERROR_CODES.INSUFFICIENT_PERMISSIONS,
message: 'Access denied',
});
}
const hasRole = requiredRoles.some((role) => user.role === role);
if (!hasRole) {
throw new ForbiddenException({
code: ERROR_CODES.INSUFFICIENT_PERMISSIONS,
message: 'You do not have permission to perform this action',
});
}
return true;
}
}

View File

@@ -0,0 +1,25 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { Request, Response } from 'express';
import { v4 as uuidv4 } from 'uuid';
import { CORRELATION_ID_HEADER } from '../constants';
@Injectable()
export class CorrelationIdInterceptor implements NestInterceptor {
intercept(context: ExecutionContext, next: CallHandler): Observable<unknown> {
const request = context.switchToHttp().getRequest<Request>();
const response = context.switchToHttp().getResponse<Response>();
const correlationId = (request.headers[CORRELATION_ID_HEADER] as string) || uuidv4();
request.headers[CORRELATION_ID_HEADER] = correlationId;
response.setHeader(CORRELATION_ID_HEADER, correlationId);
return next.handle();
}
}

View File

@@ -0,0 +1,4 @@
export * from './logging.interceptor';
export * from './correlation-id.interceptor';
export * from './timeout.interceptor';
export * from './transform.interceptor';

View File

@@ -0,0 +1,58 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
Logger,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { tap } from 'rxjs/operators';
import { Request, Response } from 'express';
@Injectable()
export class LoggingInterceptor implements NestInterceptor {
private readonly logger = new Logger('HTTP');
intercept(context: ExecutionContext, next: CallHandler): Observable<unknown> {
const request = context.switchToHttp().getRequest<Request>();
const response = context.switchToHttp().getResponse<Response>();
const { method, url, ip } = request;
const correlationId = request.headers['x-correlation-id'] as string || 'no-correlation-id';
const userAgent = request.get('user-agent') || '';
const startTime = Date.now();
return next.handle().pipe(
tap({
next: (): void => {
const duration = Date.now() - startTime;
this.logger.log(
JSON.stringify({
correlationId,
method,
url,
statusCode: response.statusCode,
duration: `${duration}ms`,
ip,
userAgent,
}),
);
},
error: (error): void => {
const duration = Date.now() - startTime;
this.logger.error(
JSON.stringify({
correlationId,
method,
url,
statusCode: error.status || 500,
duration: `${duration}ms`,
ip,
userAgent,
error: error.message,
}),
);
},
}),
);
}
}

View File

@@ -0,0 +1,18 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
RequestTimeoutException,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { timeout } from 'rxjs/operators';
@Injectable()
export class TimeoutInterceptor implements NestInterceptor {
intercept(_context: ExecutionContext, next: CallHandler): Observable<unknown> {
return next.handle().pipe(
timeout(30000),
);
}
}

View File

@@ -0,0 +1,27 @@
import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
export interface ApiResponse<T> {
success: boolean;
data: T;
timestamp: string;
}
@Injectable()
export class TransformInterceptor<T> implements NestInterceptor<T, ApiResponse<T>> {
intercept(context: ExecutionContext, next: CallHandler): Observable<ApiResponse<T>> {
return next.handle().pipe(
map(data => ({
success: true,
data,
timestamp: new Date().toISOString(),
})),
);
}
}

View File

@@ -0,0 +1,104 @@
import { UserRole } from '../enums';
export interface RequestContext {
correlationId: string;
userId?: string;
departmentId?: string;
departmentCode?: string;
role?: UserRole;
ipAddress?: string;
userAgent?: string;
}
export interface JwtPayload {
sub: string;
email?: string;
role: UserRole;
departmentCode?: string;
iat?: number;
exp?: number;
}
export interface ApiKeyPayload {
departmentId: string;
departmentCode: string;
}
export interface PaginatedResult<T> {
data: T[];
meta: {
page: number;
limit: number;
total: number;
totalPages: number;
hasNext: boolean;
hasPrev: boolean;
};
}
export interface PaginationMetadata {
page: number;
limit: number;
total: number;
totalPages: number;
hasNextPage: boolean;
hasPreviousPage: boolean;
}
export interface WorkflowDefinition {
workflowId: string;
workflowType: string;
version: number;
isActive: boolean;
stages: WorkflowStage[];
createdAt: Date;
updatedAt: Date;
}
export interface WorkflowStage {
stageId: string;
stageName: string;
stageOrder: number;
executionType: 'SEQUENTIAL' | 'PARALLEL';
requiredApprovals: DepartmentApproval[];
completionCriteria: 'ALL' | 'ANY' | 'THRESHOLD';
threshold?: number;
timeoutDays?: number;
onTimeout: 'NOTIFY' | 'ESCALATE' | 'AUTO_REJECT';
onRejection: 'FAIL_REQUEST' | 'RETRY_STAGE' | 'ESCALATE';
}
export interface DepartmentApproval {
departmentCode: string;
departmentName: string;
requiredDocuments: string[];
isMandatory: boolean;
}
export interface TimelineEvent {
eventId: string;
eventType: string;
description: string;
actor: {
type: string;
id?: string;
name?: string;
};
metadata?: Record<string, unknown>;
transactionHash?: string;
timestamp: Date;
}
export interface WebhookPayload {
event: string;
timestamp: string;
data: Record<string, unknown>;
signature?: string;
}
export interface BlockchainReceipt {
transactionHash: string;
blockNumber: number;
gasUsed: bigint;
status: boolean;
}

View File

@@ -0,0 +1 @@
export * from './validation.pipe';

View File

@@ -0,0 +1,12 @@
import { PipeTransform, Injectable, BadRequestException } from '@nestjs/common';
import { validate as isUuid } from 'uuid';
@Injectable()
export class UuidValidationPipe implements PipeTransform<string, string> {
transform(value: string): string {
if (!isUuid(value)) {
throw new BadRequestException('Invalid UUID format');
}
return value;
}
}

View File

@@ -0,0 +1,44 @@
import {
PipeTransform,
Injectable,
ArgumentMetadata,
BadRequestException,
} from '@nestjs/common';
import { validate } from 'class-validator';
import { plainToInstance } from 'class-transformer';
import { ERROR_CODES } from '../constants';
@Injectable()
export class CustomValidationPipe implements PipeTransform {
async transform(value: unknown, { metatype }: ArgumentMetadata): Promise<unknown> {
if (!metatype || !this.toValidate(metatype)) {
return value;
}
const object = plainToInstance(metatype, value);
const errors = await validate(object);
if (errors.length > 0) {
const messages = errors.map((error) => {
const constraints = error.constraints || {};
return {
field: error.property,
errors: Object.values(constraints),
};
});
throw new BadRequestException({
code: ERROR_CODES.VALIDATION_ERROR,
message: 'Validation failed',
details: { validationErrors: messages },
});
}
return object;
}
private toValidate(metatype: new (...args: unknown[]) => unknown): boolean {
const types: (new (...args: unknown[]) => unknown)[] = [String, Boolean, Number, Array, Object];
return !types.includes(metatype);
}
}

View File

@@ -0,0 +1,7 @@
export type PaginatedResult<T> = {
data: T[];
total: number;
page: number;
limit: number;
totalPages: number;
};

View File

@@ -0,0 +1,83 @@
import { createCipheriv, createDecipheriv, randomBytes, scryptSync } from 'crypto';
import * as bcrypt from 'bcrypt';
import * as crypto from 'crypto';
export async function hash(data: string): Promise<string> {
return bcrypt.hash(data, 10);
}
export async function generateApiKey(): Promise<{
apiKey: string;
apiSecret: string;
apiKeyHash: string;
apiSecretHash: string;
}> {
const apiKey = `goa_${crypto.randomBytes(16).toString('hex')}`;
const apiSecret = crypto.randomBytes(32).toString('hex');
const [apiKeyHash, apiSecretHash] = await Promise.all([
hash(apiKey),
hash(apiSecret),
]);
return {
apiKey,
apiSecret,
apiKeyHash,
apiSecretHash,
};
}
export class CryptoUtil {
private static readonly ALGORITHM = 'aes-256-gcm';
private static readonly SALT_LENGTH = 16;
private static readonly TAG_LENGTH = 16;
private static readonly IV_LENGTH = 16;
static encrypt(data: string, password: string): string {
const salt = randomBytes(CryptoUtil.SALT_LENGTH);
const key = scryptSync(password, salt, 32);
const iv = randomBytes(CryptoUtil.IV_LENGTH);
const cipher = createCipheriv(CryptoUtil.ALGORITHM, key, iv);
const encrypted = Buffer.concat([
cipher.update(data, 'utf8'),
cipher.final(),
]);
const authTag = cipher.getAuthTag();
return Buffer.concat([salt, iv, authTag, encrypted]).toString('hex');
}
static decrypt(encryptedData: string, password: string): string {
const buffer = Buffer.from(encryptedData, 'hex');
const salt = buffer.subarray(0, CryptoUtil.SALT_LENGTH);
const iv = buffer.subarray(
CryptoUtil.SALT_LENGTH,
CryptoUtil.SALT_LENGTH + CryptoUtil.IV_LENGTH,
);
const authTag = buffer.subarray(
CryptoUtil.SALT_LENGTH + CryptoUtil.IV_LENGTH,
CryptoUtil.SALT_LENGTH + CryptoUtil.IV_LENGTH + CryptoUtil.TAG_LENGTH,
);
const encrypted = buffer.subarray(
CryptoUtil.SALT_LENGTH + CryptoUtil.IV_LENGTH + CryptoUtil.TAG_LENGTH,
);
const key = scryptSync(password, salt, 32);
const decipher = createDecipheriv(CryptoUtil.ALGORITHM, key, iv);
decipher.setAuthTag(authTag);
return decipher.update(encrypted) + decipher.final('utf8');
}
static generateKey(length: number = 32): string {
return randomBytes(length).toString('hex');
}
static generateIV(length: number = 16): string {
return randomBytes(length).toString('hex');
}
}

View File

@@ -0,0 +1,97 @@
export class DateUtil {
static getCurrentTimestamp(): Date {
return new Date();
}
static getTimestampInSeconds(): number {
return Math.floor(Date.now() / 1000);
}
static addDays(date: Date, days: number): Date {
const result = new Date(date);
result.setDate(result.getDate() + days);
return result;
}
static addHours(date: Date, hours: number): Date {
const result = new Date(date);
result.setHours(result.getHours() + hours);
return result;
}
static addMinutes(date: Date, minutes: number): Date {
const result = new Date(date);
result.setMinutes(result.getMinutes() + minutes);
return result;
}
static addSeconds(date: Date, seconds: number): Date {
const result = new Date(date);
result.setSeconds(result.getSeconds() + seconds);
return result;
}
static isExpired(date: Date): boolean {
return date < this.getCurrentTimestamp();
}
static getDifferenceInSeconds(date1: Date, date2: Date): number {
return Math.floor((date1.getTime() - date2.getTime()) / 1000);
}
static getDifferenceInMinutes(date1: Date, date2: Date): number {
return Math.floor(this.getDifferenceInSeconds(date1, date2) / 60);
}
static getDifferenceInHours(date1: Date, date2: Date): number {
return Math.floor(this.getDifferenceInMinutes(date1, date2) / 60);
}
static getDifferenceInDays(date1: Date, date2: Date): number {
return Math.floor(this.getDifferenceInHours(date1, date2) / 24);
}
static startOfDay(date: Date = new Date()): Date {
const result = new Date(date);
result.setHours(0, 0, 0, 0);
return result;
}
static endOfDay(date: Date = new Date()): Date {
const result = new Date(date);
result.setHours(23, 59, 59, 999);
return result;
}
static startOfMonth(date: Date = new Date()): Date {
const result = new Date(date);
result.setDate(1);
result.setHours(0, 0, 0, 0);
return result;
}
static endOfMonth(date: Date = new Date()): Date {
const result = new Date(date.getFullYear(), date.getMonth() + 1, 0);
result.setHours(23, 59, 59, 999);
return result;
}
static formatISO(date: Date): string {
return date.toISOString();
}
static formatDate(date: Date, format: string = 'DD/MM/YYYY'): string {
const day = String(date.getDate()).padStart(2, '0');
const month = String(date.getMonth() + 1).padStart(2, '0');
const year = date.getFullYear();
return format
.replace('DD', day)
.replace('MM', month)
.replace('YYYY', year.toString());
}
static parseISO(dateString: string): Date {
return new Date(dateString);
}
}

View File

@@ -0,0 +1,75 @@
import * as crypto from 'crypto';
import * as bcrypt from 'bcrypt';
export class HashUtil {
/**
* Generate SHA-256 hash from buffer
*/
static sha256(buffer: Buffer): string {
return crypto.createHash('sha256').update(buffer).digest('hex');
}
/**
* Generate SHA-256 hash from string
*/
static sha256String(input: string): string {
return crypto.createHash('sha256').update(input).digest('hex');
}
/**
* Generate Keccak-256 hash (Ethereum compatible)
*/
static keccak256(input: string): string {
return crypto.createHash('sha3-256').update(input).digest('hex');
}
/**
* Hash password using bcrypt
*/
static async hashPassword(password: string, rounds = 10): Promise<string> {
return bcrypt.hash(password, rounds);
}
/**
* Compare password with hash
*/
static async comparePassword(password: string, hash: string): Promise<boolean> {
return bcrypt.compare(password, hash);
}
/**
* Generate secure random API key
*/
static generateApiKey(): string {
return crypto.randomBytes(32).toString('hex');
}
/**
* Generate secure random secret
*/
static generateSecret(): string {
return crypto.randomBytes(48).toString('base64url');
}
/**
* Generate HMAC signature for webhooks
*/
static generateHmacSignature(payload: string, secret: string): string {
return crypto.createHmac('sha256', secret).update(payload).digest('hex');
}
/**
* Verify HMAC signature
*/
static verifyHmacSignature(payload: string, secret: string, signature: string): boolean {
const expectedSignature = this.generateHmacSignature(payload, secret);
return crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(expectedSignature));
}
/**
* Generate UUID v4
*/
static generateUuid(): string {
return crypto.randomUUID();
}
}

View File

@@ -0,0 +1,5 @@
export * from './hash.util';
export * from './crypto.util';
export * from './date.util';
export * from './request-number.util';
export * from './pagination.util';

View File

@@ -0,0 +1,25 @@
import { QueryBuilder } from 'objection';
export interface PaginatedResult<T> {
results: T[];
total: number;
}
export interface PaginationOptions {
page: number;
limit: number;
}
export async function paginate<T>(
query: QueryBuilder<any, T[]>,
page: number,
limit: number,
): Promise<PaginatedResult<T>> {
const p = page > 0 ? page - 1 : 0;
const l = limit > 0 ? limit : 10;
const { results, total } = await query.page(p, l);
return { results, total };
}
export { QueryBuilder };

View File

@@ -0,0 +1,41 @@
import { RequestType } from '../enums';
import { REQUEST_NUMBER_PREFIX } from '../constants';
export class RequestNumberUtil {
/**
* Generate unique request number
* Format: {PREFIX}-{YEAR}-{SEQUENCE}
* Example: RL-2024-000001
*/
static generate(requestType: RequestType, sequence: number): string {
const prefix = REQUEST_NUMBER_PREFIX[requestType] || 'RQ';
const year = new Date().getFullYear();
const paddedSequence = sequence.toString().padStart(6, '0');
return `${prefix}-${year}-${paddedSequence}`;
}
/**
* Parse request number to extract components
*/
static parse(requestNumber: string): {
prefix: string;
year: number;
sequence: number;
} | null {
const match = requestNumber.match(/^([A-Z]+)-(\d{4})-(\d+)$/);
if (!match) return null;
return {
prefix: match[1],
year: parseInt(match[2], 10),
sequence: parseInt(match[3], 10),
};
}
/**
* Validate request number format
*/
static isValid(requestNumber: string): boolean {
return /^[A-Z]+-\d{4}-\d{6}$/.test(requestNumber);
}
}

View File

@@ -0,0 +1,59 @@
import { registerAs } from '@nestjs/config';
import * as Joi from 'joi';
export const appConfigValidationSchema = Joi.object({
NODE_ENV: Joi.string().valid('development', 'production', 'test').default('development'),
PORT: Joi.number().default(3001),
API_VERSION: Joi.string().default('v1'),
API_PREFIX: Joi.string().default('api'),
DATABASE_HOST: Joi.string().required(),
DATABASE_PORT: Joi.number().default(5432),
DATABASE_NAME: Joi.string().required(),
DATABASE_USER: Joi.string().required(),
DATABASE_PASSWORD: Joi.string().required(),
DATABASE_SSL: Joi.boolean().default(false),
BESU_RPC_URL: Joi.string().uri().required(),
BESU_CHAIN_ID: Joi.number().required(),
CONTRACT_ADDRESS_LICENSE_NFT: Joi.string().allow('').default(''),
CONTRACT_ADDRESS_APPROVAL_MANAGER: Joi.string().allow('').default(''),
CONTRACT_ADDRESS_DEPARTMENT_REGISTRY: Joi.string().allow('').default(''),
CONTRACT_ADDRESS_WORKFLOW_REGISTRY: Joi.string().allow('').default(''),
PLATFORM_WALLET_PRIVATE_KEY: Joi.string().allow('').default(''),
MINIO_ENDPOINT: Joi.string().required(),
MINIO_PORT: Joi.number().default(9000),
MINIO_ACCESS_KEY: Joi.string().required(),
MINIO_SECRET_KEY: Joi.string().required(),
MINIO_BUCKET_DOCUMENTS: Joi.string().default('goa-gel-documents'),
MINIO_USE_SSL: Joi.boolean().default(false),
REDIS_HOST: Joi.string().default('localhost'),
REDIS_PORT: Joi.number().default(6379),
REDIS_PASSWORD: Joi.string().allow('').default(''),
JWT_SECRET: Joi.string().min(32).required(),
JWT_EXPIRATION: Joi.string().default('1d'),
API_KEY_SALT_ROUNDS: Joi.number().default(10),
MAX_FILE_SIZE: Joi.number().default(10485760),
ALLOWED_MIME_TYPES: Joi.string().default('application/pdf,image/jpeg,image/png'),
RATE_LIMIT_GLOBAL: Joi.number().default(100),
RATE_LIMIT_API_KEY: Joi.number().default(1000),
LOG_LEVEL: Joi.string().valid('error', 'warn', 'info', 'debug').default('info'),
CORS_ORIGIN: Joi.string().default('http://localhost:3000'),
SWAGGER_ENABLED: Joi.boolean().default(true),
});
export default registerAs('app', () => ({
nodeEnv: process.env.NODE_ENV || 'development',
port: parseInt(process.env.PORT || '3001', 10),
apiVersion: process.env.API_VERSION || 'v1',
apiPrefix: process.env.API_PREFIX || 'api',
corsOrigin: process.env.CORS_ORIGIN || 'http://localhost:3000',
swaggerEnabled: process.env.SWAGGER_ENABLED === 'true',
}));

View File

@@ -0,0 +1,35 @@
import { registerAs } from '@nestjs/config';
export interface BlockchainConfig {
rpcUrl: string;
chainId: number;
networkId: number;
contracts: {
licenseNft: string | undefined;
approvalManager: string | undefined;
departmentRegistry: string | undefined;
workflowRegistry: string | undefined;
};
platformWallet: {
privateKey: string | undefined;
};
gasLimit: number;
confirmationBlocks: number;
}
export default registerAs('blockchain', () => ({
rpcUrl: process.env.BESU_RPC_URL || 'http://localhost:8545',
chainId: parseInt(process.env.BESU_CHAIN_ID || '1337', 10),
networkId: parseInt(process.env.BESU_NETWORK_ID || '2024', 10),
contracts: {
licenseNft: process.env.CONTRACT_ADDRESS_LICENSE_NFT,
approvalManager: process.env.CONTRACT_ADDRESS_APPROVAL_MANAGER,
departmentRegistry: process.env.CONTRACT_ADDRESS_DEPARTMENT_REGISTRY,
workflowRegistry: process.env.CONTRACT_ADDRESS_WORKFLOW_REGISTRY,
},
platformWallet: {
privateKey: process.env.PLATFORM_WALLET_PRIVATE_KEY,
},
gasLimit: parseInt(process.env.BLOCKCHAIN_GAS_LIMIT || '8000000', 10),
confirmationBlocks: parseInt(process.env.BLOCKCHAIN_CONFIRMATION_BLOCKS || '1', 10),
}));

View File

@@ -0,0 +1,13 @@
import { registerAs } from '@nestjs/config';
export default registerAs('database', () => ({
host: process.env.DATABASE_HOST || 'localhost',
port: parseInt(process.env.DATABASE_PORT || '5432', 10),
database: process.env.DATABASE_NAME || 'goa_gel_platform',
username: process.env.DATABASE_USER || 'postgres',
password: process.env.DATABASE_PASSWORD || 'postgres',
ssl: process.env.DATABASE_SSL === 'true',
logging: process.env.DATABASE_LOGGING === 'true',
synchronize: false,
migrationsRun: true,
}));

View File

@@ -0,0 +1,7 @@
export { default as appConfig, appConfigValidationSchema } from './app.config';
export { default as databaseConfig } from './database.config';
export { default as blockchainConfig } from './blockchain.config';
export { default as storageConfig } from './storage.config';
export { default as redisConfig } from './redis.config';
export { default as jwtConfig } from './jwt.config';
export { default as minioConfig } from './minio.config';

View File

@@ -0,0 +1,32 @@
import { registerAs } from '@nestjs/config';
export interface JwtConfig {
secret: string;
expiresIn: string;
refreshSecret: string;
refreshExpiresIn: string;
apiKeyHeader: string;
apiKeyValue: string;
}
export default registerAs('jwt', (): JwtConfig => {
const secret = process.env.JWT_SECRET || 'your-super-secret-jwt-key-change-this-in-production';
const refreshSecret =
process.env.JWT_REFRESH_SECRET || 'your-refresh-secret-key-change-this-in-production';
if (
secret === 'your-super-secret-jwt-key-change-this-in-production' ||
refreshSecret === 'your-refresh-secret-key-change-this-in-production'
) {
console.warn('Warning: JWT secrets are using default values. Change these in production!');
}
return {
secret,
expiresIn: process.env.JWT_EXPIRATION || '7d',
refreshSecret,
refreshExpiresIn: process.env.JWT_REFRESH_EXPIRATION || '30d',
apiKeyHeader: process.env.API_KEY_HEADER || 'X-API-Key',
apiKeyValue: process.env.API_KEY_VALUE || 'your-api-key-change-this-in-production',
};
});

View File

@@ -0,0 +1,32 @@
import { registerAs } from '@nestjs/config';
export interface MinioConfig {
endpoint: string;
port: number;
accessKey: string;
secretKey: string;
useSSL: boolean;
region: string;
bucketDocuments: string;
bucketArchives: string;
}
export default registerAs('minio', (): MinioConfig => {
const accessKey = process.env.MINIO_ACCESS_KEY || 'minioadmin';
const secretKey = process.env.MINIO_SECRET_KEY || 'minioadmin_secret_change_this';
if (accessKey === 'minioadmin' || secretKey === 'minioadmin_secret_change_this') {
console.warn('Warning: MinIO credentials are using default values. Change these in production!');
}
return {
endpoint: process.env.MINIO_ENDPOINT || 'localhost',
port: parseInt(process.env.MINIO_PORT || '9000', 10),
accessKey,
secretKey,
useSSL: process.env.MINIO_USE_SSL === 'true',
region: process.env.MINIO_REGION || 'us-east-1',
bucketDocuments: process.env.MINIO_BUCKET_DOCUMENTS || 'goa-gel-documents',
bucketArchives: process.env.MINIO_BUCKET_ARCHIVES || 'goa-gel-archives',
};
});

View File

@@ -0,0 +1,8 @@
import { registerAs } from '@nestjs/config';
export default registerAs('redis', () => ({
host: process.env.REDIS_HOST || 'localhost',
port: parseInt(process.env.REDIS_PORT || '6379', 10),
password: process.env.REDIS_PASSWORD || undefined,
db: parseInt(process.env.REDIS_DB || '0', 10),
}));

View File

@@ -0,0 +1,12 @@
import { registerAs } from '@nestjs/config';
export default registerAs('storage', () => ({
endpoint: process.env.MINIO_ENDPOINT || 'localhost',
port: parseInt(process.env.MINIO_PORT || '9000', 10),
accessKey: process.env.MINIO_ACCESS_KEY || 'minioadmin',
secretKey: process.env.MINIO_SECRET_KEY || 'minioadmin',
bucket: process.env.MINIO_BUCKET_DOCUMENTS || 'goa-gel-documents',
useSSL: process.env.MINIO_USE_SSL === 'true',
region: process.env.MINIO_REGION || 'us-east-1',
signedUrlExpiry: parseInt(process.env.MINIO_SIGNED_URL_EXPIRY || '3600', 10),
}));

View File

@@ -0,0 +1,3 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -0,0 +1,298 @@
# Goa GEL Database Schema
This directory contains all database entities, migrations, and seeders for the Goa GEL Blockchain Document Verification Platform.
## Directory Structure
```
src/database/
├── entities/ # TypeORM entity definitions
│ ├── applicant.entity.ts
│ ├── department.entity.ts
│ ├── license-request.entity.ts
│ ├── document.entity.ts
│ ├── document-version.entity.ts
│ ├── approval.entity.ts
│ ├── workflow.entity.ts
│ ├── workflow-state.entity.ts
│ ├── webhook.entity.ts
│ ├── webhook-log.entity.ts
│ ├── audit-log.entity.ts
│ ├── blockchain-transaction.entity.ts
│ └── index.ts
├── migrations/ # TypeORM migrations
│ └── 1704067200000-InitialSchema.ts
├── seeders/ # Database seeders
│ └── seed.ts
├── data-source.ts # TypeORM DataSource configuration
└── index.ts # Main exports
```
## Database Entities Overview
### Core Entities
1. **Applicant** - Represents individuals applying for licenses
- Unique: digilockerId, email, walletAddress
- Relations: OneToMany with LicenseRequest
2. **Department** - Represents government departments handling approvals
- Unique: code, walletAddress
- Relations: OneToMany with Approval, OneToMany with Webhook
3. **Workflow** - Defines multi-stage approval workflows
- Unique: workflowType
- Contains: stages, rules, and requirements
- Relations: OneToMany with LicenseRequest
4. **LicenseRequest** - Main entity for license applications
- Unique: requestNumber
- Status: DRAFT, SUBMITTED, IN_REVIEW, PENDING_RESUBMISSION, APPROVED, REJECTED, REVOKED, CANCELLED
- Relations: ManyToOne Applicant, ManyToOne Workflow, OneToMany Document, OneToMany Approval, OneToOne WorkflowState
### Document Management
5. **Document** - Represents uploaded documents for a request
- Tracks: filename, version, hash, minio bucket location
- Relations: ManyToOne LicenseRequest, OneToMany DocumentVersion
6. **DocumentVersion** - Audit trail for document changes
- Tracks: version number, hash, file size, mime type, uploader
- Ensures: (documentId, version) uniqueness
### Approval & Workflow
7. **Approval** - Records department approvals
- Status: PENDING, APPROVED, REJECTED, CHANGES_REQUESTED, REVIEW_REQUIRED
- Tracks: remarks, reviewed documents, blockchain tx hash
- Can be invalidated with reason
8. **WorkflowState** - Tracks execution state of workflow
- Current stage, completed stages, pending approvals
- Full execution log with timestamps and details
- OneToOne relationship with LicenseRequest
### Webhooks & Audit
9. **Webhook** - Department webhook configurations
- Stores: URL, events to listen for, secret hash
- Relations: OneToMany with WebhookLog
10. **WebhookLog** - Audit trail for webhook deliveries
- Status: PENDING, SUCCESS, FAILED
- Tracks: response status, body, response time, retry count
11. **AuditLog** - Comprehensive audit trail
- Tracks: entity changes, actor, old/new values
- Stores: IP address, user agent, correlation ID
- Index optimization for queries by entity type and actor
### Blockchain Integration
12. **BlockchainTransaction** - NFT minting and on-chain operations
- Types: MINT_NFT, APPROVAL, DOC_UPDATE, REJECT, REVOKE
- Status: PENDING, CONFIRMED, FAILED
- Tracks: tx hash, block number, gas used, error messages
## Environment Variables
Set the following in your `.env` file:
```env
# Database Connection
DB_HOST=localhost
DB_PORT=5432
DB_USERNAME=postgres
DB_PASSWORD=your_password
DB_NAME=goa_gel_db
NODE_ENV=development
```
## Setup Instructions
### 1. Install Dependencies
```bash
npm install typeorm pg uuid crypto
```
### 2. Create Database
```bash
# Using PostgreSQL client
createdb goa_gel_db
# Or using Docker
docker run --name goa_gel_postgres \
-e POSTGRES_DB=goa_gel_db \
-e POSTGRES_PASSWORD=your_password \
-p 5432:5432 \
-d postgres:15-alpine
```
### 3. Run Migrations
```bash
# Run all pending migrations
npx typeorm migration:run -d src/database/data-source.ts
# Generate a new migration (auto-detects schema changes)
npx typeorm migration:generate -d src/database/data-source.ts -n YourMigrationName
# Revert last migration
npx typeorm migration:revert -d src/database/data-source.ts
```
### 4. Seed Database with Sample Data
```bash
# Run the seed script
npx ts-node src/database/seeders/seed.ts
```
After seeding, you'll have:
- 4 sample departments (Fire, Tourism, Municipal, Health)
- 1 RESORT_LICENSE workflow with 5 stages
- 2 sample applicants
- 1 license request in DRAFT status with workflow state
### 5. Verify Setup
```bash
# Connect to the database
psql goa_gel_db
# List tables
\dt
# Check migrations table
SELECT * FROM typeorm_migrations;
# Exit
\q
```
## Entity Relationships Diagram
```
Applicant (1) ──→ (N) LicenseRequest
├──→ (N) Document ──→ (N) DocumentVersion
├──→ (N) Approval ←─── (1) Department (N)
└──→ (1) WorkflowState
Department (1) ──→ (N) Approval
(1) ──→ (N) Webhook ──→ (N) WebhookLog
Workflow (1) ──→ (N) LicenseRequest
AuditLog - tracks all changes to core entities
BlockchainTransaction - records all on-chain operations
```
## Key Features
### Indexes for Performance
- All frequently queried columns are indexed
- Composite indexes for common query patterns
- JSONB columns for flexible metadata storage
### Cascade Operations
- DELETE cascades properly configured
- Orphaned records cleaned up automatically
### Audit Trail
- Every change tracked in audit_logs
- Actor type and ID recorded
- Old/new values stored for analysis
- IP address and user agent captured
### Blockchain Integration
- All critical operations can be recorded on-chain
- Transaction status tracking
- Error handling with rollback support
### Workflow State Management
- Execution log with full history
- Pending approvals tracking
- Stage transition audit trail
- Extensible for complex workflows
## Common Queries
### Get all license requests for an applicant
```sql
SELECT lr.* FROM license_requests lr
WHERE lr.applicantId = $1
ORDER BY lr.createdAt DESC;
```
### Get pending approvals for a department
```sql
SELECT a.* FROM approvals a
WHERE a.departmentId = $1 AND a.status = 'PENDING'
ORDER BY a.createdAt ASC;
```
### Get audit trail for a specific request
```sql
SELECT al.* FROM audit_logs al
WHERE al.entityType = 'REQUEST' AND al.entityId = $1
ORDER BY al.createdAt DESC;
```
### Get blockchain transaction status
```sql
SELECT bt.* FROM blockchain_transactions bt
WHERE bt.relatedEntityId = $1
ORDER BY bt.createdAt DESC;
```
## Maintenance
### Backup Database
```bash
pg_dump goa_gel_db > backup_$(date +%Y%m%d_%H%M%S).sql
```
### Restore Database
```bash
psql goa_gel_db < backup_file.sql
```
### Monitor Performance
```sql
-- Check table sizes
SELECT schemaname, tablename, pg_size_pretty(pg_total_relation_size(schemaname||'.'||tablename))
FROM pg_tables
WHERE schemaname NOT IN ('pg_catalog', 'information_schema')
ORDER BY pg_total_relation_size(schemaname||'.'||tablename) DESC;
-- Check slow queries
SELECT query, calls, mean_time FROM pg_stat_statements
ORDER BY mean_time DESC LIMIT 10;
```
## Troubleshooting
### Connection Issues
- Verify PostgreSQL service is running
- Check DB_HOST, DB_PORT, DB_USERNAME, DB_PASSWORD
- Ensure database exists: `createdb goa_gel_db`
### Migration Issues
- Check TypeORM synchronize is false in production
- Ensure migrations run in correct order
- Validate SQL syntax in migration files
### Seeding Issues
- Drop existing data: `npx typeorm schema:drop -d src/database/data-source.ts`
- Re-run migrations and seed
- Check console output for specific errors
## Related Files
- `/src/database/data-source.ts` - TypeORM DataSource configuration
- `/src/database/migrations/` - SQL migration files
- `/src/database/seeders/` - Sample data generators
- `.env` - Environment variables

View File

@@ -0,0 +1,53 @@
import { Module, Global, OnModuleDestroy, Inject } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import Knex from 'knex';
import { Model } from 'objection';
import { ModelsModule } from './models.module';
export const KNEX_CONNECTION = 'KNEX_CONNECTION';
@Global()
@Module({
imports: [ModelsModule],
providers: [
{
provide: KNEX_CONNECTION,
inject: [ConfigService],
useFactory: async (configService: ConfigService) => {
const knex = Knex({
client: 'pg',
connection: {
host: configService.get<string>('database.host'),
port: configService.get<number>('database.port'),
database: configService.get<string>('database.database'),
user: configService.get<string>('database.username'),
password: configService.get<string>('database.password'),
ssl: configService.get<boolean>('database.ssl')
? { rejectUnauthorized: false }
: false,
},
pool: {
min: 2,
max: 10,
},
debug: configService.get<boolean>('database.logging'),
});
// Bind Objection.js to Knex
Model.knex(knex);
return knex;
},
},
],
exports: [KNEX_CONNECTION, ModelsModule],
})
export class DatabaseModule implements OnModuleDestroy {
constructor(@Inject(KNEX_CONNECTION) private readonly knex: Knex.Knex) { }
async onModuleDestroy(): Promise<void> {
if (this.knex) {
await (this.knex as unknown as Knex.Knex).destroy();
}
}
}

View File

@@ -0,0 +1,3 @@
export * from './models';
export { DatabaseModule, KNEX_CONNECTION } from './database.module';
export { ModelsModule } from './models.module';

View File

@@ -0,0 +1,81 @@
import type { Knex } from 'knex';
import { config } from 'dotenv';
config();
const knexConfig: { [key: string]: Knex.Config } = {
development: {
client: 'pg',
connection: {
host: process.env.DATABASE_HOST || 'localhost',
port: parseInt(process.env.DATABASE_PORT || '5432', 10),
database: process.env.DATABASE_NAME || 'goa_gel_platform',
user: process.env.DATABASE_USER || 'postgres',
password: process.env.DATABASE_PASSWORD || 'postgres',
},
pool: {
min: 2,
max: 10,
},
migrations: {
directory: './migrations',
extension: 'ts',
tableName: 'knex_migrations',
},
seeds: {
directory: './seeds',
extension: 'ts',
},
},
production: {
client: 'pg',
connection: {
host: process.env.DATABASE_HOST,
port: parseInt(process.env.DATABASE_PORT || '5432', 10),
database: process.env.DATABASE_NAME,
user: process.env.DATABASE_USER,
password: process.env.DATABASE_PASSWORD,
ssl: process.env.DATABASE_SSL === 'true' ? { rejectUnauthorized: false } : false,
},
pool: {
min: 2,
max: 20,
},
migrations: {
directory: './migrations',
extension: 'js',
tableName: 'knex_migrations',
},
seeds: {
directory: './seeds',
extension: 'js',
},
},
test: {
client: 'pg',
connection: {
host: process.env.DATABASE_HOST || 'localhost',
port: parseInt(process.env.DATABASE_PORT || '5432', 10),
database: process.env.DATABASE_NAME || 'goa_gel_platform_test',
user: process.env.DATABASE_USER || 'postgres',
password: process.env.DATABASE_PASSWORD || 'postgres',
},
pool: {
min: 1,
max: 5,
},
migrations: {
directory: './migrations',
extension: 'ts',
tableName: 'knex_migrations',
},
seeds: {
directory: './seeds',
extension: 'ts',
},
},
};
export default knexConfig;

View File

@@ -0,0 +1,246 @@
import type { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
// Enable UUID extension
await knex.raw('CREATE EXTENSION IF NOT EXISTS "uuid-ossp"');
// Applicants table
await knex.schema.createTable('applicants', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('digilocker_id', 255).notNullable().unique();
table.string('name', 255).notNullable();
table.string('email', 255).notNullable();
table.string('phone', 20);
table.string('wallet_address', 42);
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('digilocker_id', 'idx_applicant_digilocker');
table.index('email', 'idx_applicant_email');
});
// Departments table
await knex.schema.createTable('departments', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('code', 50).notNullable().unique();
table.string('name', 255).notNullable();
table.string('wallet_address', 42).unique();
table.string('api_key_hash', 255);
table.string('api_secret_hash', 255);
table.string('webhook_url', 500);
table.string('webhook_secret_hash', 255);
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('code', 'idx_department_code');
table.index('is_active', 'idx_department_active');
});
// Workflows table
await knex.schema.createTable('workflows', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('workflow_type', 100).notNullable().unique();
table.string('name', 255).notNullable();
table.text('description');
table.integer('version').notNullable().defaultTo(1);
table.jsonb('definition').notNullable();
table.boolean('is_active').notNullable().defaultTo(true);
table.uuid('created_by');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('workflow_type', 'idx_workflow_type');
table.index('is_active', 'idx_workflow_active');
});
// License Requests table
await knex.schema.createTable('license_requests', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('request_number', 50).notNullable().unique();
table.bigInteger('token_id');
table.uuid('applicant_id').notNullable().references('id').inTable('applicants').onDelete('CASCADE');
table.string('request_type', 100).notNullable();
table.uuid('workflow_id').references('id').inTable('workflows').onDelete('SET NULL');
table.string('status', 50).notNullable().defaultTo('DRAFT');
table.jsonb('metadata');
table.string('current_stage_id', 100);
table.string('blockchain_tx_hash', 66);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('submitted_at');
table.timestamp('approved_at');
table.index('request_number', 'idx_request_number');
table.index('applicant_id', 'idx_request_applicant');
table.index('status', 'idx_request_status');
table.index('request_type', 'idx_request_type');
table.index('created_at', 'idx_request_created');
table.index(['status', 'request_type'], 'idx_request_status_type');
});
// Documents table
await knex.schema.createTable('documents', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('request_id').notNullable().references('id').inTable('license_requests').onDelete('CASCADE');
table.string('doc_type', 100).notNullable();
table.string('original_filename', 255).notNullable();
table.integer('current_version').notNullable().defaultTo(1);
table.string('current_hash', 66).notNullable();
table.string('minio_bucket', 100).notNullable();
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('request_id', 'idx_document_request');
table.index('doc_type', 'idx_document_type');
});
// Document Versions table
await knex.schema.createTable('document_versions', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('document_id').notNullable().references('id').inTable('documents').onDelete('CASCADE');
table.integer('version').notNullable();
table.string('hash', 66).notNullable();
table.string('minio_path', 500).notNullable();
table.bigInteger('file_size').notNullable();
table.string('mime_type', 100).notNullable();
table.uuid('uploaded_by').notNullable();
table.string('blockchain_tx_hash', 66);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.unique(['document_id', 'version'], { indexName: 'uq_document_version' });
table.index('document_id', 'idx_docversion_document');
});
// Approvals table
await knex.schema.createTable('approvals', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('request_id').notNullable().references('id').inTable('license_requests').onDelete('CASCADE');
table.uuid('department_id').notNullable().references('id').inTable('departments').onDelete('CASCADE');
table.string('status', 50).notNullable().defaultTo('PENDING');
table.text('remarks');
table.string('remarks_hash', 66);
table.jsonb('reviewed_documents');
table.string('blockchain_tx_hash', 66);
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('invalidated_at');
table.string('invalidation_reason', 255);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('request_id', 'idx_approval_request');
table.index('department_id', 'idx_approval_department');
table.index('status', 'idx_approval_status');
table.index(['request_id', 'department_id'], 'idx_approval_request_dept');
});
// Workflow States table
await knex.schema.createTable('workflow_states', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('request_id').notNullable().unique().references('id').inTable('license_requests').onDelete('CASCADE');
table.string('current_stage_id', 100).notNullable();
table.jsonb('completed_stages').notNullable().defaultTo('[]');
table.jsonb('pending_approvals').notNullable().defaultTo('[]');
table.jsonb('execution_log').notNullable().defaultTo('[]');
table.timestamp('stage_started_at');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('request_id', 'idx_wfstate_request');
});
// Webhooks table
await knex.schema.createTable('webhooks', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('department_id').notNullable().references('id').inTable('departments').onDelete('CASCADE');
table.string('url', 500).notNullable();
table.jsonb('events').notNullable();
table.string('secret_hash', 255).notNullable();
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('department_id', 'idx_webhook_department');
});
// Webhook Logs table
await knex.schema.createTable('webhook_logs', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.uuid('webhook_id').notNullable().references('id').inTable('webhooks').onDelete('CASCADE');
table.string('event_type', 100).notNullable();
table.jsonb('payload').notNullable();
table.integer('response_status');
table.text('response_body');
table.integer('response_time');
table.integer('retry_count').notNullable().defaultTo(0);
table.string('status', 20).notNullable().defaultTo('PENDING');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.index('webhook_id', 'idx_webhooklog_webhook');
table.index('event_type', 'idx_webhooklog_event');
table.index('status', 'idx_webhooklog_status');
table.index('created_at', 'idx_webhooklog_created');
});
// Audit Logs table
await knex.schema.createTable('audit_logs', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('entity_type', 50).notNullable();
table.uuid('entity_id').notNullable();
table.string('action', 50).notNullable();
table.string('actor_type', 50).notNullable();
table.uuid('actor_id');
table.jsonb('old_value');
table.jsonb('new_value');
table.string('ip_address', 45);
table.text('user_agent');
table.string('correlation_id', 100);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.index(['entity_type', 'entity_id'], 'idx_audit_entity');
table.index('entity_type', 'idx_audit_entitytype');
table.index('action', 'idx_audit_action');
table.index('created_at', 'idx_audit_created');
table.index('correlation_id', 'idx_audit_correlation');
});
// Blockchain Transactions table
await knex.schema.createTable('blockchain_transactions', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('tx_hash', 66).notNullable().unique();
table.string('tx_type', 50).notNullable();
table.string('related_entity_type', 50).notNullable();
table.uuid('related_entity_id').notNullable();
table.string('from_address', 42).notNullable();
table.string('to_address', 42);
table.string('status', 20).notNullable().defaultTo('PENDING');
table.bigInteger('block_number');
table.bigInteger('gas_used');
table.text('error_message');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('confirmed_at');
table.index('tx_hash', 'idx_bctx_hash');
table.index('tx_type', 'idx_bctx_type');
table.index('status', 'idx_bctx_status');
table.index('related_entity_id', 'idx_bctx_entity');
table.index('created_at', 'idx_bctx_created');
});
}
export async function down(knex: Knex): Promise<void> {
await knex.schema.dropTableIfExists('blockchain_transactions');
await knex.schema.dropTableIfExists('audit_logs');
await knex.schema.dropTableIfExists('webhook_logs');
await knex.schema.dropTableIfExists('webhooks');
await knex.schema.dropTableIfExists('workflow_states');
await knex.schema.dropTableIfExists('approvals');
await knex.schema.dropTableIfExists('document_versions');
await knex.schema.dropTableIfExists('documents');
await knex.schema.dropTableIfExists('license_requests');
await knex.schema.dropTableIfExists('workflows');
await knex.schema.dropTableIfExists('departments');
await knex.schema.dropTableIfExists('applicants');
}

View File

@@ -0,0 +1,107 @@
import type { Knex } from 'knex';
export async function up(knex: Knex): Promise<void> {
// Users table for email/password authentication
await knex.schema.createTable('users', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('email', 255).notNullable().unique();
table.string('password_hash', 255).notNullable();
table.string('name', 255).notNullable();
table.enum('role', ['ADMIN', 'DEPARTMENT', 'CITIZEN']).notNullable();
table.uuid('department_id').references('id').inTable('departments').onDelete('SET NULL');
table.string('wallet_address', 42);
table.text('wallet_encrypted_key');
table.string('phone', 20);
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('last_login_at');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('email', 'idx_user_email');
table.index('role', 'idx_user_role');
table.index('department_id', 'idx_user_department');
table.index('is_active', 'idx_user_active');
});
// Wallets table for storing encrypted private keys
await knex.schema.createTable('wallets', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('address', 42).notNullable().unique();
table.text('encrypted_private_key').notNullable();
table.enum('owner_type', ['USER', 'DEPARTMENT']).notNullable();
table.uuid('owner_id').notNullable();
table.boolean('is_active').notNullable().defaultTo(true);
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.timestamp('updated_at').notNullable().defaultTo(knex.fn.now());
table.index('address', 'idx_wallet_address');
table.index(['owner_type', 'owner_id'], 'idx_wallet_owner');
});
// Blockchain events table
await knex.schema.createTable('blockchain_events', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.string('tx_hash', 66).notNullable();
table.string('event_name', 100).notNullable();
table.string('contract_address', 42).notNullable();
table.bigInteger('block_number').notNullable();
table.integer('log_index').notNullable();
table.jsonb('args').notNullable();
table.jsonb('decoded_args');
table.string('related_entity_type', 50);
table.uuid('related_entity_id');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.unique(['tx_hash', 'log_index'], { indexName: 'uq_event_tx_log' });
table.index('tx_hash', 'idx_event_tx');
table.index('event_name', 'idx_event_name');
table.index('contract_address', 'idx_event_contract');
table.index('block_number', 'idx_event_block');
table.index('created_at', 'idx_event_created');
table.index(['related_entity_type', 'related_entity_id'], 'idx_event_entity');
});
// Application logs table
await knex.schema.createTable('application_logs', (table) => {
table.uuid('id').primary().defaultTo(knex.raw('uuid_generate_v4()'));
table.enum('level', ['DEBUG', 'INFO', 'WARN', 'ERROR']).notNullable();
table.string('module', 100).notNullable();
table.text('message').notNullable();
table.jsonb('context');
table.text('stack_trace');
table.uuid('user_id');
table.string('correlation_id', 100);
table.string('ip_address', 45);
table.text('user_agent');
table.timestamp('created_at').notNullable().defaultTo(knex.fn.now());
table.index('level', 'idx_applog_level');
table.index('module', 'idx_applog_module');
table.index('user_id', 'idx_applog_user');
table.index('correlation_id', 'idx_applog_correlation');
table.index('created_at', 'idx_applog_created');
});
// Add additional fields to departments table
await knex.schema.alterTable('departments', (table) => {
table.text('description');
table.string('contact_email', 255);
table.string('contact_phone', 20);
table.timestamp('last_webhook_at');
});
}
export async function down(knex: Knex): Promise<void> {
// Remove additional fields from departments
await knex.schema.alterTable('departments', (table) => {
table.dropColumn('description');
table.dropColumn('contact_email');
table.dropColumn('contact_phone');
table.dropColumn('last_webhook_at');
});
await knex.schema.dropTableIfExists('application_logs');
await knex.schema.dropTableIfExists('blockchain_events');
await knex.schema.dropTableIfExists('wallets');
await knex.schema.dropTableIfExists('users');
}

View File

@@ -0,0 +1,20 @@
import { Module, Global, Provider } from '@nestjs/common';
import * as models from './models';
const modelProviders: Provider[] = Object.values(models)
.filter((model: any) =>
typeof model === 'function' &&
model.prototype &&
(model.prototype instanceof models.BaseModel || model === models.BaseModel)
)
.map((model: any) => ({
provide: model,
useValue: model,
}));
@Global()
@Module({
providers: modelProviders,
exports: modelProviders,
})
export class ModelsModule { }

View File

@@ -0,0 +1,61 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class Applicant extends BaseModel {
static tableName = 'applicants';
id!: string;
digilockerId!: string;
name!: string;
email!: string;
phone?: string;
walletAddress?: string;
isActive!: boolean;
firstName?: string;
lastName?: string;
departmentCode?: string;
lastLoginAt?: Date;
createdAt!: Date;
updatedAt!: Date;
// Relations
requests?: Model[];
static get jsonSchema() {
return {
type: 'object',
required: ['digilockerId', 'name', 'email'],
properties: {
id: { type: 'string', format: 'uuid' },
digilockerId: { type: 'string', maxLength: 255 },
name: { type: 'string', maxLength: 255 },
email: { type: 'string', format: 'email', maxLength: 255 },
phone: { type: ['string', 'null'], maxLength: 20 },
walletAddress: { type: ['string', 'null'], maxLength: 42 },
isActive: { type: 'boolean', default: true },
firstName: { type: ['string', 'null'] },
lastName: { type: ['string', 'null'] },
departmentCode: { type: ['string', 'null'] },
lastLoginAt: { type: ['string', 'null'], format: 'date-time' },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { LicenseRequest } = require('./license-request.model');
return {
requests: {
relation: Model.HasManyRelation,
modelClass: LicenseRequest,
join: {
from: 'applicants.id',
to: 'license_requests.applicant_id',
},
},
};
};
}
}

View File

@@ -0,0 +1,37 @@
import { BaseModel } from './base.model';
export class ApplicationLog extends BaseModel {
static tableName = 'application_logs';
id!: string;
level!: 'DEBUG' | 'INFO' | 'WARN' | 'ERROR';
module!: string;
message!: string;
context?: Record<string, any>;
stackTrace?: string;
userId?: string;
correlationId?: string;
ipAddress?: string;
userAgent?: string;
createdAt!: Date;
static get jsonSchema() {
return {
type: 'object',
required: ['level', 'module', 'message'],
properties: {
id: { type: 'string', format: 'uuid' },
level: { type: 'string', enum: ['DEBUG', 'INFO', 'WARN', 'ERROR'] },
module: { type: 'string', maxLength: 100 },
message: { type: 'string' },
context: { type: ['object', 'null'] },
stackTrace: { type: ['string', 'null'] },
userId: { type: ['string', 'null'], format: 'uuid' },
correlationId: { type: ['string', 'null'], maxLength: 100 },
ipAddress: { type: ['string', 'null'], maxLength: 45 },
userAgent: { type: ['string', 'null'] },
createdAt: { type: 'string', format: 'date-time' },
},
};
}
}

View File

@@ -0,0 +1,83 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
import { ApprovalStatus } from '../../common/enums';
export { ApprovalStatus };
export class Approval extends BaseModel {
static tableName = 'approvals';
id!: string;
requestId!: string;
departmentId!: string;
status!: ApprovalStatus;
remarks?: string;
remarksHash?: string;
reviewedDocuments?: string[];
blockchainTxHash?: string;
isActive!: boolean;
invalidatedAt?: Date;
invalidationReason?: string;
revalidatedAt?: Date;
approvedBy?: string;
rejectionReason?: string;
requiredDocuments?: string[];
completedAt?: Date;
createdAt!: Date;
updatedAt!: Date;
// Relations
request?: Model;
department?: Model;
static get jsonSchema() {
return {
type: 'object',
required: ['requestId', 'departmentId'],
properties: {
id: { type: 'string', format: 'uuid' },
requestId: { type: 'string', format: 'uuid' },
departmentId: { type: 'string', format: 'uuid' },
status: { type: 'string', maxLength: 50, default: 'PENDING' },
remarks: { type: ['string', 'null'] },
remarksHash: { type: ['string', 'null'], maxLength: 66 },
reviewedDocuments: { type: ['array', 'null'], items: { type: 'string' } },
blockchainTxHash: { type: ['string', 'null'], maxLength: 66 },
isActive: { type: 'boolean', default: true },
invalidatedAt: { type: ['string', 'null'], format: 'date-time' },
invalidationReason: { type: ['string', 'null'], maxLength: 255 },
revalidatedAt: { type: ['string', 'null'], format: 'date-time' },
approvedBy: { type: ['string', 'null'] },
rejectionReason: { type: ['string', 'null'] },
requiredDocuments: { type: ['array', 'null'], items: { type: 'string' } },
completedAt: { type: ['string', 'null'], format: 'date-time' },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { LicenseRequest } = require('./license-request.model');
const { Department } = require('./department.model');
return {
request: {
relation: Model.BelongsToOneRelation,
modelClass: LicenseRequest,
join: {
from: 'approvals.request_id',
to: 'license_requests.id',
},
},
department: {
relation: Model.BelongsToOneRelation,
modelClass: Department,
join: {
from: 'approvals.department_id',
to: 'departments.id',
},
},
};
};
}
}

View File

@@ -0,0 +1,49 @@
import { Model, QueryContext } from 'objection';
import { v4 as uuidv4 } from 'uuid';
import { BaseModel } from './base.model';
export class AuditLog extends BaseModel {
static tableName = 'audit_logs';
id!: string;
entityType!: string;
entityId!: string;
action!: string;
actorType!: string;
actorId?: string;
oldValue?: Record<string, unknown>;
newValue?: Record<string, unknown>;
ipAddress?: string;
userAgent?: string;
correlationId?: string;
createdAt!: Date;
async $beforeInsert(queryContext: QueryContext): Promise<void> {
await super.$beforeInsert(queryContext);
if (!this.id) {
this.id = uuidv4();
}
this.createdAt = new Date();
}
static get jsonSchema() {
return {
type: 'object',
required: ['entityType', 'entityId', 'action', 'actorType'],
properties: {
id: { type: 'string', format: 'uuid' },
entityType: { type: 'string', maxLength: 50 },
entityId: { type: 'string', format: 'uuid' },
action: { type: 'string', maxLength: 50 },
actorType: { type: 'string', maxLength: 50 },
actorId: { type: ['string', 'null'], format: 'uuid' },
oldValue: { type: ['object', 'null'] },
newValue: { type: ['object', 'null'] },
ipAddress: { type: ['string', 'null'], maxLength: 45 },
userAgent: { type: ['string', 'null'] },
correlationId: { type: ['string', 'null'], maxLength: 100 },
createdAt: { type: 'string', format: 'date-time' },
},
};
}
}

View File

@@ -0,0 +1,34 @@
import { Model, ModelOptions, QueryContext, snakeCaseMappers } from 'objection';
import { v4 as uuidv4 } from 'uuid';
export class BaseModel extends Model {
id!: string;
createdAt!: Date;
updatedAt!: Date;
static get columnNameMappers() {
return snakeCaseMappers();
}
static get modelPaths(): string[] {
return [__dirname];
}
async $beforeInsert(queryContext: QueryContext): Promise<void> {
await super.$beforeInsert(queryContext);
if (!this.id) {
this.id = uuidv4();
}
this.createdAt = new Date();
this.updatedAt = new Date();
}
async $beforeUpdate(opt: ModelOptions, queryContext: QueryContext): Promise<void> {
await super.$beforeUpdate(opt, queryContext);
this.updatedAt = new Date();
}
static get useLimitInFirst(): boolean {
return true;
}
}

View File

@@ -0,0 +1,37 @@
import { BaseModel } from './base.model';
export class BlockchainEvent extends BaseModel {
static tableName = 'blockchain_events';
id!: string;
txHash!: string;
eventName!: string;
contractAddress!: string;
blockNumber!: number;
logIndex!: number;
args!: Record<string, any>;
decodedArgs?: Record<string, any>;
relatedEntityType?: string;
relatedEntityId?: string;
createdAt!: Date;
static get jsonSchema() {
return {
type: 'object',
required: ['txHash', 'eventName', 'contractAddress', 'blockNumber', 'logIndex', 'args'],
properties: {
id: { type: 'string', format: 'uuid' },
txHash: { type: 'string', maxLength: 66 },
eventName: { type: 'string', maxLength: 100 },
contractAddress: { type: 'string', maxLength: 42 },
blockNumber: { type: 'integer' },
logIndex: { type: 'integer' },
args: { type: 'object' },
decodedArgs: { type: ['object', 'null'] },
relatedEntityType: { type: ['string', 'null'], maxLength: 50 },
relatedEntityId: { type: ['string', 'null'], format: 'uuid' },
createdAt: { type: 'string', format: 'date-time' },
},
};
}
}

View File

@@ -0,0 +1,52 @@
import { Model, QueryContext } from 'objection';
import { v4 as uuidv4 } from 'uuid';
import { TransactionType, TransactionStatus } from '../../common/enums';
import { BaseModel } from './base.model';
export class BlockchainTransaction extends BaseModel {
static tableName = 'blockchain_transactions';
id!: string;
txHash!: string;
txType!: TransactionType;
relatedEntityType!: string;
relatedEntityId!: string;
fromAddress!: string;
toAddress?: string;
status!: TransactionStatus;
blockNumber?: string;
gasUsed?: string;
errorMessage?: string;
createdAt!: Date;
confirmedAt?: Date;
async $beforeInsert(queryContext: QueryContext): Promise<void> {
await super.$beforeInsert(queryContext);
if (!this.id) {
this.id = uuidv4();
}
this.createdAt = new Date();
}
static get jsonSchema() {
return {
type: 'object',
required: ['txHash', 'txType', 'relatedEntityType', 'relatedEntityId', 'fromAddress'],
properties: {
id: { type: 'string', format: 'uuid' },
txHash: { type: 'string', maxLength: 66 },
txType: { type: 'string', maxLength: 50 },
relatedEntityType: { type: 'string', maxLength: 50 },
relatedEntityId: { type: 'string', format: 'uuid' },
fromAddress: { type: 'string', maxLength: 42 },
toAddress: { type: ['string', 'null'], maxLength: 42 },
status: { type: 'string', maxLength: 20, default: 'PENDING' },
blockNumber: { type: ['string', 'null'] },
gasUsed: { type: ['string', 'null'] },
errorMessage: { type: ['string', 'null'] },
createdAt: { type: 'string', format: 'date-time' },
confirmedAt: { type: ['string', 'null'], format: 'date-time' },
},
};
}
}

View File

@@ -0,0 +1,75 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class Department extends BaseModel {
static tableName = 'departments';
id!: string;
code!: string;
name!: string;
walletAddress?: string;
apiKeyHash?: string;
apiSecretHash?: string;
webhookUrl?: string;
webhookSecretHash?: string;
isActive!: boolean;
description?: string;
contactEmail?: string;
contactPhone?: string;
lastWebhookAt?: Date;
createdAt!: Date;
updatedAt!: Date;
// Relations
approvals?: Model[];
webhooks?: Model[];
static get jsonSchema() {
return {
type: 'object',
required: ['code', 'name'],
properties: {
id: { type: 'string', format: 'uuid' },
code: { type: 'string', maxLength: 50 },
name: { type: 'string', maxLength: 255 },
walletAddress: { type: ['string', 'null'], maxLength: 42 },
apiKeyHash: { type: ['string', 'null'], maxLength: 255 },
apiSecretHash: { type: ['string', 'null'], maxLength: 255 },
webhookUrl: { type: ['string', 'null'], maxLength: 500 },
webhookSecretHash: { type: ['string', 'null'], maxLength: 255 },
isActive: { type: 'boolean', default: true },
description: { type: ['string', 'null'] },
contactEmail: { type: ['string', 'null'] },
contactPhone: { type: ['string', 'null'] },
lastWebhookAt: { type: ['string', 'null'], format: 'date-time' },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Approval } = require('./approval.model');
const { Webhook } = require('./webhook.model');
return {
approvals: {
relation: Model.HasManyRelation,
modelClass: Approval,
join: {
from: 'departments.id',
to: 'approvals.department_id',
},
},
webhooks: {
relation: Model.HasManyRelation,
modelClass: Webhook,
join: {
from: 'departments.id',
to: 'webhooks.department_id',
},
},
};
};
}
}

View File

@@ -0,0 +1,55 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class DocumentVersion extends BaseModel {
static tableName = 'document_versions';
id!: string;
documentId!: string;
version!: number;
hash!: string;
minioPath!: string;
fileSize!: string;
mimeType!: string;
uploadedBy!: string;
blockchainTxHash?: string;
createdAt!: Date;
// Relations
document?: Model;
static get jsonSchema() {
return {
type: 'object',
required: ['documentId', 'version', 'hash', 'minioPath', 'fileSize', 'mimeType', 'uploadedBy'],
properties: {
id: { type: 'string', format: 'uuid' },
documentId: { type: 'string', format: 'uuid' },
version: { type: 'integer' },
hash: { type: 'string', maxLength: 66 },
minioPath: { type: 'string', maxLength: 500 },
fileSize: { type: 'string' },
mimeType: { type: 'string', maxLength: 100 },
uploadedBy: { type: 'string', format: 'uuid' },
blockchainTxHash: { type: ['string', 'null'], maxLength: 66 },
createdAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Document } = require('./document.model');
return {
document: {
relation: Model.BelongsToOneRelation,
modelClass: Document,
join: {
from: 'document_versions.document_id',
to: 'documents.id',
},
},
};
};
}
}

View File

@@ -0,0 +1,69 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class Document extends BaseModel {
static tableName = 'documents';
id!: string;
requestId!: string;
docType!: string;
originalFilename!: string;
currentVersion!: number;
currentHash!: string;
minioBucket!: string;
isActive!: boolean;
downloadCount!: number;
lastDownloadedAt?: string;
createdAt!: Date;
updatedAt!: Date;
// Relations
request?: Model;
versions?: Model[];
static get jsonSchema() {
return {
type: 'object',
required: ['requestId', 'docType', 'originalFilename', 'currentHash', 'minioBucket'],
properties: {
id: { type: 'string', format: 'uuid' },
requestId: { type: 'string', format: 'uuid' },
docType: { type: 'string', maxLength: 100 },
originalFilename: { type: 'string', maxLength: 255 },
currentVersion: { type: 'integer', default: 1 },
currentHash: { type: 'string', maxLength: 66 },
minioBucket: { type: 'string', maxLength: 100 },
isActive: { type: 'boolean', default: true },
downloadCount: { type: 'integer', default: 0 },
lastDownloadedAt: { type: ['string', 'null'], format: 'date-time' },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { LicenseRequest } = require('./license-request.model');
const { DocumentVersion } = require('./document-version.model');
return {
request: {
relation: Model.BelongsToOneRelation,
modelClass: LicenseRequest,
join: {
from: 'documents.request_id',
to: 'license_requests.id',
},
},
versions: {
relation: Model.HasManyRelation,
modelClass: DocumentVersion,
join: {
from: 'documents.id',
to: 'document_versions.document_id',
},
},
};
};
}
}

View File

@@ -0,0 +1,17 @@
export * from './base.model';
export * from './applicant.model';
export * from './department.model';
export * from './license-request.model';
export * from './document.model';
export * from './document-version.model';
export * from './approval.model';
export * from './workflow.model';
export * from './workflow-state.model';
export * from './webhook.model';
export * from './webhook-log.model';
export * from './audit-log.model';
export * from './blockchain-transaction.model';
export * from './user.model';
export * from './wallet.model';
export * from './blockchain-event.model';
export * from './application-log.model';

View File

@@ -0,0 +1,108 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
import { RequestStatus, RequestType } from '../../common/enums';
export const LicenseRequestStatus = RequestStatus;
export type LicenseRequestStatus = RequestStatus;
export class LicenseRequest extends BaseModel {
static tableName = 'license_requests';
id!: string;
requestNumber!: string;
tokenId?: string;
applicantId!: string;
requestType!: RequestType;
workflowId?: string;
status!: RequestStatus;
metadata?: Record<string, unknown>;
currentStageId?: string;
blockchainTxHash?: string;
createdAt!: Date;
updatedAt!: Date;
submittedAt?: Date;
approvedAt?: Date;
// Relations
applicant?: Model;
workflow?: Model;
documents?: Model[];
approvals?: Model[];
workflowState?: Model;
static get jsonSchema() {
return {
type: 'object',
required: ['requestNumber', 'applicantId', 'requestType'],
properties: {
id: { type: 'string', format: 'uuid' },
requestNumber: { type: 'string', maxLength: 50 },
tokenId: { type: ['string', 'null'] },
applicantId: { type: 'string', format: 'uuid' },
requestType: { type: 'string', maxLength: 100 },
workflowId: { type: ['string', 'null'], format: 'uuid' },
status: { type: 'string', maxLength: 50, default: 'DRAFT' },
metadata: { type: ['object', 'null'] },
currentStageId: { type: ['string', 'null'], maxLength: 100 },
blockchainTxHash: { type: ['string', 'null'], maxLength: 66 },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
submittedAt: { type: ['string', 'null'], format: 'date-time' },
approvedAt: { type: ['string', 'null'], format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Applicant } = require('./applicant.model');
const { Workflow } = require('./workflow.model');
const { Document } = require('./document.model');
const { Approval } = require('./approval.model');
const { WorkflowState } = require('./workflow-state.model');
return {
applicant: {
relation: Model.BelongsToOneRelation,
modelClass: Applicant,
join: {
from: 'license_requests.applicant_id',
to: 'applicants.id',
},
},
workflow: {
relation: Model.BelongsToOneRelation,
modelClass: Workflow,
join: {
from: 'license_requests.workflow_id',
to: 'workflows.id',
},
},
documents: {
relation: Model.HasManyRelation,
modelClass: Document,
join: {
from: 'license_requests.id',
to: 'documents.request_id',
},
},
approvals: {
relation: Model.HasManyRelation,
modelClass: Approval,
join: {
from: 'license_requests.id',
to: 'approvals.request_id',
},
},
workflowState: {
relation: Model.HasOneRelation,
modelClass: WorkflowState,
join: {
from: 'license_requests.id',
to: 'workflow_states.request_id',
},
},
};
};
}
}

View File

@@ -0,0 +1,61 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class User extends BaseModel {
static tableName = 'users';
id!: string;
email!: string;
passwordHash!: string;
name!: string;
role!: 'ADMIN' | 'DEPARTMENT' | 'CITIZEN';
departmentId?: string;
walletAddress?: string;
walletEncryptedKey?: string;
phone?: string;
isActive!: boolean;
lastLoginAt?: Date;
createdAt!: Date;
updatedAt!: Date;
// Relations
department?: Model;
static get jsonSchema() {
return {
type: 'object',
required: ['email', 'passwordHash', 'name', 'role'],
properties: {
id: { type: 'string', format: 'uuid' },
email: { type: 'string', format: 'email', maxLength: 255 },
passwordHash: { type: 'string', maxLength: 255 },
name: { type: 'string', maxLength: 255 },
role: { type: 'string', enum: ['ADMIN', 'DEPARTMENT', 'CITIZEN'] },
departmentId: { type: ['string', 'null'], format: 'uuid' },
walletAddress: { type: ['string', 'null'], maxLength: 42 },
walletEncryptedKey: { type: ['string', 'null'] },
phone: { type: ['string', 'null'], maxLength: 20 },
isActive: { type: 'boolean', default: true },
lastLoginAt: { type: ['string', 'null'], format: 'date-time' },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Department } = require('./department.model');
return {
department: {
relation: Model.BelongsToOneRelation,
modelClass: Department,
join: {
from: 'users.department_id',
to: 'departments.id',
},
},
};
};
}
}

View File

@@ -0,0 +1,32 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class Wallet extends BaseModel {
static tableName = 'wallets';
id!: string;
address!: string;
encryptedPrivateKey!: string;
ownerType!: 'USER' | 'DEPARTMENT';
ownerId!: string;
isActive!: boolean;
createdAt!: Date;
updatedAt!: Date;
static get jsonSchema() {
return {
type: 'object',
required: ['address', 'encryptedPrivateKey', 'ownerType', 'ownerId'],
properties: {
id: { type: 'string', format: 'uuid' },
address: { type: 'string', maxLength: 42 },
encryptedPrivateKey: { type: 'string' },
ownerType: { type: 'string', enum: ['USER', 'DEPARTMENT'] },
ownerId: { type: 'string', format: 'uuid' },
isActive: { type: 'boolean', default: true },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
}

View File

@@ -0,0 +1,68 @@
import { Model, RelationMappings, RelationMappingsThunk, QueryContext } from 'objection';
import { v4 as uuidv4 } from 'uuid';
import { WebhookDeliveryStatus } from '../../common/enums';
import { BaseModel } from './base.model';
export const WebhookLogStatus = WebhookDeliveryStatus;
export type WebhookLogStatus = WebhookDeliveryStatus;
export class WebhookLog extends BaseModel {
static tableName = 'webhook_logs';
id!: string;
webhookId!: string;
eventType!: string;
payload!: Record<string, unknown>;
responseStatus?: number;
responseBody?: string;
responseTime?: number;
retryCount!: number;
status!: WebhookDeliveryStatus;
createdAt!: Date;
// Relations
webhook?: Model;
async $beforeInsert(queryContext: QueryContext): Promise<void> {
await super.$beforeInsert(queryContext);
if (!this.id) {
this.id = uuidv4();
}
this.createdAt = new Date();
}
static get jsonSchema() {
return {
type: 'object',
required: ['webhookId', 'eventType', 'payload'],
properties: {
id: { type: 'string', format: 'uuid' },
webhookId: { type: 'string', format: 'uuid' },
eventType: { type: 'string', maxLength: 100 },
payload: { type: 'object' },
responseStatus: { type: ['integer', 'null'] },
responseBody: { type: ['string', 'null'] },
responseTime: { type: ['integer', 'null'] },
retryCount: { type: 'integer', default: 0 },
status: { type: 'string', maxLength: 20, default: 'PENDING' },
createdAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Webhook } = require('./webhook.model');
return {
webhook: {
relation: Model.BelongsToOneRelation,
modelClass: Webhook,
join: {
from: 'webhook_logs.webhook_id',
to: 'webhooks.id',
},
},
};
};
}
}

View File

@@ -0,0 +1,61 @@
import { Model, RelationMappings, RelationMappingsThunk } from 'objection';
import { BaseModel } from './base.model';
export class Webhook extends BaseModel {
static tableName = 'webhooks';
id!: string;
departmentId!: string;
url!: string;
events!: string[];
secretHash!: string;
isActive!: boolean;
createdAt!: Date;
updatedAt!: Date;
// Relations
department?: Model;
logs?: Model[];
static get jsonSchema() {
return {
type: 'object',
required: ['departmentId', 'url', 'events', 'secretHash'],
properties: {
id: { type: 'string', format: 'uuid' },
departmentId: { type: 'string', format: 'uuid' },
url: { type: 'string', maxLength: 500 },
events: { type: 'array', items: { type: 'string' } },
secretHash: { type: 'string', maxLength: 255 },
isActive: { type: 'boolean', default: true },
createdAt: { type: 'string', format: 'date-time' },
updatedAt: { type: 'string', format: 'date-time' },
},
};
}
static get relationMappings(): RelationMappingsThunk {
return (): RelationMappings => {
const { Department } = require('./department.model');
const { WebhookLog } = require('./webhook-log.model');
return {
department: {
relation: Model.BelongsToOneRelation,
modelClass: Department,
join: {
from: 'webhooks.department_id',
to: 'departments.id',
},
},
logs: {
relation: Model.HasManyRelation,
modelClass: WebhookLog,
join: {
from: 'webhooks.id',
to: 'webhook_logs.webhook_id',
},
},
};
};
}
}

Some files were not shown because too many files have changed in this diff Show More