feat: Goa GEL Blockchain e-Licensing Platform - Full Stack Implementation

Complete implementation of the Goa Government e-Licensing platform with:

Backend:
- NestJS API with JWT authentication
- PostgreSQL database with Knex ORM
- Redis caching and session management
- MinIO document storage
- Hyperledger Besu blockchain integration
- Multi-department workflow system
- Comprehensive API tests (266/282 passing)

Frontend:
- Angular 21 with standalone components
- Angular Material + TailwindCSS UI
- Visual workflow builder
- Document upload with progress tracking
- Blockchain explorer integration
- Role-based dashboards (Admin, Department, Citizen)
- E2E tests with Playwright (37 tests)

Infrastructure:
- Docker Compose orchestration
- Blockscout blockchain explorer
- Development and production configurations
This commit is contained in:
Mahi
2026-02-07 10:23:29 -04:00
commit 80566bf0a2
441 changed files with 102418 additions and 0 deletions

5
blockchain/.env.example Normal file
View File

@@ -0,0 +1,5 @@
# Deployer private key (Hardhat default account #0 — this key is publicly known;
# use it for local development only and NEVER fund it on a real network)
DEPLOYER_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
# Besu RPC URL
BESU_RPC_URL=http://localhost:8545

8
blockchain/.gitignore vendored Normal file
View File

@@ -0,0 +1,8 @@
node_modules
.env
cache
artifacts
typechain-types
coverage
coverage.json
deployments/*.json

View File

@@ -0,0 +1,182 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "@openzeppelin/contracts/access/Ownable.sol";
/**
 * @title ApprovalManager
 * @notice Records departmental approval actions as an immutable on-chain audit trail
 * @dev Writes are restricted to the contract owner (the platform backend); reads are public
 */
contract ApprovalManager is Ownable {
    enum ApprovalStatus {
        PENDING,
        APPROVED,
        REJECTED,
        CHANGES_REQUESTED,
        INVALIDATED
    }
    struct Approval {
        bytes32 id;
        string requestId;
        address departmentAddress;
        ApprovalStatus status;
        string remarksHash;
        string[] documentHashes;
        uint256 timestamp;
        bool isValid;
    }
    // Approval ID => full approval record
    mapping(bytes32 => Approval) private _approvals;
    // License request ID => IDs of every approval recorded against it
    mapping(string => bytes32[]) private _requestApprovals;
    // Monotonic counter so derived IDs stay unique even within one block
    uint256 private _approvalCounter;
    // Events
    event ApprovalRecorded(
        bytes32 indexed approvalId,
        string indexed requestId,
        address indexed departmentAddress,
        ApprovalStatus status,
        uint256 timestamp
    );
    event ApprovalInvalidated(
        bytes32 indexed approvalId,
        string reason
    );
    constructor() Ownable(msg.sender) {}
    /**
     * @notice Record an approval action
     * @param requestId The license request ID
     * @param departmentAddress The address of the approving department
     * @param status The approval status
     * @param remarksHash Hash of the approval remarks
     * @param documentHashes Array of document hashes that were reviewed
     * @return approvalId The unique ID of the recorded approval
     */
    function recordApproval(
        string calldata requestId,
        address departmentAddress,
        ApprovalStatus status,
        string calldata remarksHash,
        string[] calldata documentHashes
    ) public onlyOwner returns (bytes32) {
        require(bytes(requestId).length > 0, "Request ID required");
        require(departmentAddress != address(0), "Invalid department address");
        _approvalCounter++;
        bytes32 newId = keccak256(
            abi.encodePacked(requestId, departmentAddress, block.timestamp, _approvalCounter)
        );
        Approval storage entry = _approvals[newId];
        entry.id = newId;
        entry.requestId = requestId;
        entry.departmentAddress = departmentAddress;
        entry.status = status;
        entry.remarksHash = remarksHash;
        entry.timestamp = block.timestamp;
        entry.isValid = true;
        // A calldata string[] cannot be assigned to storage wholesale; push element by element.
        uint256 hashCount = documentHashes.length;
        for (uint256 idx = 0; idx < hashCount; ++idx) {
            entry.documentHashes.push(documentHashes[idx]);
        }
        _requestApprovals[requestId].push(newId);
        emit ApprovalRecorded(newId, requestId, departmentAddress, status, block.timestamp);
        return newId;
    }
    /**
     * @notice Get all approvals for a request
     * @param requestId The license request ID
     * @return Array of Approval structs
     */
    function getRequestApprovals(string calldata requestId)
        public
        view
        returns (Approval[] memory)
    {
        bytes32[] storage ids = _requestApprovals[requestId];
        Approval[] memory result = new Approval[](ids.length);
        for (uint256 idx = 0; idx < ids.length; ++idx) {
            result[idx] = _approvals[ids[idx]];
        }
        return result;
    }
    /**
     * @notice Invalidate an existing approval
     * @param approvalId The approval ID to invalidate
     * @param reason The reason for invalidation
     */
    function invalidateApproval(bytes32 approvalId, string calldata reason)
        public
        onlyOwner
    {
        Approval storage entry = _approvals[approvalId];
        require(entry.isValid, "Approval not found or already invalid");
        entry.isValid = false;
        entry.status = ApprovalStatus.INVALIDATED;
        emit ApprovalInvalidated(approvalId, reason);
    }
    /**
     * @notice Verify if approval remarks match the stored hash
     * @param approvalId The approval ID
     * @param remarksHash The hash to verify
     * @return True if the approval is valid and the hashes match
     */
    function verifyApproval(bytes32 approvalId, string calldata remarksHash)
        public
        view
        returns (bool)
    {
        Approval storage entry = _approvals[approvalId];
        if (!entry.isValid) {
            return false;
        }
        return keccak256(bytes(entry.remarksHash)) == keccak256(bytes(remarksHash));
    }
    /**
     * @notice Get details of a specific approval
     * @param approvalId The approval ID
     * @return The Approval struct
     */
    function getApprovalDetails(bytes32 approvalId)
        public
        view
        returns (Approval memory)
    {
        // timestamp is only ever set at creation, so zero means "never recorded".
        require(_approvals[approvalId].timestamp > 0, "Approval not found");
        return _approvals[approvalId];
    }
    /**
     * @notice Get the count of approvals for a request
     * @param requestId The license request ID
     * @return The number of approvals
     */
    function getApprovalCount(string calldata requestId) public view returns (uint256) {
        return _requestApprovals[requestId].length;
    }
}

View File

@@ -0,0 +1,3 @@
<claude-mem-context>
</claude-mem-context>

View File

@@ -0,0 +1,193 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "@openzeppelin/contracts/access/Ownable.sol";
/**
 * @title DocumentChain
 * @notice Records and verifies document hashes on the blockchain
 * @dev Keeps a full per-document version history so tampering with any version is detectable
 */
contract DocumentChain is Ownable {
    struct DocumentRecord {
        bytes32 id;
        string requestId;
        string documentId;
        string hash;
        uint256 version;
        uint256 timestamp;
        address uploadedBy;
    }
    // Document ID => every record ever written for it, in insertion order
    mapping(string => DocumentRecord[]) private _documentHistory;
    // Document ID => hash of the most recently recorded version
    mapping(string => string) private _latestHashes;
    // Hash => document ID (reverse lookup; a later record with the same hash overwrites)
    mapping(string => string) private _hashToDocument;
    // Monotonic counter so derived record IDs stay unique within one block
    uint256 private _recordCounter;
    // Events
    event DocumentRecorded(
        bytes32 indexed recordId,
        string indexed requestId,
        string documentId,
        string hash,
        uint256 version,
        uint256 timestamp
    );
    constructor() Ownable(msg.sender) {}
    /**
     * @notice Record a document hash
     * @param requestId The license request ID
     * @param documentId The document ID
     * @param hash The document hash (e.g., SHA-256)
     * @param version The document version
     * @return recordId The unique record ID
     */
    function recordDocumentHash(
        string calldata requestId,
        string calldata documentId,
        string calldata hash,
        uint256 version
    ) public onlyOwner returns (bytes32) {
        require(bytes(requestId).length > 0, "Request ID required");
        require(bytes(documentId).length > 0, "Document ID required");
        require(bytes(hash).length > 0, "Hash required");
        _recordCounter++;
        bytes32 newRecordId = keccak256(
            abi.encodePacked(documentId, version, block.timestamp, _recordCounter)
        );
        _documentHistory[documentId].push(
            DocumentRecord({
                id: newRecordId,
                requestId: requestId,
                documentId: documentId,
                hash: hash,
                version: version,
                timestamp: block.timestamp,
                uploadedBy: msg.sender
            })
        );
        _latestHashes[documentId] = hash;
        _hashToDocument[hash] = documentId;
        emit DocumentRecorded(newRecordId, requestId, documentId, hash, version, block.timestamp);
        return newRecordId;
    }
    /**
     * @notice Verify if a document hash exists
     * @param documentId The document ID
     * @param hash The hash to verify
     * @return True if the hash matches any recorded version
     */
    function verifyDocumentHash(string calldata documentId, string calldata hash)
        public
        view
        returns (bool)
    {
        // Hash the needle once; compare keccak digests since strings cannot be compared directly.
        bytes32 target = keccak256(bytes(hash));
        DocumentRecord[] storage history = _documentHistory[documentId];
        for (uint256 idx = 0; idx < history.length; ++idx) {
            if (keccak256(bytes(history[idx].hash)) == target) {
                return true;
            }
        }
        return false;
    }
    /**
     * @notice Verify if a hash is the latest version
     * @param documentId The document ID
     * @param hash The hash to verify
     * @return True if the hash is the latest version
     */
    function verifyLatestHash(string calldata documentId, string calldata hash)
        public
        view
        returns (bool)
    {
        return keccak256(bytes(_latestHashes[documentId])) == keccak256(bytes(hash));
    }
    /**
     * @notice Get the complete history of a document
     * @param documentId The document ID
     * @return Array of DocumentRecord structs
     */
    function getDocumentHistory(string calldata documentId)
        public
        view
        returns (DocumentRecord[] memory)
    {
        return _documentHistory[documentId];
    }
    /**
     * @notice Get the latest hash for a document
     * @param documentId The document ID
     * @return The latest document hash
     */
    function getLatestDocumentHash(string calldata documentId)
        public
        view
        returns (string memory)
    {
        return _latestHashes[documentId];
    }
    /**
     * @notice Get document ID by hash
     * @param hash The document hash
     * @return The document ID
     */
    function getDocumentByHash(string calldata hash)
        public
        view
        returns (string memory)
    {
        return _hashToDocument[hash];
    }
    /**
     * @notice Get the version count for a document
     * @param documentId The document ID
     * @return The number of versions
     */
    function getVersionCount(string calldata documentId) public view returns (uint256) {
        return _documentHistory[documentId].length;
    }
    /**
     * @notice Get a specific version of a document
     * @param documentId The document ID
     * @param version The version number (1-indexed)
     * @return The DocumentRecord for that version
     */
    function getDocumentVersion(string calldata documentId, uint256 version)
        public
        view
        returns (DocumentRecord memory)
    {
        // History is stored in insertion order, so version N lives at index N-1.
        require(version > 0 && version <= _documentHistory[documentId].length, "Invalid version");
        return _documentHistory[documentId][version - 1];
    }
}

View File

@@ -0,0 +1,172 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "@openzeppelin/contracts/token/ERC721/ERC721.sol";
import "@openzeppelin/contracts/token/ERC721/extensions/ERC721URIStorage.sol";
import "@openzeppelin/contracts/access/Ownable.sol";
/**
 * @title LicenseNFT
 * @notice ERC721 token representing government-issued licenses
 * @dev One token per approved license request; only the contract owner may mint or revoke
 */
contract LicenseNFT is ERC721, ERC721URIStorage, Ownable {
    // Token IDs start at 1 (pre-increment), so 0 safely means "not minted" in _requestToToken.
    uint256 private _nextTokenId;
    // Request ID => token ID lookup
    mapping(string => uint256) private _requestToToken;
    // Token ID => request ID lookup
    mapping(uint256 => string) private _tokenToRequest;
    // Revocation flag per token
    mapping(uint256 => bool) private _revokedTokens;
    // Recorded reason for each revocation
    mapping(uint256 => string) private _revocationReasons;
    // Metadata URI captured at mint time (also stored via ERC721URIStorage)
    mapping(uint256 => string) private _metadataUris;
    // Events
    event LicenseMinted(
        uint256 indexed tokenId,
        address indexed to,
        string requestId,
        string metadataUri
    );
    event LicenseRevoked(
        uint256 indexed tokenId,
        string reason
    );
    constructor() ERC721("Goa Government License", "GOA-LIC") Ownable(msg.sender) {}
    /**
     * @notice Mint a new license NFT
     * @param to The address to mint the token to
     * @param requestId The associated license request ID
     * @param metadataUri The URI containing license metadata
     * @return tokenId The ID of the newly minted token
     */
    function mint(
        address to,
        string calldata requestId,
        string calldata metadataUri
    ) public onlyOwner returns (uint256) {
        require(bytes(requestId).length > 0, "Request ID required");
        require(_requestToToken[requestId] == 0, "License already minted for this request");
        uint256 newId = ++_nextTokenId;
        _safeMint(to, newId);
        _setTokenURI(newId, metadataUri);
        // Record the bidirectional request <-> token mapping and the mint-time URI.
        _requestToToken[requestId] = newId;
        _tokenToRequest[newId] = requestId;
        _metadataUris[newId] = metadataUri;
        emit LicenseMinted(newId, to, requestId, metadataUri);
        return newId;
    }
    /**
     * @notice Get the token ID for a request
     * @param requestId The license request ID
     * @return The token ID (0 if not found)
     */
    function tokenOfRequest(string calldata requestId) public view returns (uint256) {
        return _requestToToken[requestId];
    }
    /**
     * @notice Get the request ID for a token
     * @param tokenId The token ID
     * @return The request ID
     */
    function requestOfToken(uint256 tokenId) public view returns (string memory) {
        require(_ownerOf(tokenId) != address(0), "Token does not exist");
        return _tokenToRequest[tokenId];
    }
    /**
     * @notice Check if a token exists
     * @param tokenId The token ID to check
     * @return True if the token exists
     */
    function exists(uint256 tokenId) public view returns (bool) {
        return _ownerOf(tokenId) != address(0);
    }
    /**
     * @notice Revoke a license
     * @dev Revocation is permanent; the token itself is not burned
     * @param tokenId The token ID to revoke
     * @param reason The reason for revocation
     */
    function revoke(uint256 tokenId, string calldata reason) public onlyOwner {
        require(_ownerOf(tokenId) != address(0), "Token does not exist");
        require(!_revokedTokens[tokenId], "License already revoked");
        _revocationReasons[tokenId] = reason;
        _revokedTokens[tokenId] = true;
        emit LicenseRevoked(tokenId, reason);
    }
    /**
     * @notice Check if a license is revoked
     * @param tokenId The token ID to check
     * @return True if the license is revoked
     */
    function isRevoked(uint256 tokenId) public view returns (bool) {
        return _revokedTokens[tokenId];
    }
    /**
     * @notice Get the revocation reason for a license
     * @param tokenId The token ID
     * @return The revocation reason (empty if not revoked)
     */
    function getRevocationReason(uint256 tokenId) public view returns (string memory) {
        return _revocationReasons[tokenId];
    }
    /**
     * @notice Get the metadata URI for a token
     * @param tokenId The token ID
     * @return The metadata URI
     */
    function getMetadata(uint256 tokenId) public view returns (string memory) {
        require(_ownerOf(tokenId) != address(0), "Token does not exist");
        return _metadataUris[tokenId];
    }
    /**
     * @notice Get the total number of minted licenses
     * @dev Counts every token ever minted, including revoked ones
     * @return The total count
     */
    function totalSupply() public view returns (uint256) {
        return _nextTokenId;
    }
    // Overrides required because both ERC721 and ERC721URIStorage define these.
    function tokenURI(uint256 tokenId)
        public
        view
        override(ERC721, ERC721URIStorage)
        returns (string memory)
    {
        return super.tokenURI(tokenId);
    }
    function supportsInterface(bytes4 interfaceId)
        public
        view
        override(ERC721, ERC721URIStorage)
        returns (bool)
    {
        return super.supportsInterface(interfaceId);
    }
}

View File

@@ -0,0 +1,188 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "@openzeppelin/contracts/access/Ownable.sol";
/**
 * @title WorkflowRegistry
 * @notice Registers and tracks workflow definitions on-chain
 * @dev Stores only the hash of each off-chain workflow definition so it can be verified later
 */
contract WorkflowRegistry is Ownable {
    struct WorkflowDefinition {
        bytes32 id;
        string workflowType;
        string name;
        bytes32 definitionHash;
        uint256 version;
        uint256 timestamp;
        bool isActive;
    }
    // Workflow ID => definition
    mapping(bytes32 => WorkflowDefinition) private _workflows;
    // Workflow type => most recently registered workflow ID for that type
    mapping(string => bytes32) private _latestWorkflows;
    // Every registered workflow ID, in registration order
    bytes32[] private _workflowIds;
    // Monotonic counter so derived IDs stay unique within one block
    uint256 private _workflowCounter;
    // Events
    event WorkflowRegistered(
        bytes32 indexed workflowId,
        string indexed workflowType,
        string name,
        bytes32 definitionHash,
        uint256 version
    );
    event WorkflowDeactivated(bytes32 indexed workflowId);
    event WorkflowActivated(bytes32 indexed workflowId);
    constructor() Ownable(msg.sender) {}
    /**
     * @notice Register a new workflow definition
     * @param workflowType The type of workflow (e.g., "RESORT_LICENSE")
     * @param name Human-readable name
     * @param definitionHash Hash of the workflow definition JSON
     * @return workflowId The unique workflow ID
     */
    function registerWorkflow(
        string calldata workflowType,
        string calldata name,
        bytes32 definitionHash
    ) public onlyOwner returns (bytes32) {
        require(bytes(workflowType).length > 0, "Workflow type required");
        require(bytes(name).length > 0, "Name required");
        require(definitionHash != bytes32(0), "Definition hash required");
        _workflowCounter++;
        bytes32 newId = keccak256(
            abi.encodePacked(workflowType, _workflowCounter, block.timestamp)
        );
        // Version numbering continues from the previous registration of this
        // type; the previous registration is retired at the same time.
        uint256 nextVersion = 1;
        bytes32 previousId = _latestWorkflows[workflowType];
        if (previousId != bytes32(0)) {
            WorkflowDefinition storage previous = _workflows[previousId];
            nextVersion = previous.version + 1;
            previous.isActive = false;
        }
        WorkflowDefinition storage entry = _workflows[newId];
        entry.id = newId;
        entry.workflowType = workflowType;
        entry.name = name;
        entry.definitionHash = definitionHash;
        entry.version = nextVersion;
        entry.timestamp = block.timestamp;
        entry.isActive = true;
        _latestWorkflows[workflowType] = newId;
        _workflowIds.push(newId);
        emit WorkflowRegistered(newId, workflowType, name, definitionHash, nextVersion);
        return newId;
    }
    /**
     * @notice Get workflow definition by ID
     * @param workflowId The workflow ID
     * @return The WorkflowDefinition struct
     */
    function getWorkflow(bytes32 workflowId)
        public
        view
        returns (WorkflowDefinition memory)
    {
        // timestamp is only ever set on registration, so zero means "unknown ID".
        require(_workflows[workflowId].timestamp > 0, "Workflow not found");
        return _workflows[workflowId];
    }
    /**
     * @notice Get the latest active workflow for a type
     * @param workflowType The workflow type
     * @return The WorkflowDefinition struct
     */
    function getLatestWorkflow(string calldata workflowType)
        public
        view
        returns (WorkflowDefinition memory)
    {
        bytes32 latestId = _latestWorkflows[workflowType];
        require(latestId != bytes32(0), "No workflow found for type");
        return _workflows[latestId];
    }
    /**
     * @notice Verify a workflow definition hash
     * @param workflowId The workflow ID
     * @param definitionHash The hash to verify
     * @return True if the workflow is active and the hash matches
     */
    function verifyWorkflow(bytes32 workflowId, bytes32 definitionHash)
        public
        view
        returns (bool)
    {
        WorkflowDefinition storage entry = _workflows[workflowId];
        return entry.isActive && entry.definitionHash == definitionHash;
    }
    /**
     * @notice Deactivate a workflow
     * @param workflowId The workflow ID
     */
    function deactivateWorkflow(bytes32 workflowId) public onlyOwner {
        require(_workflows[workflowId].timestamp > 0, "Workflow not found");
        require(_workflows[workflowId].isActive, "Workflow already inactive");
        _workflows[workflowId].isActive = false;
        emit WorkflowDeactivated(workflowId);
    }
    /**
     * @notice Activate a workflow
     * @dev NOTE(review): reactivating an older version does not deactivate the
     *      current latest version, so two versions of one type can be active at
     *      once — confirm whether that is intended.
     * @param workflowId The workflow ID
     */
    function activateWorkflow(bytes32 workflowId) public onlyOwner {
        require(_workflows[workflowId].timestamp > 0, "Workflow not found");
        require(!_workflows[workflowId].isActive, "Workflow already active");
        _workflows[workflowId].isActive = true;
        emit WorkflowActivated(workflowId);
    }
    /**
     * @notice Get total workflow count
     * @return The number of registered workflows
     */
    function getWorkflowCount() public view returns (uint256) {
        return _workflowIds.length;
    }
    /**
     * @notice Check if a workflow is active
     * @param workflowId The workflow ID
     * @return True if active (false for unknown IDs)
     */
    function isWorkflowActive(bytes32 workflowId) public view returns (bool) {
        return _workflows[workflowId].isActive;
    }
}

View File

@@ -0,0 +1,54 @@
import { HardhatUserConfig } from 'hardhat/config';
import '@nomicfoundation/hardhat-toolbox';
import * as dotenv from 'dotenv';

dotenv.config();

// Hardhat's well-known development account #0. This key is PUBLIC — never fund
// it on a real network. Override it via DEPLOYER_PRIVATE_KEY in .env.
const PRIVATE_KEY =
  process.env.DEPLOYER_PRIVATE_KEY ||
  '0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80';

/**
 * Build a Besu network entry. All Besu networks share the same chain ID,
 * deployer account, and zero gas price (free-gas network); only the RPC URL
 * differs, so the three entries below are generated from one factory instead
 * of being copy-pasted.
 */
const besuNetwork = (url: string) => ({
  url,
  chainId: 1337,
  accounts: [PRIVATE_KEY],
  gasPrice: 0,
});

const config: HardhatUserConfig = {
  solidity: {
    version: '0.8.20',
    settings: {
      optimizer: {
        enabled: true,
        runs: 200,
      },
    },
  },
  networks: {
    hardhat: {
      chainId: 1337,
    },
    localhost: {
      url: 'http://127.0.0.1:8545',
      chainId: 1337,
    },
    besu: besuNetwork(process.env.BESU_RPC_URL || 'http://localhost:8545'),
    besu_node1: besuNetwork('http://localhost:8545'),
    besu_node2: besuNetwork('http://localhost:8546'),
  },
  paths: {
    sources: './contracts',
    tests: './test',
    cache: './cache',
    artifacts: './artifacts',
  },
};
export default config;

7900
blockchain/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

22
blockchain/package.json Normal file
View File

@@ -0,0 +1,22 @@
{
"name": "goa-gel-blockchain",
"version": "1.0.0",
"description": "Smart contracts for Goa Government e-Licensing Platform",
"scripts": {
"compile": "hardhat compile",
"test": "hardhat test",
"deploy": "hardhat run scripts/deploy.ts --network besu",
"deploy:local": "hardhat run scripts/deploy.ts --network localhost",
"node": "hardhat node"
},
"devDependencies": {
"@nomicfoundation/hardhat-toolbox": "^4.0.0",
"@types/node": "^20.0.0",
"dotenv": "^16.3.1",
"hardhat": "^2.19.0",
"typescript": "^5.3.0"
},
"dependencies": {
"@openzeppelin/contracts": "^5.0.0"
}
}

View File

@@ -0,0 +1,100 @@
import { ethers } from 'hardhat';
import * as fs from 'fs';
import * as path from 'path';

/**
 * Deploy one contract by factory name and return its deployed address.
 * The four deployments below were copy-pasted stanzas; this helper removes
 * the duplication while preserving the exact console output.
 *
 * @param name    Contract factory name (must match a compiled contract).
 * @param isFirst When true, omits the separating blank line before the banner
 *                (the preceding balance log already ends with a newline).
 */
async function deployByName(name: string, isFirst = false): Promise<string> {
  console.log(`${isFirst ? '' : '\n'}Deploying ${name}...`);
  const factory = await ethers.getContractFactory(name);
  const instance = await factory.deploy();
  await instance.waitForDeployment();
  const address = await instance.getAddress();
  console.log(`${name} deployed to:`, address);
  return address;
}

/**
 * Deploys the four platform contracts to the configured Besu network, prints
 * a summary, saves the addresses to deployments/deployment.json, and echoes
 * the .env lines the backend needs.
 */
async function main() {
  console.log('Starting deployment to Besu network...\n');
  const [deployer] = await ethers.getSigners();
  console.log('Deploying contracts with account:', deployer.address);
  const balance = await ethers.provider.getBalance(deployer.address);
  console.log('Account balance:', ethers.formatEther(balance), 'ETH\n');

  // Deploy the four platform contracts in order.
  const licenseNFTAddress = await deployByName('LicenseNFT', true);
  const approvalManagerAddress = await deployByName('ApprovalManager');
  const documentChainAddress = await deployByName('DocumentChain');
  const workflowRegistryAddress = await deployByName('WorkflowRegistry');

  // Summary
  console.log('\n========================================');
  console.log('Deployment Complete!');
  console.log('========================================');
  console.log('Contract Addresses:');
  console.log('  LicenseNFT:', licenseNFTAddress);
  console.log('  ApprovalManager:', approvalManagerAddress);
  console.log('  DocumentChain:', documentChainAddress);
  console.log('  WorkflowRegistry:', workflowRegistryAddress);
  console.log('========================================\n');

  // Save deployment info for update-env.ts and operational records.
  const deploymentInfo = {
    network: 'besu',
    chainId: 1337,
    deployer: deployer.address,
    timestamp: new Date().toISOString(),
    contracts: {
      LicenseNFT: licenseNFTAddress,
      ApprovalManager: approvalManagerAddress,
      DocumentChain: documentChainAddress,
      WorkflowRegistry: workflowRegistryAddress,
    },
  };
  const deploymentPath = path.join(__dirname, '../deployments');
  if (!fs.existsSync(deploymentPath)) {
    fs.mkdirSync(deploymentPath, { recursive: true });
  }
  fs.writeFileSync(
    path.join(deploymentPath, 'deployment.json'),
    JSON.stringify(deploymentInfo, null, 2)
  );
  console.log('Deployment info saved to deployments/deployment.json');

  // Echo the env lines for manual copy into backend/.env.
  console.log('\n========================================');
  console.log('Add these to your backend/.env file:');
  console.log('========================================');
  console.log(`CONTRACT_ADDRESS_LICENSE_NFT=${licenseNFTAddress}`);
  console.log(`CONTRACT_ADDRESS_APPROVAL_MANAGER=${approvalManagerAddress}`);
  console.log(`CONTRACT_ADDRESS_DOCUMENT_CHAIN=${documentChainAddress}`);
  console.log(`CONTRACT_ADDRESS_WORKFLOW_REGISTRY=${workflowRegistryAddress}`);
  console.log('========================================\n');
  return deploymentInfo;
}

main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });

View File

@@ -0,0 +1,60 @@
import * as fs from 'fs';
import * as path from 'path';

/**
 * Syncs the backend .env file with the contract addresses recorded by
 * deploy.ts in deployments/deployment.json. Existing assignments are
 * rewritten in place; missing ones are appended.
 */
async function main() {
  const deploymentFile = path.join(__dirname, '../deployments/deployment.json');
  if (!fs.existsSync(deploymentFile)) {
    console.error('Deployment file not found. Run deploy.ts first.');
    process.exit(1);
  }

  const info = JSON.parse(fs.readFileSync(deploymentFile, 'utf8'));

  const envFile = path.join(__dirname, '../../backend/.env');
  if (!fs.existsSync(envFile)) {
    console.error('Backend .env file not found at:', envFile);
    process.exit(1);
  }

  let contents = fs.readFileSync(envFile, 'utf8');

  // Env var name => deployed contract address.
  const updates: Record<string, string> = {
    CONTRACT_ADDRESS_LICENSE_NFT: info.contracts.LicenseNFT,
    CONTRACT_ADDRESS_APPROVAL_MANAGER: info.contracts.ApprovalManager,
    CONTRACT_ADDRESS_DOCUMENT_CHAIN: info.contracts.DocumentChain,
    CONTRACT_ADDRESS_WORKFLOW_REGISTRY: info.contracts.WorkflowRegistry,
  };

  // Replace an existing `KEY=...` line in place, or append when absent.
  Object.entries(updates).forEach(([name, addr]) => {
    const pattern = new RegExp(`^${name}=.*$`, 'm');
    if (pattern.test(contents)) {
      contents = contents.replace(pattern, `${name}=${addr}`);
      console.log(`Updated ${name}`);
    } else {
      contents += `\n${name}=${addr}`;
      console.log(`Added ${name}`);
    }
  });

  fs.writeFileSync(envFile, contents);

  console.log('\nBackend .env file updated successfully!');
  console.log('Updated contract addresses:');
  Object.entries(updates).forEach(([name, addr]) => {
    console.log(`  ${name}=${addr}`);
  });
}

main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });

15
blockchain/tsconfig.json Normal file
View File

@@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2020",
"module": "commonjs",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"outDir": "./dist",
"declaration": true,
"resolveJsonModule": true
},
"include": ["./scripts", "./test", "./hardhat.config.ts"],
"files": ["./hardhat.config.ts"]
}