mirror of
https://github.com/arkorty/B.Tech-Project-III.git
synced 2026-04-19 12:41:48 +00:00
init
This commit is contained in:
18
dmtp/server/.gitignore
vendored
Normal file
18
dmtp/server/.gitignore
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
node_modules
|
||||
# Keep environment variables out of version control
|
||||
.env
|
||||
|
||||
/generated/prisma
|
||||
dist/
|
||||
# Hardhat
|
||||
artifacts/
|
||||
cache/
|
||||
typechain-types/
|
||||
|
||||
# Logs
|
||||
logs/
|
||||
*.log
|
||||
|
||||
# Hardhat network files
|
||||
ignition/deployments/
|
||||
.openzeppelin/
|
||||
198
dmtp/server/TROUBLESHOOTING.md
Normal file
198
dmtp/server/TROUBLESHOOTING.md
Normal file
@@ -0,0 +1,198 @@
|
||||
# Blockchain Integration - Troubleshooting Guide
|
||||
|
||||
## Summary of Fixes
|
||||
|
||||
### 1. Token Deployment Issue ✅
|
||||
|
||||
**Problem**: Invalid BytesLike value error when deploying test token
|
||||
**Solution**:
|
||||
|
||||
- Used compiled `MockERC20.sol` artifact instead of hardcoded bytecode
|
||||
- Added `resolveJsonModule: true` to `tsconfig.json`
|
||||
- Deployed test cUSD token: `0x845D9D0B4Be004Dcbc17b11160B0C18abBD5FEBD`
|
||||
|
||||
### 2. Contract Mismatch Issue ✅
|
||||
|
||||
**Problem**: TaskEscrow contract was deployed with wrong cUSD address
|
||||
**Solution**:
|
||||
|
||||
- Created diagnostic script (`check-contract-config.ts`)
|
||||
- Redeployed TaskEscrow with correct cUSD token
|
||||
- New TaskEscrow address: `0xa520d207c91C0FE0e9cFe8D63AbE02fd18B2254e`
|
||||
|
||||
### 3. Approval Issue ✅
|
||||
|
||||
**Problem**: Contract couldn't transfer cUSD (no allowance)
|
||||
**Solution**:
|
||||
|
||||
- Created `approve-cusd.ts` script
|
||||
- Added `checkAllowance()` and `approveCUSD()` methods to blockchain service
|
||||
- Approved contract to spend cUSD tokens
|
||||
|
||||
### 4. AI Verification JSON Parsing ✅
|
||||
|
||||
**Problem**: Gemini returning incomplete/malformed JSON responses
|
||||
**Solution**:
|
||||
|
||||
- Added `responseMimeType: "application/json"` to Gemini API calls
|
||||
- Improved JSON extraction with regex fallback
|
||||
- Added better error handling and logging
|
||||
- Implemented fallback parsing for malformed responses
|
||||
|
||||
### 5. Task Not Found Error ⚠️
|
||||
|
||||
**Problem**: "Task does not exist" error when approving submissions
|
||||
**Root Cause**: Multiple possible causes:
|
||||
|
||||
1. Different project directories (`D:\new-celo` vs current workspace)
|
||||
2. Old tasks pointing to old contract addresses
|
||||
3. Database/blockchain sync issues
|
||||
|
||||
**Solution**:
|
||||
|
||||
- Added `getTask()` method to verify task existence
|
||||
- Added `getTaskCounter()` to check blockchain state
|
||||
- Created diagnostic scripts:
|
||||
- `diagnose-task-mismatch.ts` - Check DB vs blockchain
|
||||
- `cleanup-old-tasks.ts` - Mark invalid tasks as expired
|
||||
- `show-env-info.ts` - Show complete environment info
|
||||
|
||||
## Deployed Contracts
|
||||
|
||||
```
|
||||
cUSD Token: 0x845D9D0B4Be004Dcbc17b11160B0C18abBD5FEBD
|
||||
TaskEscrow: 0xa520d207c91C0FE0e9cFe8D63AbE02fd18B2254e
|
||||
Network: Celo Sepolia Testnet
|
||||
Chain ID: 11142220
|
||||
```
|
||||
|
||||
## Useful Scripts
|
||||
|
||||
### Deployment
|
||||
|
||||
```bash
|
||||
# Deploy test cUSD token
|
||||
npx tsx deploy-test-token.ts
|
||||
|
||||
# Deploy TaskEscrow contract
|
||||
npx tsx redeploy-task-escrow.ts
|
||||
|
||||
# Approve cUSD spending
|
||||
npx tsx approve-cusd.ts
|
||||
|
||||
# Create test task
|
||||
npx tsx create-task-with-blockchain.ts
|
||||
```
|
||||
|
||||
### Diagnostics
|
||||
|
||||
```bash
|
||||
# Check contract configuration
|
||||
npx tsx check-contract-config.ts
|
||||
|
||||
# Diagnose task mismatch
|
||||
npx tsx diagnose-task-mismatch.ts
|
||||
|
||||
# Cleanup old/invalid tasks
|
||||
npx tsx cleanup-old-tasks.ts
|
||||
|
||||
# Show environment info
|
||||
npx tsx show-env-info.ts
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
```bash
|
||||
# Test Gemini JSON response
|
||||
npx tsx test-gemini-json.ts
|
||||
```
|
||||
|
||||
## Common Issues & Solutions
|
||||
|
||||
### Issue: "Task does not exist" on blockchain
|
||||
|
||||
**Check**:
|
||||
|
||||
1. Run `npx tsx show-env-info.ts` to verify contract addresses
|
||||
2. Run `npx tsx diagnose-task-mismatch.ts` to check tasks
|
||||
3. Ensure you're in the correct project directory
|
||||
|
||||
**Fix**:
|
||||
|
||||
- If using wrong contract: Update `CONTRACT_ADDRESS` in `.env`
|
||||
- If tasks are old: Run `npx tsx cleanup-old-tasks.ts`
|
||||
- If starting fresh: Create new task with `npx tsx create-task-with-blockchain.ts`
|
||||
|
||||
### Issue: "Insufficient allowance"
|
||||
|
||||
**Check**: Run `npx tsx check-contract-config.ts`
|
||||
|
||||
**Fix**: Run `npx tsx approve-cusd.ts`
|
||||
|
||||
### Issue: Gemini JSON parsing errors
|
||||
|
||||
**Check**: Look for "Invalid JSON response from Gemini" in logs
|
||||
|
||||
**Fix**:
|
||||
|
||||
- Already implemented: JSON mode in API calls
|
||||
- Fallback parsing with regex
|
||||
- Logs now show raw response for debugging
|
||||
|
||||
### Issue: Different project directories
|
||||
|
||||
**Check**: `process.cwd()` and paths in error messages
|
||||
|
||||
**Fix**: Ensure you're running commands from the correct directory:
|
||||
|
||||
```bash
|
||||
cd C:\Users\RAJ\OneDrive\Desktop\micro-job-ai-agent-web3\server
|
||||
```
|
||||
|
||||
## Environment Variables Required
|
||||
|
||||
```env
|
||||
# Database
|
||||
DATABASE_URL=<your_prisma_accelerate_url>
|
||||
|
||||
# Blockchain
|
||||
PRIVATE_KEY=<your_private_key>
|
||||
CELO_RPC_URL=https://forno.celo-sepolia.celo-testnet.org
|
||||
CHAIN_ID=11142220
|
||||
CONTRACT_ADDRESS=0xa520d207c91C0FE0e9cFe8D63AbE02fd18B2254e
|
||||
CUSD_SEPOLIA_ADDRESS=0x845D9D0B4Be004Dcbc17b11160B0C18abBD5FEBD
|
||||
|
||||
# AI
|
||||
GEMINI_API_KEY=<your_gemini_api_key>
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. ✅ Verify environment: `npx tsx show-env-info.ts`
|
||||
2. ✅ Check task status: `npx tsx diagnose-task-mismatch.ts`
|
||||
3. ✅ Test AI verification: `npx tsx test-gemini-json.ts`
|
||||
4. Create new task if needed: `npx tsx create-task-with-blockchain.ts`
|
||||
5. Test full workflow: Submit task → AI verifies → Payment released
|
||||
|
||||
## Architecture Improvements Made
|
||||
|
||||
### Blockchain Service
|
||||
|
||||
- ✅ Added task verification before operations
|
||||
- ✅ Added detailed logging
|
||||
- ✅ Added helper methods: `getTask()`, `getTaskCounter()`, `getContractAddress()`
|
||||
- ✅ Better error messages with context
|
||||
|
||||
### AI Verification Service
|
||||
|
||||
- ✅ JSON mode for Gemini API
|
||||
- ✅ Fallback parsing for malformed responses
|
||||
- ✅ Better error logging
|
||||
- ✅ Response validation
|
||||
|
||||
### Tooling
|
||||
|
||||
- ✅ Comprehensive diagnostic scripts
|
||||
- ✅ Automated cleanup tools
|
||||
- ✅ Environment validation
|
||||
- ✅ Easy testing scripts
|
||||
91
dmtp/server/approve-cusd.ts
Normal file
91
dmtp/server/approve-cusd.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import 'dotenv/config';
|
||||
import { ethers } from 'ethers';
|
||||
|
||||
/**
|
||||
* Approve TaskEscrow contract to spend cUSD tokens
|
||||
* This needs to be done once before creating tasks
|
||||
*/
|
||||
|
||||
async function approveCUSD() {
|
||||
try {
|
||||
console.log('🔐 Approving TaskEscrow to spend cUSD...\n');
|
||||
|
||||
// Initialize provider and signer
|
||||
const provider = new ethers.JsonRpcProvider(
|
||||
process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'
|
||||
);
|
||||
|
||||
const privateKey = process.env.PRIVATE_KEY;
|
||||
if (!privateKey) {
|
||||
throw new Error('PRIVATE_KEY not configured in .env');
|
||||
}
|
||||
|
||||
const signer = new ethers.Wallet(privateKey, provider);
|
||||
console.log(`📝 Approving from: ${signer.address}`);
|
||||
|
||||
// Get contract addresses
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
const taskEscrowAddress = process.env.CONTRACT_ADDRESS;
|
||||
|
||||
if (!cUSDAddress) {
|
||||
throw new Error('CUSD_SEPOLIA_ADDRESS not configured in .env');
|
||||
}
|
||||
if (!taskEscrowAddress) {
|
||||
throw new Error('CONTRACT_ADDRESS not configured in .env');
|
||||
}
|
||||
|
||||
console.log(`💰 cUSD Token: ${cUSDAddress}`);
|
||||
console.log(`📋 TaskEscrow Contract: ${taskEscrowAddress}\n`);
|
||||
|
||||
// Create cUSD contract instance
|
||||
const cUSDContract = new ethers.Contract(
|
||||
cUSDAddress,
|
||||
[
|
||||
'function approve(address spender, uint256 amount) returns (bool)',
|
||||
'function allowance(address owner, address spender) view returns (uint256)',
|
||||
'function balanceOf(address account) view returns (uint256)',
|
||||
],
|
||||
signer
|
||||
);
|
||||
|
||||
// Check current balance
|
||||
const balance = await cUSDContract.balanceOf(signer.address);
|
||||
console.log(`💵 Your cUSD Balance: ${ethers.formatEther(balance)} cUSD`);
|
||||
|
||||
// Check current allowance
|
||||
const currentAllowance = await cUSDContract.allowance(signer.address, taskEscrowAddress);
|
||||
console.log(`🔓 Current Allowance: ${ethers.formatEther(currentAllowance)} cUSD\n`);
|
||||
|
||||
// Approve a large amount (10 million cUSD) so we don't need to approve again
|
||||
const approvalAmount = ethers.parseEther('10000000');
|
||||
console.log(`⏳ Approving ${ethers.formatEther(approvalAmount)} cUSD...`);
|
||||
|
||||
const tx = await cUSDContract.approve(taskEscrowAddress, approvalAmount);
|
||||
console.log(`📍 Transaction sent: ${tx.hash}`);
|
||||
console.log('⏳ Waiting for confirmation...\n');
|
||||
|
||||
const receipt = await tx.wait();
|
||||
console.log(`✅ Approval confirmed in block ${receipt.blockNumber}\n`);
|
||||
|
||||
// Verify new allowance
|
||||
const newAllowance = await cUSDContract.allowance(signer.address, taskEscrowAddress);
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
console.log('✅ SUCCESS! TaskEscrow approved to spend cUSD');
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
||||
console.log(`New Allowance: ${ethers.formatEther(newAllowance)} cUSD`);
|
||||
console.log(`Transaction Hash: ${receipt.hash}\n`);
|
||||
|
||||
console.log('🎯 Next Steps:');
|
||||
console.log('You can now create tasks with blockchain integration! 🎉\n');
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('\n❌ Approval failed:', error.message);
|
||||
if (error.data) {
|
||||
console.error('Error data:', error.data);
|
||||
}
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
approveCUSD();
|
||||
56
dmtp/server/check-contract-config.ts
Normal file
56
dmtp/server/check-contract-config.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import 'dotenv/config';
|
||||
import { ethers } from 'ethers';
|
||||
import { readFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
async function checkContractConfig() {
|
||||
try {
|
||||
console.log('🔍 Checking TaskEscrow Contract Configuration...\n');
|
||||
|
||||
const provider = new ethers.JsonRpcProvider(
|
||||
process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'
|
||||
);
|
||||
|
||||
const contractAddress = process.env.CONTRACT_ADDRESS;
|
||||
if (!contractAddress) {
|
||||
throw new Error('CONTRACT_ADDRESS not configured');
|
||||
}
|
||||
|
||||
const TaskEscrowABI = JSON.parse(
|
||||
readFileSync(
|
||||
join(__dirname, './artifacts/contracts/TaskEscrow.sol/TaskEscrow.json'),
|
||||
'utf8'
|
||||
)
|
||||
);
|
||||
|
||||
const contract = new ethers.Contract(
|
||||
contractAddress,
|
||||
TaskEscrowABI.abi,
|
||||
provider
|
||||
);
|
||||
|
||||
console.log(`📋 TaskEscrow Contract: ${contractAddress}`);
|
||||
|
||||
const cUSDAddress = await contract.cUSD();
|
||||
console.log(`💰 Configured cUSD Token: ${cUSDAddress}`);
|
||||
console.log(`💰 Expected cUSD Token: ${process.env.CUSD_SEPOLIA_ADDRESS}\n`);
|
||||
|
||||
if (cUSDAddress.toLowerCase() !== process.env.CUSD_SEPOLIA_ADDRESS?.toLowerCase()) {
|
||||
console.log('❌ MISMATCH DETECTED!');
|
||||
console.log('\nThe TaskEscrow contract is configured with a different cUSD token address.');
|
||||
console.log('\n🔧 Solutions:');
|
||||
console.log('1. Redeploy TaskEscrow contract with the new cUSD address');
|
||||
console.log('2. Update CUSD_SEPOLIA_ADDRESS in .env to match the contract\'s cUSD address');
|
||||
console.log(` CUSD_SEPOLIA_ADDRESS=${cUSDAddress}\n`);
|
||||
} else {
|
||||
console.log('✅ cUSD addresses match!');
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('❌ Error:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
checkContractConfig();
|
||||
12
dmtp/server/check-db.sh
Normal file
12
dmtp/server/check-db.sh
Normal file
@@ -0,0 +1,12 @@
|
||||
#!/bin/bash
|
||||
# Check if verification worker is processing jobs
|
||||
|
||||
cd c:\Users\RAJ\OneDrive\Desktop\micro-job-ai-agent-web3\server
|
||||
|
||||
echo "📋 Checking for payments with pending status..."
|
||||
echo ""
|
||||
echo "SELECT id, task_id, worker_id, amount, transaction_hash, status, created_at FROM payments ORDER BY created_at DESC LIMIT 10;" | psql "$DATABASE_URL" 2>/dev/null || echo "⚠️ Could not query database directly"
|
||||
|
||||
echo ""
|
||||
echo "📝 Checking submissions with pending verification status..."
|
||||
echo "SELECT id, task_id, worker_id, verification_status, created_at FROM submissions ORDER BY created_at DESC LIMIT 10;" | psql "$DATABASE_URL" 2>/dev/null || echo "⚠️ Could not query database directly"
|
||||
66
dmtp/server/check-stalled-job.ts
Normal file
66
dmtp/server/check-stalled-job.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import 'dotenv/config';
|
||||
import { prisma } from './src/database/connections';
|
||||
import { verificationQueue } from './src/queues/verification.queue';
|
||||
|
||||
async function checkStalledJob() {
|
||||
console.log('🔍 Checking for stalled jobs...\n');
|
||||
|
||||
const submissionId = '337c16f7-081e-4b3c-8aee-4d9ffa0e3682';
|
||||
|
||||
// Check submission status
|
||||
const submission = await prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!submission) {
|
||||
console.log('❌ Submission not found');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`📋 Submission: ${submission.id}`);
|
||||
console.log(` Status: ${submission.verificationStatus}`);
|
||||
console.log(` Task: ${submission.task.title}`);
|
||||
console.log(` Contract Task ID: ${submission.task.contractTaskId}`);
|
||||
|
||||
// Check queue jobs
|
||||
const jobs = await verificationQueue.getJobs(['active', 'waiting', 'delayed', 'failed', 'completed']);
|
||||
const jobForSubmission = jobs.find(j => j.data.submissionId === submissionId);
|
||||
|
||||
if (jobForSubmission) {
|
||||
console.log(`\n📊 Job found: ${jobForSubmission.id}`);
|
||||
console.log(` State: ${await jobForSubmission.getState()}`);
|
||||
console.log(` Attempts: ${jobForSubmission.attemptsMade}`);
|
||||
console.log(` Data:`, jobForSubmission.data);
|
||||
|
||||
// Get job state
|
||||
const state = await jobForSubmission.getState();
|
||||
|
||||
if (state === 'failed') {
|
||||
console.log(`\n❌ Job failed. Error:`, jobForSubmission.failedReason);
|
||||
console.log('\n🔄 Retrying job...');
|
||||
await jobForSubmission.retry();
|
||||
console.log('✅ Job retried');
|
||||
} else if (state === 'completed') {
|
||||
console.log('\n✅ Job completed');
|
||||
console.log('Result:', jobForSubmission.returnvalue);
|
||||
} else {
|
||||
console.log(`\n⚠️ Job in state: ${state}`);
|
||||
}
|
||||
} else {
|
||||
console.log('\n❌ No job found in queue for this submission');
|
||||
}
|
||||
|
||||
// Queue stats
|
||||
const stats = await verificationQueue.getJobCounts();
|
||||
console.log('\n📊 Queue Stats:', stats);
|
||||
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
checkStalledJob().catch(error => {
|
||||
console.error('Error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
43
dmtp/server/check-tasks-without-contract.ts
Normal file
43
dmtp/server/check-tasks-without-contract.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import 'dotenv/config';
|
||||
import { prisma } from './src/database/connections';
|
||||
|
||||
async function checkTasksWithoutContract() {
|
||||
console.log('🔍 Checking tasks without blockchain contract...\n');
|
||||
|
||||
const tasksWithoutContract = await prisma.task.findMany({
|
||||
where: {
|
||||
contractTaskId: null,
|
||||
},
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
walletAddress: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
orderBy: {
|
||||
createdAt: 'desc',
|
||||
},
|
||||
take: 10,
|
||||
});
|
||||
|
||||
console.log(`Found ${tasksWithoutContract.length} tasks without blockchain contract:\n`);
|
||||
|
||||
for (const task of tasksWithoutContract) {
|
||||
console.log(`━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━`);
|
||||
console.log(`📋 Task: ${task.title}`);
|
||||
console.log(` ID: ${task.id}`);
|
||||
console.log(` Requester: ${task.requester.walletAddress}`);
|
||||
console.log(` Amount: ${task.paymentAmount} cUSD`);
|
||||
console.log(` Status: ${task.status}`);
|
||||
console.log(` Created: ${task.createdAt}`);
|
||||
console.log(` Contract Task ID: ${task.contractTaskId}`);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
checkTasksWithoutContract().catch(error => {
|
||||
console.error('Error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
36
dmtp/server/check-wallet-balance.ts
Normal file
36
dmtp/server/check-wallet-balance.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import 'dotenv/config';
|
||||
import { blockchainService } from './src/services/blockchain.service';
|
||||
|
||||
async function checkWalletBalance() {
|
||||
try {
|
||||
|
||||
const walletAddress = '0xA0e793E7257c065b30c46Ef6828F2B3C0de87A8E';
|
||||
|
||||
console.log('💰 Checking cUSD balance...\n');
|
||||
console.log(`Wallet: ${walletAddress}`);
|
||||
|
||||
const balance = await blockchainService.getCUSDBalance(walletAddress);
|
||||
|
||||
console.log(`\nBalance: ${balance} cUSD`);
|
||||
|
||||
if (parseFloat(balance) === 0) {
|
||||
console.log('\n❌ No cUSD balance!');
|
||||
console.log('\n📝 To get cUSD on Celo Sepolia:');
|
||||
console.log('1. Get testnet CELO from: https://faucet.celo.org');
|
||||
console.log('2. Swap CELO for cUSD on Uniswap or Mento');
|
||||
console.log('3. Or use the Celo wallet to get test cUSD');
|
||||
} else {
|
||||
console.log('\n✅ Wallet has cUSD!');
|
||||
console.log('\n📝 Next step:');
|
||||
console.log('The wallet needs to APPROVE the TaskEscrow contract to spend cUSD.');
|
||||
console.log('This is normally done through the frontend when creating a task.');
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('Error:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
checkWalletBalance();
|
||||
94
dmtp/server/cleanup-old-tasks.ts
Normal file
94
dmtp/server/cleanup-old-tasks.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import 'dotenv/config';
|
||||
import { prisma } from './src/database/connections';
|
||||
import { blockchainService } from './src/services/blockchain.service';
|
||||
|
||||
async function cleanupOldTasks() {
|
||||
try {
|
||||
console.log('🧹 Cleaning up tasks from old contracts...\n');
|
||||
|
||||
const currentContract = blockchainService.getContractAddress();
|
||||
console.log(`📋 Current Contract: ${currentContract}\n`);
|
||||
|
||||
// Find all tasks with contractTaskId
|
||||
const tasks = await prisma.task.findMany({
|
||||
where: {
|
||||
contractTaskId: {
|
||||
not: null,
|
||||
},
|
||||
status: {
|
||||
in: ['open', 'in_progress'],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`Found ${tasks.length} active tasks with blockchain integration\n`);
|
||||
|
||||
let invalidTasks = 0;
|
||||
let validTasks = 0;
|
||||
|
||||
for (const task of tasks) {
|
||||
try {
|
||||
const blockchainTask = await blockchainService.getTask(task.contractTaskId!);
|
||||
|
||||
if (!blockchainTask) {
|
||||
console.log(`❌ Task ${task.id} (Contract ID: ${task.contractTaskId}) - NOT FOUND on current contract`);
|
||||
invalidTasks++;
|
||||
} else {
|
||||
console.log(`✅ Task ${task.id} (Contract ID: ${task.contractTaskId}) - Valid`);
|
||||
validTasks++;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`❌ Task ${task.id} (Contract ID: ${task.contractTaskId}) - Error checking`);
|
||||
invalidTasks++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
console.log(`Summary:`);
|
||||
console.log(` Valid tasks: ${validTasks}`);
|
||||
console.log(` Invalid tasks: ${invalidTasks}`);
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
||||
|
||||
if (invalidTasks > 0) {
|
||||
console.log('⚠️ Found tasks referencing old contract!\n');
|
||||
console.log('Options:');
|
||||
console.log('1. Mark invalid tasks as "cancelled" (recommended)');
|
||||
console.log('2. Delete invalid tasks completely');
|
||||
console.log('3. Do nothing (manual cleanup)\n');
|
||||
|
||||
// For now, let's mark them as cancelled
|
||||
console.log('Marking invalid tasks as cancelled...\n');
|
||||
|
||||
for (const task of tasks) {
|
||||
try {
|
||||
const blockchainTask = await blockchainService.getTask(task.contractTaskId!);
|
||||
|
||||
if (!blockchainTask) {
|
||||
await prisma.task.update({
|
||||
where: { id: task.id },
|
||||
data: { status: 'expired' as any },
|
||||
});
|
||||
console.log(`✅ Marked task ${task.id} as expired`);
|
||||
}
|
||||
} catch (error) {
|
||||
await prisma.task.update({
|
||||
where: { id: task.id },
|
||||
data: { status: 'expired' as any },
|
||||
});
|
||||
console.log(`✅ Marked task ${task.id} as expired`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n✅ Cleanup complete!');
|
||||
} else {
|
||||
console.log('✅ All tasks are valid - no cleanup needed!');
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('❌ Error:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
cleanupOldTasks();
|
||||
48
dmtp/server/cleanup-test-data.ts
Normal file
48
dmtp/server/cleanup-test-data.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import 'dotenv/config';
|
||||
import { prisma } from './src/database/connections';
|
||||
|
||||
async function cleanupTestData() {
|
||||
console.log('🧹 Cleaning up test data...\n');
|
||||
|
||||
// Delete submissions for tasks without contracts
|
||||
const deletedSubmissions = await prisma.submission.deleteMany({
|
||||
where: {
|
||||
task: {
|
||||
contractTaskId: null,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(`✅ Deleted ${deletedSubmissions.count} submissions for tasks without contracts`);
|
||||
|
||||
// Delete payments for tasks without contracts
|
||||
const deletedPayments = await prisma.payment.deleteMany({
|
||||
where: {
|
||||
task: {
|
||||
contractTaskId: null,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(`✅ Deleted ${deletedPayments.count} payments for tasks without contracts`);
|
||||
|
||||
// Delete tasks without contracts
|
||||
const deletedTasks = await prisma.task.deleteMany({
|
||||
where: {
|
||||
contractTaskId: null,
|
||||
},
|
||||
});
|
||||
console.log(`✅ Deleted ${deletedTasks.count} tasks without blockchain contracts`);
|
||||
|
||||
console.log('\n✅ Cleanup complete!');
|
||||
console.log('\n📝 Next steps:');
|
||||
console.log('1. Create a new task through the API with a REAL wallet address');
|
||||
console.log('2. Make sure you have cUSD in your wallet');
|
||||
console.log('3. Submit work to the task');
|
||||
console.log('4. Watch the payment process automatically!');
|
||||
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
cleanupTestData().catch(error => {
|
||||
console.error('Error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
12
dmtp/server/contracts/MockERC20.sol
Normal file
12
dmtp/server/contracts/MockERC20.sol
Normal file
@@ -0,0 +1,12 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import "@openzeppelin/contracts/token/ERC20/ERC20.sol";
|
||||
|
||||
contract MockERC20 is ERC20 {
|
||||
constructor(string memory name, string memory symbol) ERC20(name, symbol) {}
|
||||
|
||||
function mint(address to, uint256 amount) external {
|
||||
_mint(to, amount);
|
||||
}
|
||||
}
|
||||
288
dmtp/server/contracts/TaskEscrow.sol
Normal file
288
dmtp/server/contracts/TaskEscrow.sol
Normal file
@@ -0,0 +1,288 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
pragma solidity ^0.8.20;
|
||||
|
||||
import "@openzeppelin/contracts/token/ERC20/IERC20.sol";
|
||||
import "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol";
|
||||
import "@openzeppelin/contracts/access/Ownable.sol";
|
||||
import "@openzeppelin/contracts/utils/ReentrancyGuard.sol";
|
||||
import "@openzeppelin/contracts/utils/Pausable.sol";
|
||||
|
||||
/**
|
||||
* @title TaskEscrow
|
||||
* @dev Escrow smart contract for AI-powered micro-task marketplace on CELO Sepolia Testnet.
|
||||
* @notice This contract securely holds cUSD payments for tasks, releasing to workers upon AI-verification approval.
|
||||
*/
|
||||
contract TaskEscrow is Ownable, ReentrancyGuard, Pausable {
|
||||
using SafeERC20 for IERC20;
|
||||
|
||||
// ============ Constants & Parameters ============
|
||||
IERC20 public immutable cUSD;
|
||||
|
||||
uint256 public constant PLATFORM_FEE_BPS = 500; // 5%
|
||||
uint256 public constant BPS_DENOMINATOR = 10000;
|
||||
|
||||
uint256 public platformFeesAccumulated;
|
||||
uint256 public taskCounter;
|
||||
|
||||
// ============ Enums ============
|
||||
enum TaskStatus {
|
||||
Open,
|
||||
InProgress,
|
||||
Completed,
|
||||
Cancelled,
|
||||
Expired
|
||||
}
|
||||
|
||||
// ============ Struct ============
|
||||
struct Task {
|
||||
uint256 taskId;
|
||||
address requester;
|
||||
address worker;
|
||||
uint256 paymentAmount;
|
||||
TaskStatus status;
|
||||
uint256 createdAt;
|
||||
uint256 expiresAt;
|
||||
}
|
||||
|
||||
// ============ Mappings ============
|
||||
mapping(uint256 => Task) public tasks;
|
||||
mapping(uint256 => bool) public taskExists;
|
||||
|
||||
// ============ Events ============
|
||||
event TaskCreated(
|
||||
uint256 indexed taskId,
|
||||
address indexed requester,
|
||||
uint256 payment,
|
||||
uint256 expiresAt
|
||||
);
|
||||
event WorkerAssigned(uint256 indexed taskId, address indexed worker);
|
||||
event PaymentReleased(
|
||||
uint256 indexed taskId,
|
||||
address indexed worker,
|
||||
uint256 workerAmount,
|
||||
uint256 platformFee
|
||||
);
|
||||
event TaskCancelled(
|
||||
uint256 indexed taskId,
|
||||
address indexed requester,
|
||||
uint256 refunded
|
||||
);
|
||||
event TaskExpired(
|
||||
uint256 indexed taskId,
|
||||
address indexed requester,
|
||||
uint256 refunded
|
||||
);
|
||||
event PlatformFeesWithdrawn(address indexed owner, uint256 amount);
|
||||
event DebugLog(
|
||||
string action,
|
||||
address sender,
|
||||
uint256 taskId,
|
||||
uint256 timestamp
|
||||
);
|
||||
|
||||
// ============ Modifiers ============
|
||||
modifier validTask(uint256 _taskId) {
|
||||
require(taskExists[_taskId], "Invalid task ID");
|
||||
_;
|
||||
}
|
||||
|
||||
modifier onlyRequester(uint256 _taskId) {
|
||||
require(tasks[_taskId].requester == msg.sender, "Not requester");
|
||||
_;
|
||||
}
|
||||
|
||||
modifier taskIsOpen(uint256 _taskId) {
|
||||
require(tasks[_taskId].status == TaskStatus.Open, "Not open");
|
||||
_;
|
||||
}
|
||||
|
||||
modifier taskInProgress(uint256 _taskId) {
|
||||
require(
|
||||
tasks[_taskId].status == TaskStatus.InProgress,
|
||||
"Not in progress"
|
||||
);
|
||||
_;
|
||||
}
|
||||
|
||||
// ============ Constructor ============
|
||||
/**
|
||||
* @param _cUSDAddress Valid cUSD contract address on Celo Sepolia.
|
||||
*/
|
||||
constructor(address _cUSDAddress) Ownable(msg.sender) {
|
||||
require(_cUSDAddress != address(0), "Invalid cUSD address");
|
||||
cUSD = IERC20(_cUSDAddress);
|
||||
}
|
||||
|
||||
// ============ Core Logic ============
|
||||
|
||||
function createTask(
|
||||
uint256 _paymentAmount,
|
||||
uint256 _durationInDays
|
||||
) external whenNotPaused nonReentrant returns (uint256) {
|
||||
require(_paymentAmount > 0, "Invalid amount");
|
||||
require(
|
||||
_durationInDays > 0 && _durationInDays <= 90,
|
||||
"Invalid duration"
|
||||
);
|
||||
|
||||
taskCounter++;
|
||||
uint256 newTaskId = taskCounter;
|
||||
uint256 expiry = block.timestamp + (_durationInDays * 1 days);
|
||||
|
||||
cUSD.safeTransferFrom(msg.sender, address(this), _paymentAmount);
|
||||
|
||||
tasks[newTaskId] = Task({
|
||||
taskId: newTaskId,
|
||||
requester: msg.sender,
|
||||
worker: address(0),
|
||||
paymentAmount: _paymentAmount,
|
||||
status: TaskStatus.Open,
|
||||
createdAt: block.timestamp,
|
||||
expiresAt: expiry
|
||||
});
|
||||
|
||||
taskExists[newTaskId] = true;
|
||||
|
||||
emit TaskCreated(newTaskId, msg.sender, _paymentAmount, expiry);
|
||||
emit DebugLog("createTask", msg.sender, newTaskId, block.timestamp);
|
||||
|
||||
return newTaskId;
|
||||
}
|
||||
|
||||
function assignWorker(
|
||||
uint256 _taskId,
|
||||
address _worker
|
||||
) external validTask(_taskId) taskIsOpen(_taskId) whenNotPaused {
|
||||
Task storage task = tasks[_taskId];
|
||||
require(
|
||||
msg.sender == task.requester || msg.sender == owner(),
|
||||
"Not authorized"
|
||||
);
|
||||
require(_worker != address(0), "Invalid worker");
|
||||
require(_worker != task.requester, "Requester cannot be worker");
|
||||
|
||||
task.worker = _worker;
|
||||
task.status = TaskStatus.InProgress;
|
||||
|
||||
emit WorkerAssigned(_taskId, _worker);
|
||||
emit DebugLog("assignWorker", msg.sender, _taskId, block.timestamp);
|
||||
}
|
||||
|
||||
function approveSubmission(
|
||||
uint256 _taskId
|
||||
)
|
||||
external
|
||||
onlyOwner
|
||||
validTask(_taskId)
|
||||
taskInProgress(_taskId)
|
||||
nonReentrant
|
||||
{
|
||||
Task storage task = tasks[_taskId];
|
||||
uint256 total = task.paymentAmount;
|
||||
uint256 platformFee = (total * PLATFORM_FEE_BPS) / BPS_DENOMINATOR;
|
||||
uint256 workerShare = total - platformFee;
|
||||
|
||||
task.status = TaskStatus.Completed;
|
||||
platformFeesAccumulated += platformFee;
|
||||
cUSD.safeTransfer(task.worker, workerShare);
|
||||
|
||||
emit PaymentReleased(_taskId, task.worker, workerShare, platformFee);
|
||||
emit DebugLog(
|
||||
"approveSubmission",
|
||||
msg.sender,
|
||||
_taskId,
|
||||
block.timestamp
|
||||
);
|
||||
}
|
||||
|
||||
function rejectSubmission(
|
||||
uint256 _taskId
|
||||
)
|
||||
external
|
||||
onlyOwner
|
||||
validTask(_taskId)
|
||||
taskInProgress(_taskId)
|
||||
nonReentrant
|
||||
{
|
||||
Task storage task = tasks[_taskId];
|
||||
task.status = TaskStatus.Cancelled;
|
||||
|
||||
cUSD.safeTransfer(task.requester, task.paymentAmount);
|
||||
|
||||
emit TaskCancelled(_taskId, task.requester, task.paymentAmount);
|
||||
emit DebugLog("rejectSubmission", msg.sender, _taskId, block.timestamp);
|
||||
}
|
||||
|
||||
function cancelTask(
|
||||
uint256 _taskId
|
||||
)
|
||||
external
|
||||
validTask(_taskId)
|
||||
taskIsOpen(_taskId)
|
||||
onlyRequester(_taskId)
|
||||
nonReentrant
|
||||
{
|
||||
Task storage task = tasks[_taskId];
|
||||
task.status = TaskStatus.Cancelled;
|
||||
|
||||
cUSD.safeTransfer(task.requester, task.paymentAmount);
|
||||
|
||||
emit TaskCancelled(_taskId, task.requester, task.paymentAmount);
|
||||
emit DebugLog("cancelTask", msg.sender, _taskId, block.timestamp);
|
||||
}
|
||||
|
||||
/// @notice After the deadline, the requester reclaims escrow from a task that
///         was never finalized (still Open or InProgress).
/// @dev The three require checks run in this exact order so callers see the
///      same revert reasons: expiry first, then finalization state, then
///      caller identity. The requester check is a require (not the
///      onlyRequester modifier) to preserve that ordering.
/// @param _taskId Identifier of the expired task.
function claimExpiredTask(
    uint256 _taskId
) external validTask(_taskId) nonReentrant {
    Task storage task = tasks[_taskId];

    require(block.timestamp >= task.expiresAt, "Not expired");
    require(
        task.status == TaskStatus.Open ||
            task.status == TaskStatus.InProgress,
        "Already finalized"
    );
    require(msg.sender == task.requester, "Only requester");

    // Effect before interaction: mark expired, then refund the escrow.
    task.status = TaskStatus.Expired;
    cUSD.safeTransfer(task.requester, task.paymentAmount);

    emit TaskExpired(_taskId, task.requester, task.paymentAmount);
    emit DebugLog("claimExpiredTask", msg.sender, _taskId, block.timestamp);
}
|
||||
|
||||
/// @notice Owner withdraws all accumulated platform fees to the owner address.
/// @dev Zeroes the fee accumulator BEFORE the transfer so a reentrant call
///      (already blocked by nonReentrant) could not double-withdraw.
function withdrawPlatformFees() external onlyOwner nonReentrant {
    uint256 pending = platformFeesAccumulated;
    require(pending > 0, "No fees");

    // Effect before interaction: reset the accumulator, then pay out.
    platformFeesAccumulated = 0;
    cUSD.safeTransfer(owner(), pending);

    emit PlatformFeesWithdrawn(owner(), pending);
    emit DebugLog("withdrawPlatformFees", msg.sender, 0, block.timestamp);
}
|
||||
|
||||
// ============ Views ============
|
||||
/// @notice Returns the full task record for `_taskId`.
/// @dev Copies the storage struct into memory; reverts via validTask when the
///      id is out of range.
function getTask(
    uint256 _taskId
) external view validTask(_taskId) returns (Task memory) {
    return tasks[_taskId];
}
|
||||
|
||||
/// @notice True once the task's deadline has been reached (inclusive).
function isTaskExpired(
    uint256 _taskId
) external view validTask(_taskId) returns (bool) {
    // Equivalent to block.timestamp >= expiresAt.
    return tasks[_taskId].expiresAt <= block.timestamp;
}
|
||||
|
||||
/// @notice Total cUSD currently held by this contract (escrowed payments plus
///         any accumulated, not-yet-withdrawn platform fees).
function getContractBalance() external view returns (uint256) {
    return cUSD.balanceOf(address(this));
}
|
||||
|
||||
// ============ Admin Controls ============
|
||||
/// @notice Owner-only emergency stop; delegates to Pausable's internal _pause.
function pause() external onlyOwner {
    _pause();
}
|
||||
|
||||
/// @notice Owner-only resume; delegates to Pausable's internal _unpause.
function unpause() external onlyOwner {
    _unpause();
}
|
||||
}
|
||||
132
dmtp/server/create-task-with-blockchain.ts
Normal file
132
dmtp/server/create-task-with-blockchain.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import 'dotenv/config';
import { ethers } from 'ethers'; // NOTE(review): imported but not referenced in this script
import { prisma } from './src/database/connections';
import { blockchainService } from './src/services/blockchain.service';
import { TaskType } from './src/types/database.types';

/**
 * This script creates a task directly using the blockchain service,
 * bypassing the API authentication requirements.
 *
 * This is useful for testing the complete payment flow.
 *
 * Flow: ensure the requester user exists -> create the escrow task on-chain
 * -> mirror the task row in the database -> bump the requester's counters.
 */

async function createTaskDirectly() {
  try {
    console.log('🚀 Creating task with blockchain integration...\n');

    // Task creator wallet (must be registered first)
    const requesterWallet = '0xA0e793E7257c065b30c46Ef6828F2B3C0de87A8E';

    // Check if user exists, if not create them
    // (addresses are stored lowercased for case-insensitive lookups)
    let requester = await prisma.user.findUnique({
      where: { walletAddress: requesterWallet.toLowerCase() },
    });

    if (!requester) {
      console.log('📝 Creating user...');
      requester = await prisma.user.create({
        data: {
          walletAddress: requesterWallet.toLowerCase(),
          reputationScore: 100,
          // NOTE(review): this account acts as the task requester but is
          // created with role 'worker' — confirm whether 'requester' was intended.
          role: 'worker',
        },
      });
      console.log(`✅ User created: ${requester.id}\n`);
    } else {
      console.log(`✅ User found: ${requester.id}\n`);
    }

    // Task details (payment denominated in cUSD)
    const taskData = {
      title: 'Verify this text - BLOCKCHAIN TASK 3',
      description: 'Check if text makes sense. This task has proper blockchain integration!',
      taskType: TaskType.TEXT_VERIFICATION,
      paymentAmount: 0.01,
      verificationCriteria: {
        aiPrompt: 'Verify if the text makes sense',
        requiredFields: ['text'],
      },
      maxSubmissions: 1,
      expiresAt: new Date('2025-11-01T00:00:00Z'),
    };

    console.log('💰 Task Details:');
    console.log(` Title: ${taskData.title}`);
    console.log(` Payment: ${taskData.paymentAmount} cUSD`);
    console.log(` Type: ${taskData.taskType}`);
    console.log(` Max Submissions: ${taskData.maxSubmissions}`);
    console.log(` Expires: ${taskData.expiresAt}\n`);

    // Calculate duration in whole days until expiry, rounding up
    // (Math.ceil so a partial day still counts as one day on-chain).
    const durationMs = taskData.expiresAt.getTime() - Date.now();
    const durationInDays = Math.ceil(durationMs / (1000 * 60 * 60 * 24));
    console.log(`⏰ Duration: ${durationInDays} days\n`);

    // Step 1: Create task on blockchain (this escrows the payment)
    console.log('⛓️ Creating task on blockchain...');
    const blockchainResult = await blockchainService.createTask(
      taskData.paymentAmount.toString(),
      durationInDays
    );
    console.log(`✅ Blockchain task created!`);
    console.log(` Contract Task ID: ${blockchainResult.taskId}`);
    console.log(` Transaction Hash: ${blockchainResult.txHash}\n`);

    // Step 2: Store in database, linking the row to the on-chain task id
    console.log('💾 Storing task in database...');
    const task = await prisma.task.create({
      data: {
        requesterId: requester.id,
        title: taskData.title,
        description: taskData.description,
        taskType: taskData.taskType,
        paymentAmount: taskData.paymentAmount,
        verificationCriteria: taskData.verificationCriteria,
        maxSubmissions: taskData.maxSubmissions,
        expiresAt: taskData.expiresAt,
        contractTaskId: blockchainResult.taskId,
        status: 'open',
      },
    });

    console.log(`✅ Task created in database!`);
    console.log(` Task ID: ${task.id}`);
    console.log(` Contract Task ID: ${task.contractTaskId}\n`);

    // Update user stats (atomic increment on the requester's counter)
    await prisma.user.update({
      where: { id: requester.id },
      data: {
        totalTasksCreated: {
          increment: 1,
        },
      },
    });

    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
    console.log('✅ SUCCESS! Task created with blockchain integration!');
    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');

    console.log('📋 Task Summary:');
    console.log(` Database ID: ${task.id}`);
    console.log(` Blockchain Contract ID: ${task.contractTaskId}`);
    console.log(` Status: ${task.status}`);
    console.log(` Payment Amount: ${task.paymentAmount} cUSD`);
    console.log(` Transaction Hash: ${blockchainResult.txHash}\n`);

    console.log('🎯 Next Steps:');
    console.log('1. View this task at: http://localhost:3000/tasks');
    console.log(`2. Submit work to task ID: ${task.id}`);
    console.log('3. Verification will run automatically');
    console.log('4. Payment will be released on approval! 🎉\n');

    process.exit(0);
  } catch (error: any) {
    // NOTE(review): if the on-chain create succeeds but the DB write fails,
    // the escrowed task is orphaned — there is no rollback here.
    console.error('\n❌ Error creating task:', error.message);
    console.error('Stack:', error.stack);
    process.exit(1);
  }
}

createTaskDirectly();
|
||||
76
dmtp/server/debug-payment.js
Normal file
76
dmtp/server/debug-payment.js
Normal file
@@ -0,0 +1,76 @@
|
||||
// Debug script to check payment status for address 0xa0e793e7257c065b30c46ef6828f2b3c0de87a8e
// NOTE(review): this header previously referenced
// 0xb12653F335f5C1B56A30afA840d394E90718633A, which did not match the wallet
// actually queried below — corrected to avoid confusion.
import { prisma } from "./src/database/connections.ts";

// Lists the worker's 10 most recent submissions, each with its verification
// status and any matching payment record, then disconnects from the database.
async function debugPayment() {
  try {
    console.log(
      "🔍 Checking payment status for wallet: 0xa0e793e7257c065b30c46ef6828f2b3c0de87a8e"
    );

    // Find user by wallet (stored lowercased)
    const user = await prisma.user.findUnique({
      where: { walletAddress: "0xa0e793e7257c065b30c46ef6828f2b3c0de87a8e" },
    });

    if (!user) {
      console.error("❌ User not found");
      return;
    }

    console.log(`\n✅ User found: ${user.id}`);

    // Find all submissions for this worker (latest first, capped at 10)
    const submissions = await prisma.submission.findMany({
      where: { workerId: user.id },
      include: {
        task: {
          select: {
            id: true,
            title: true,
            paymentAmount: true,
            contractTaskId: true,
            status: true,
          },
        },
      },
      orderBy: { createdAt: "desc" },
      take: 10,
    });

    console.log(`\n📝 Found ${submissions.length} submissions:`);

    for (const submission of submissions) {
      console.log(`\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━`);
      console.log(`📋 Submission ID: ${submission.id}`);
      console.log(` Task: ${submission.task.title}`);
      console.log(` Verification Status: ${submission.verificationStatus}`);
      console.log(` Task Status: ${submission.task.status}`);
      console.log(` Contract Task ID: ${submission.task.contractTaskId}`);
      console.log(` Amount: ${submission.task.paymentAmount} cUSD`);

      // Find payment record for this (task, worker) pair, if one exists
      const payment = await prisma.payment.findFirst({
        where: {
          taskId: submission.taskId,
          workerId: user.id,
        },
      });

      if (payment) {
        console.log(` 💳 Payment Status: ${payment.status}`);
        console.log(` 💳 Transaction Hash: ${payment.transactionHash}`);
        console.log(` 💳 Amount: ${payment.amount}`);
      } else {
        console.log(` ❌ No payment record found`);
      }
    }

    console.log(`\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━`);
  } catch (error) {
    console.error("❌ Error:", error);
  } finally {
    // Always release the Prisma connection so the process can exit cleanly.
    await prisma.$disconnect();
  }
}

debugPayment();
|
||||
99
dmtp/server/deploy-test-token.ts
Normal file
99
dmtp/server/deploy-test-token.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import { ethers } from 'ethers';
import * as dotenv from 'dotenv';
import MockERC20Artifact from './artifacts/contracts/MockERC20.sol/MockERC20.json';

dotenv.config();

/**
 * Deploys the MockERC20 test token to Celo Sepolia, mints 1,000,000 tokens
 * to the deployer, and prints the address to paste into .env as
 * CUSD_SEPOLIA_ADDRESS. Requires PRIVATE_KEY in .env and some testnet CELO.
 */
async function deployTestToken() {
  try {
    console.log('🚀 Deploying Test Token to Celo Sepolia...\n');

    // Initialize provider and signer (RPC URL overridable via CELO_RPC_URL)
    const provider = new ethers.JsonRpcProvider(
      process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'
    );

    const privateKey = process.env.PRIVATE_KEY;
    if (!privateKey) {
      throw new Error('PRIVATE_KEY not configured in .env');
    }

    const signer = new ethers.Wallet(privateKey, provider);
    console.log(`📝 Deploying from: ${signer.address}`);

    // Check balance
    const balance = await provider.getBalance(signer.address);
    const balanceInCELO = ethers.formatEther(balance);
    console.log(`💰 Account balance: ${balanceInCELO} CELO\n`);

    // NOTE(review): this only catches an exactly-zero balance; a dust balance
    // would pass this check and fail later with an out-of-gas/funds error.
    if (parseFloat(balanceInCELO) === 0) {
      console.log('❌ Insufficient balance! Get testnet CELO from:');
      console.log(' https://faucet.celo.org');
      process.exit(1);
    }

    // Deploy token with name and symbol
    const tokenName = 'Test USD';
    const tokenSymbol = 'tUSD';
    console.log(`📦 Deploying ${tokenName} (${tokenSymbol})...\n`);

    // Build a factory from the compiled artifact (abi + bytecode).
    const TestToken = new ethers.ContractFactory(
      MockERC20Artifact.abi,
      MockERC20Artifact.bytecode,
      signer
    );

    console.log('⏳ Sending deployment transaction...');
    const token = await TestToken.deploy(tokenName, tokenSymbol);

    console.log(`📍 Deployment transaction sent: ${token.deploymentTransaction()?.hash}`);
    console.log('⏳ Waiting for confirmation...\n');

    await token.waitForDeployment();
    const tokenAddress = await token.getAddress();

    console.log('✅ Token deployed successfully!\n');
    console.log('═══════════════════════════════════════');
    console.log(`Token Address: ${tokenAddress}`);
    console.log('═══════════════════════════════════════\n');

    // Verify deployment - create a properly typed contract instance
    const deployedToken = new ethers.Contract(tokenAddress, MockERC20Artifact.abi, signer);

    // Mint initial supply (1 million tokens, 18 decimals via parseEther)
    const initialSupply = ethers.parseEther('1000000');
    console.log('⏳ Minting initial supply...');
    const mintTx = await deployedToken.mint(signer.address, initialSupply);
    await mintTx.wait();
    console.log('✅ Minted 1,000,000 tokens\n');

    // Read back on-chain state to confirm the deployment is live.
    const name = await deployedToken.name();
    const symbol = await deployedToken.symbol();
    const totalSupply = await deployedToken.totalSupply();
    const deployerBalance = await deployedToken.balanceOf(signer.address);

    console.log('📊 Token Details:');
    console.log(` Name: ${name}`);
    console.log(` Symbol: ${symbol}`);
    console.log(` Total Supply: ${ethers.formatEther(totalSupply)} ${symbol}`);
    console.log(` Deployer Balance: ${ethers.formatEther(deployerBalance)} ${symbol}\n`);

    // Output for .env
    console.log('📝 Update your .env file with:');
    console.log(` CUSD_SEPOLIA_ADDRESS=${tokenAddress}\n`);

    console.log('📄 To verify on Celoscan:');
    console.log(` 1. Go to https://sepolia.celoscan.io/address/${tokenAddress}#code`);
    console.log(` 2. Click "Verify and Publish"`);
    console.log(` 3. Use contract: contracts/MockERC20.sol:MockERC20`);
    console.log(` 4. Constructor arguments: "${tokenName}", "${tokenSymbol}"\n`);

    process.exit(0);

  } catch (error) {
    console.error('❌ Deployment failed:', error);
    process.exit(1);
  }
}

deployTestToken();
|
||||
98
dmtp/server/diagnose-task-mismatch.ts
Normal file
98
dmtp/server/diagnose-task-mismatch.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import 'dotenv/config';
import { prisma } from './src/database/connections';
import { blockchainService } from './src/services/blockchain.service';

/**
 * Cross-checks the 10 most recent DB tasks that carry a contractTaskId
 * against the on-chain TaskEscrow state, reporting tasks that are missing
 * from the contract or whose status has drifted, then prints remediation
 * options. Read-only: no DB or chain state is modified.
 */
async function diagnoseTaskMismatch() {
  try {
    console.log('🔍 Diagnosing Task Mismatch between Database and Blockchain...\n');

    // Get contract info
    const contractAddress = blockchainService.getContractAddress();
    console.log(`📋 Contract Address: ${contractAddress}`);

    const taskCounter = await blockchainService.getTaskCounter();
    console.log(`📊 Blockchain Task Counter: ${taskCounter}\n`);

    // Get tasks from database that have a contractTaskId (latest 10)
    const tasksInDb = await prisma.task.findMany({
      where: {
        contractTaskId: {
          not: null,
        },
      },
      select: {
        id: true,
        title: true,
        contractTaskId: true,
        status: true,
        paymentAmount: true,
        createdAt: true,
      },
      orderBy: {
        createdAt: 'desc',
      },
      take: 10,
    });

    console.log(`💾 Found ${tasksInDb.length} tasks in database with contractTaskId\n`);

    if (tasksInDb.length === 0) {
      console.log('⚠️ No tasks with blockchain integration found in database');
      process.exit(0);
    }

    console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
    console.log('Checking each task on blockchain...\n');

    // Per-task lookup; a failure for one task does not abort the loop.
    for (const task of tasksInDb) {
      console.log(`\n📋 Task: ${task.title}`);
      console.log(` DB ID: ${task.id}`);
      console.log(` Contract Task ID: ${task.contractTaskId}`);
      console.log(` DB Status: ${task.status}`);
      console.log(` Payment: ${task.paymentAmount} cUSD`);

      try {
        // Non-null assertion is safe: the query filtered on contractTaskId != null.
        const blockchainTask = await blockchainService.getTask(task.contractTaskId!);

        if (!blockchainTask) {
          console.log(` ❌ NOT FOUND on blockchain`);
          console.log(` Issue: Task ${task.contractTaskId} does not exist on contract ${contractAddress}`);
        } else {
          console.log(` ✅ FOUND on blockchain`);
          // Status index maps to the contract's TaskStatus enum ordering.
          console.log(` Blockchain Status: ${['Open', 'InProgress', 'Completed', 'Cancelled', 'Expired'][blockchainTask.status]}`);
          console.log(` Requester: ${blockchainTask.requester}`);
          console.log(` Worker: ${blockchainTask.worker === '0x0000000000000000000000000000000000000000' ? 'None' : blockchainTask.worker}`);
          console.log(` Payment: ${blockchainTask.paymentAmount} cUSD`);
        }
      } catch (error: any) {
        console.log(` ❌ ERROR checking blockchain: ${error.message}`);
      }

      console.log(' ' + '─'.repeat(60));
    }

    console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
    console.log('\n💡 Recommendations:');
    console.log('1. If tasks are NOT FOUND on blockchain:');
    console.log(' - The contract address may have changed');
    console.log(' - Check CONTRACT_ADDRESS in .env matches the deployed contract');
    console.log(' - You may need to create new tasks with the current contract\n');

    console.log('2. If tasks exist but have wrong status:');
    console.log(' - Database and blockchain are out of sync');
    console.log(' - Verify worker assignment happened on blockchain\n');

    console.log('3. To fix:');
    console.log(' - Option A: Update CONTRACT_ADDRESS to the old contract');
    console.log(' - Option B: Create new tasks with current contract');
    console.log(' - Option C: Clear old tasks and start fresh\n');

    process.exit(0);
  } catch (error: any) {
    console.error('❌ Error:', error.message);
    console.error(error.stack);
    process.exit(1);
  }
}

diagnoseTaskMismatch();
|
||||
68
dmtp/server/hardhat.config.ts
Normal file
68
dmtp/server/hardhat.config.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import "@nomicfoundation/hardhat-toolbox";
|
||||
import "@nomicfoundation/hardhat-verify";
|
||||
import * as dotenv from "dotenv";
|
||||
import { HardhatUserConfig } from "hardhat/config";
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const config: HardhatUserConfig = {
|
||||
solidity: {
|
||||
version: "0.8.20",
|
||||
settings: {
|
||||
optimizer: {
|
||||
enabled: true,
|
||||
runs: 200,
|
||||
},
|
||||
},
|
||||
},
|
||||
networks: {
|
||||
// Celo Sepolia Testnet
|
||||
sepolia: {
|
||||
url: "https://forno.celo-sepolia.celo-testnet.org",
|
||||
accounts: process.env.PRIVATE_KEY ? [process.env.PRIVATE_KEY] : [],
|
||||
chainId: 11142220,
|
||||
},
|
||||
// Celo Mainnet
|
||||
celo: {
|
||||
url: "https://forno.celo.org",
|
||||
accounts: process.env.PRIVATE_KEY ? [process.env.PRIVATE_KEY] : [],
|
||||
chainId: 42220,
|
||||
},
|
||||
// Local Hardhat network
|
||||
hardhat: {
|
||||
chainId: 31337,
|
||||
},
|
||||
},
|
||||
etherscan: {
|
||||
apiKey: {
|
||||
sepolia: process.env.CELOSCAN_API_KEY || "",
|
||||
celo: process.env.CELOSCAN_API_KEY || "",
|
||||
},
|
||||
customChains: [
|
||||
{
|
||||
network: "Celo Sepolia Testnet",
|
||||
chainId: 11142220,
|
||||
urls: {
|
||||
apiURL: "https://api-sepolia.celoscan.io/api",
|
||||
browserURL: "https://sepolia.celoscan.io",
|
||||
},
|
||||
},
|
||||
{
|
||||
network: "celo",
|
||||
chainId: 42220,
|
||||
urls: {
|
||||
apiURL: "https://api.celoscan.io/api",
|
||||
browserURL: "https://celoscan.io",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
paths: {
|
||||
sources: "./contracts",
|
||||
tests: "./test",
|
||||
cache: "./cache",
|
||||
artifacts: "./artifacts",
|
||||
},
|
||||
};
|
||||
|
||||
export default config;
|
||||
10215
dmtp/server/package-lock.json
generated
Normal file
10215
dmtp/server/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
66
dmtp/server/package.json
Normal file
66
dmtp/server/package.json
Normal file
@@ -0,0 +1,66 @@
|
||||
{
|
||||
"name": "server",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"prisma:generate": "prisma generate",
|
||||
"prisma:migrate": "prisma migrate dev",
|
||||
"prisma:studio": "prisma studio",
|
||||
"prisma:push": "prisma db push",
|
||||
"db:seed": "tsx src/database/seed.ts",
|
||||
"compile": "hardhat compile",
|
||||
"test": "mocha -r ts-node/register test/**/*.test.ts",
|
||||
"test:coverage": "hardhat coverage",
|
||||
"deploy:sepolia": "hardhat run scripts/deploy.ts --network sepolia",
|
||||
"deploy:local": "hardhat run scripts/deploy.ts --network hardhat",
|
||||
"interact": "hardhat run scripts/interact.ts --network sepolia",
|
||||
"verify": "hardhat verify --network sepolia",
|
||||
"node": "hardhat node",
|
||||
"test:ai": "mocha -r ts-node/register test/ai-verification.test.ts",
|
||||
"dev": "nodemon --exec tsx src/server.ts",
|
||||
"build": "tsc && npm run copy:js && prisma generate",
|
||||
"copy:js": "rsync -av --include='*/' --include='*.js' --exclude='*' src/ dist/",
|
||||
"start": "node dist/server.js",
|
||||
"start:worker": "node dist/workers/index.js",
|
||||
"start:all": "npm run build && concurrently \"npm run start\" \"npm run start:worker\"",
|
||||
"worker": "ts-node src/workers/index.ts",
|
||||
"dev:worker": "nodemon src/workers/index.ts",
|
||||
"dev:all": "concurrently \"npm run dev\" \"npm run dev:worker\""
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@bull-board/api": "^6.14.0",
|
||||
"@bull-board/express": "^6.14.0",
|
||||
"@google/generative-ai": "^0.24.1",
|
||||
"@openzeppelin/contracts": "^5.4.0",
|
||||
"@prisma/client": "^5.22.0",
|
||||
"@types/bull": "^3.15.9",
|
||||
"@types/cors": "^2.8.19",
|
||||
"@types/express": "^5.0.4",
|
||||
"@types/morgan": "^1.9.10",
|
||||
"bull": "^4.16.5",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^17.2.3",
|
||||
"express": "^5.1.0",
|
||||
"express-validator": "^7.3.0",
|
||||
"helmet": "^8.1.0",
|
||||
"morgan": "^1.10.1",
|
||||
"redis": "^5.9.0",
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nomicfoundation/hardhat-ethers": "^3.1.0",
|
||||
"@nomicfoundation/hardhat-toolbox": "^6.1.0",
|
||||
"@nomicfoundation/hardhat-verify": "^2.1.1",
|
||||
"@types/node": "^24.9.1",
|
||||
"concurrently": "^9.2.1",
|
||||
"ethers": "^6.15.0",
|
||||
"hardhat": "^2.26.3",
|
||||
"nodemon": "^3.0.0",
|
||||
"prisma": "^5.22.0",
|
||||
"tsx": "^4.20.6"
|
||||
}
|
||||
}
|
||||
12
dmtp/server/prisma.config.ts
Normal file
12
dmtp/server/prisma.config.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
// Prisma CLI configuration (consumed by `prisma generate` / `prisma migrate`).
import { defineConfig, env } from "prisma/config";

export default defineConfig({
  // Location of the schema file relative to the project root.
  schema: "prisma/schema.prisma",
  migrations: {
    // Where generated SQL migrations live.
    path: "prisma/migrations",
  },
  // NOTE(review): "classic" selects the classic engine mode — see Prisma docs
  // before changing, as it affects how the client talks to the database.
  engine: "classic",
  datasource: {
    // Connection string is read from the environment at CLI time.
    url: env("DATABASE_URL"),
  },
});
|
||||
140
dmtp/server/prisma/migrations/20251026032137_init/migration.sql
Normal file
140
dmtp/server/prisma/migrations/20251026032137_init/migration.sql
Normal file
@@ -0,0 +1,140 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "UserRole" AS ENUM ('requester', 'worker');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "TaskType" AS ENUM ('text_verification', 'image_labeling', 'survey', 'content_moderation');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "TaskStatus" AS ENUM ('open', 'in_progress', 'completed', 'expired');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "VerificationStatus" AS ENUM ('pending', 'approved', 'rejected');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "PaymentStatus" AS ENUM ('pending', 'completed', 'failed');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "tasks" (
|
||||
"id" TEXT NOT NULL,
|
||||
"requester_id" TEXT NOT NULL,
|
||||
"title" VARCHAR(100) NOT NULL,
|
||||
"description" TEXT NOT NULL,
|
||||
"task_type" "TaskType" NOT NULL,
|
||||
"payment_amount" DECIMAL(10,2) NOT NULL,
|
||||
"status" "TaskStatus" NOT NULL DEFAULT 'open',
|
||||
"verification_criteria" JSONB NOT NULL,
|
||||
"max_submissions" INTEGER NOT NULL,
|
||||
"contract_task_id" INTEGER,
|
||||
"expires_at" TIMESTAMP(3) NOT NULL,
|
||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "tasks_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "users" (
|
||||
"id" TEXT NOT NULL,
|
||||
"wallet_address" TEXT NOT NULL,
|
||||
"phone_number" TEXT,
|
||||
"role" "UserRole" NOT NULL,
|
||||
"reputation_score" INTEGER NOT NULL DEFAULT 0,
|
||||
"total_earnings" DECIMAL(10,2) NOT NULL DEFAULT 0,
|
||||
"total_tasks_created" INTEGER NOT NULL DEFAULT 0,
|
||||
"total_tasks_completed" INTEGER NOT NULL DEFAULT 0,
|
||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "users_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "submissions" (
|
||||
"id" TEXT NOT NULL,
|
||||
"task_id" TEXT NOT NULL,
|
||||
"worker_id" TEXT NOT NULL,
|
||||
"submission_data" JSONB NOT NULL,
|
||||
"ai_verification_result" JSONB,
|
||||
"verification_status" "VerificationStatus" NOT NULL DEFAULT 'pending',
|
||||
"payment_transaction_hash" TEXT,
|
||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updated_at" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "submissions_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "payments" (
|
||||
"id" TEXT NOT NULL,
|
||||
"task_id" TEXT NOT NULL,
|
||||
"worker_id" TEXT NOT NULL,
|
||||
"amount" DECIMAL(10,2) NOT NULL,
|
||||
"transaction_hash" TEXT NOT NULL,
|
||||
"status" "PaymentStatus" NOT NULL DEFAULT 'pending',
|
||||
"created_at" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "payments_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "tasks_requester_id_idx" ON "tasks"("requester_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "tasks_status_idx" ON "tasks"("status");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "tasks_task_type_idx" ON "tasks"("task_type");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "tasks_expires_at_idx" ON "tasks"("expires_at");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "tasks_contract_task_id_idx" ON "tasks"("contract_task_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "users_wallet_address_key" ON "users"("wallet_address");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "users_wallet_address_idx" ON "users"("wallet_address");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "users_role_idx" ON "users"("role");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "submissions_task_id_idx" ON "submissions"("task_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "submissions_worker_id_idx" ON "submissions"("worker_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "submissions_verification_status_idx" ON "submissions"("verification_status");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "submissions_task_id_worker_id_key" ON "submissions"("task_id", "worker_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "payments_task_id_idx" ON "payments"("task_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "payments_worker_id_idx" ON "payments"("worker_id");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "payments_transaction_hash_idx" ON "payments"("transaction_hash");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "payments_status_idx" ON "payments"("status");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "tasks" ADD CONSTRAINT "tasks_requester_id_fkey" FOREIGN KEY ("requester_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "submissions" ADD CONSTRAINT "submissions_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "tasks"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "submissions" ADD CONSTRAINT "submissions_worker_id_fkey" FOREIGN KEY ("worker_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "payments" ADD CONSTRAINT "payments_task_id_fkey" FOREIGN KEY ("task_id") REFERENCES "tasks"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "payments" ADD CONSTRAINT "payments_worker_id_fkey" FOREIGN KEY ("worker_id") REFERENCES "users"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
3
dmtp/server/prisma/migrations/migration_lock.toml
Normal file
3
dmtp/server/prisma/migrations/migration_lock.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
# Please do not edit this file manually
|
||||
# It should be added in your version-control system (i.e. Git)
|
||||
provider = "postgresql"
|
||||
123
dmtp/server/prisma/schema.prisma
Normal file
123
dmtp/server/prisma/schema.prisma
Normal file
@@ -0,0 +1,123 @@
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = env("DATABASE_URL")
|
||||
}
|
||||
|
||||
model Task {
|
||||
id String @id @default(uuid())
|
||||
requesterId String @map("requester_id")
|
||||
title String @db.VarChar(100)
|
||||
description String
|
||||
taskType TaskType @map("task_type")
|
||||
paymentAmount Decimal @map("payment_amount") @db.Decimal(10, 2)
|
||||
status TaskStatus @default(open)
|
||||
verificationCriteria Json @map("verification_criteria")
|
||||
maxSubmissions Int @map("max_submissions")
|
||||
contractTaskId Int? @map("contract_task_id")
|
||||
expiresAt DateTime @map("expires_at")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
payments Payment[]
|
||||
submissions Submission[]
|
||||
requester User @relation("TaskRequester", fields: [requesterId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([requesterId])
|
||||
@@index([status])
|
||||
@@index([taskType])
|
||||
@@index([expiresAt])
|
||||
@@index([contractTaskId])
|
||||
@@map("tasks")
|
||||
}
|
||||
|
||||
model User {
|
||||
id String @id @default(uuid())
|
||||
walletAddress String @unique @map("wallet_address")
|
||||
phoneNumber String? @map("phone_number")
|
||||
role UserRole
|
||||
reputationScore Int @default(0) @map("reputation_score")
|
||||
totalEarnings Decimal @default(0) @map("total_earnings") @db.Decimal(10, 2)
|
||||
totalTasksCreated Int @default(0) @map("total_tasks_created")
|
||||
totalTasksCompleted Int @default(0) @map("total_tasks_completed")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
payments Payment[]
|
||||
submissions Submission[]
|
||||
createdTasks Task[] @relation("TaskRequester")
|
||||
|
||||
@@index([walletAddress])
|
||||
@@index([role])
|
||||
@@map("users")
|
||||
}
|
||||
|
||||
model Submission {
|
||||
id String @id @default(uuid())
|
||||
taskId String @map("task_id")
|
||||
workerId String @map("worker_id")
|
||||
submissionData Json @map("submission_data")
|
||||
aiVerificationResult Json? @map("ai_verification_result")
|
||||
verificationStatus VerificationStatus @default(pending) @map("verification_status")
|
||||
paymentTransactionHash String? @map("payment_transaction_hash")
|
||||
createdAt DateTime @default(now()) @map("created_at")
|
||||
updatedAt DateTime @updatedAt @map("updated_at")
|
||||
task Task @relation(fields: [taskId], references: [id], onDelete: Cascade)
|
||||
worker User @relation(fields: [workerId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([taskId, workerId])
|
||||
@@index([taskId])
|
||||
@@index([workerId])
|
||||
@@index([verificationStatus])
|
||||
@@map("submissions")
|
||||
}
|
||||
|
||||
/// A payout to a worker for an approved task submission, keyed to the
/// on-chain transaction that released the funds.
model Payment {
  id              String        @id @default(uuid())
  taskId          String        @map("task_id")
  workerId        String        @map("worker_id")
  /// Amount paid out; Decimal(10,2) to avoid floating-point money errors.
  amount          Decimal       @db.Decimal(10, 2)
  /// Hash of the blockchain transaction that transferred the funds.
  transactionHash String        @map("transaction_hash")
  status          PaymentStatus @default(pending)
  createdAt       DateTime      @default(now()) @map("created_at")

  task   Task @relation(fields: [taskId], references: [id], onDelete: Cascade)
  worker User @relation(fields: [workerId], references: [id], onDelete: Cascade)

  @@index([taskId])
  @@index([workerId])
  @@index([transactionHash])
  @@index([status])
  @@map("payments")
}
|
||||
|
||||
/// Primary role a user plays on the platform: creates tasks or completes them.
enum UserRole {
  requester
  worker
}
|
||||
|
||||
/// Kind of micro-task; determines which AI verification prompt is used.
enum TaskType {
  text_verification
  image_labeling
  survey
  content_moderation
}
|
||||
|
||||
/// Lifecycle state of a task.
enum TaskStatus {
  open
  in_progress
  completed
  expired
}
|
||||
|
||||
/// Outcome of AI verification for a submission.
enum VerificationStatus {
  pending
  approved
  rejected
}
|
||||
|
||||
/// Settlement state of a payout transaction.
enum PaymentStatus {
  pending
  completed
  failed
}
|
||||
76
dmtp/server/redeploy-task-escrow.ts
Normal file
76
dmtp/server/redeploy-task-escrow.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import 'dotenv/config';
|
||||
import { ethers } from 'ethers';
|
||||
import { readFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
async function redeployTaskEscrow() {
|
||||
try {
|
||||
console.log('🚀 Redeploying TaskEscrow Contract...\n');
|
||||
|
||||
const provider = new ethers.JsonRpcProvider(
|
||||
process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'
|
||||
);
|
||||
|
||||
const privateKey = process.env.PRIVATE_KEY;
|
||||
if (!privateKey) {
|
||||
throw new Error('PRIVATE_KEY not configured');
|
||||
}
|
||||
|
||||
const signer = new ethers.Wallet(privateKey, provider);
|
||||
console.log(`📝 Deploying from: ${signer.address}`);
|
||||
|
||||
const balance = await provider.getBalance(signer.address);
|
||||
console.log(`💰 Balance: ${ethers.formatEther(balance)} CELO\n`);
|
||||
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
if (!cUSDAddress) {
|
||||
throw new Error('CUSD_SEPOLIA_ADDRESS not configured');
|
||||
}
|
||||
|
||||
console.log(`💰 Using cUSD Token: ${cUSDAddress}\n`);
|
||||
|
||||
// Load contract artifact
|
||||
const TaskEscrowArtifact = JSON.parse(
|
||||
readFileSync(
|
||||
join(__dirname, './artifacts/contracts/TaskEscrow.sol/TaskEscrow.json'),
|
||||
'utf8'
|
||||
)
|
||||
);
|
||||
|
||||
const TaskEscrow = new ethers.ContractFactory(
|
||||
TaskEscrowArtifact.abi,
|
||||
TaskEscrowArtifact.bytecode,
|
||||
signer
|
||||
);
|
||||
|
||||
console.log('⏳ Deploying TaskEscrow contract...');
|
||||
const taskEscrow = await TaskEscrow.deploy(cUSDAddress);
|
||||
|
||||
console.log(`📍 Deployment transaction: ${taskEscrow.deploymentTransaction()?.hash}`);
|
||||
console.log('⏳ Waiting for confirmation...\n');
|
||||
|
||||
await taskEscrow.waitForDeployment();
|
||||
const contractAddress = await taskEscrow.getAddress();
|
||||
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
console.log('✅ TaskEscrow Contract Deployed!');
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
||||
console.log(`Contract Address: ${contractAddress}`);
|
||||
console.log(`cUSD Token: ${cUSDAddress}\n`);
|
||||
|
||||
console.log('📝 Update your .env file with:');
|
||||
console.log(` CONTRACT_ADDRESS=${contractAddress}\n`);
|
||||
|
||||
console.log('🎯 Next Steps:');
|
||||
console.log('1. Update CONTRACT_ADDRESS in your .env file');
|
||||
console.log('2. Run: npx tsx approve-cusd.ts');
|
||||
console.log('3. Run: npx tsx create-task-with-blockchain.ts\n');
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('❌ Deployment failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
redeployTaskEscrow();
|
||||
59
dmtp/server/scripts/deploy.js
Normal file
59
dmtp/server/scripts/deploy.js
Normal file
@@ -0,0 +1,59 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const hardhat_1 = require("hardhat");
|
||||
async function main() {
|
||||
console.log("🚀 Starting deployment to Celo Sepolia...\n");
|
||||
// Get deployer account
|
||||
const [deployer] = await hardhat_1.ethers.getSigners();
|
||||
console.log("📝 Deploying contracts with account:", deployer.address);
|
||||
// Get account balance
|
||||
const balance = await hardhat_1.ethers.provider.getBalance(deployer.address);
|
||||
console.log("💰 Account balance:", hardhat_1.ethers.formatEther(balance), "CELO\n");
|
||||
// cUSD token address on Sepolia testnet
|
||||
const CUSD_SEPOLIA = "0x874069fa1eb16d44d622f2e0ca25eea172369bc1";
|
||||
console.log("📄 cUSD Token Address:", CUSD_SEPOLIA);
|
||||
// Deploy TaskEscrow contract
|
||||
console.log("\n⏳ Deploying TaskEscrow contract...");
|
||||
const TaskEscrow = await hardhat_1.ethers.getContractFactory("TaskEscrow");
|
||||
const taskEscrow = await TaskEscrow.deploy(CUSD_SEPOLIA);
|
||||
await taskEscrow.waitForDeployment();
|
||||
const taskEscrowAddress = await taskEscrow.getAddress();
|
||||
console.log("✅ TaskEscrow deployed to:", taskEscrowAddress);
|
||||
// Verify contract details
|
||||
console.log("\n📋 Contract Details:");
|
||||
console.log("-----------------------------------");
|
||||
console.log("Network: Celo Sepolia");
|
||||
console.log("Contract: TaskEscrow");
|
||||
console.log("Address:", taskEscrowAddress);
|
||||
console.log("Owner:", deployer.address);
|
||||
console.log("cUSD Token:", CUSD_SEPOLIA);
|
||||
console.log("Platform Fee: 5%");
|
||||
console.log("-----------------------------------\n");
|
||||
// Save deployment info
|
||||
const deploymentInfo = {
|
||||
network: "sepolia",
|
||||
contractName: "TaskEscrow",
|
||||
contractAddress: taskEscrowAddress,
|
||||
deployer: deployer.address,
|
||||
cUSDAddress: CUSD_SEPOLIA,
|
||||
deployedAt: new Date().toISOString(),
|
||||
blockNumber: await hardhat_1.ethers.provider.getBlockNumber(),
|
||||
};
|
||||
console.log("💾 Deployment Info:");
|
||||
console.log(JSON.stringify(deploymentInfo, null, 2));
|
||||
console.log("\n🔍 Verify contract on Celoscan:");
|
||||
console.log(`npx hardhat verify --network sepolia ${taskEscrowAddress} ${CUSD_SEPOLIA}`);
|
||||
console.log("\n✨ Deployment completed successfully!\n");
|
||||
// Return addresses for use in scripts
|
||||
return {
|
||||
taskEscrowAddress,
|
||||
cUSDAddress: CUSD_SEPOLIA,
|
||||
};
|
||||
}
|
||||
// Execute deployment
|
||||
main()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error("❌ Deployment failed:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
72
dmtp/server/scripts/deploy.ts
Normal file
72
dmtp/server/scripts/deploy.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
async function main() {
|
||||
console.log("🚀 Starting deployment to Celo Sepolia...\n");
|
||||
|
||||
// Get deployer account
|
||||
const [deployer] = await ethers.getSigners();
|
||||
console.log("📝 Deploying contracts with account:", deployer.address);
|
||||
|
||||
// Get account balance
|
||||
const balance = await ethers.provider.getBalance(deployer.address);
|
||||
console.log("💰 Account balance:", ethers.formatEther(balance), "CELO\n");
|
||||
|
||||
// cUSD token address on Sepolia testnet
|
||||
const CUSD_SEPOLIA = "0x874069fa1eb16d44d622f2e0ca25eea172369bc1";
|
||||
|
||||
console.log("📄 cUSD Token Address:", CUSD_SEPOLIA);
|
||||
|
||||
// Deploy TaskEscrow contract
|
||||
console.log("\n⏳ Deploying TaskEscrow contract...");
|
||||
const TaskEscrow = await ethers.getContractFactory("TaskEscrow");
|
||||
const taskEscrow = await TaskEscrow.deploy(CUSD_SEPOLIA);
|
||||
|
||||
await taskEscrow.waitForDeployment();
|
||||
const taskEscrowAddress = await taskEscrow.getAddress();
|
||||
|
||||
console.log("✅ TaskEscrow deployed to:", taskEscrowAddress);
|
||||
|
||||
// Verify contract details
|
||||
console.log("\n📋 Contract Details:");
|
||||
console.log("-----------------------------------");
|
||||
console.log("Network: Celo Sepolia");
|
||||
console.log("Contract: TaskEscrow");
|
||||
console.log("Address:", taskEscrowAddress);
|
||||
console.log("Owner:", deployer.address);
|
||||
console.log("cUSD Token:", CUSD_SEPOLIA);
|
||||
console.log("Platform Fee: 5%");
|
||||
console.log("-----------------------------------\n");
|
||||
|
||||
// Save deployment info
|
||||
const deploymentInfo = {
|
||||
network: "sepolia",
|
||||
contractName: "TaskEscrow",
|
||||
contractAddress: taskEscrowAddress,
|
||||
deployer: deployer.address,
|
||||
cUSDAddress: CUSD_SEPOLIA,
|
||||
deployedAt: new Date().toISOString(),
|
||||
blockNumber: await ethers.provider.getBlockNumber(),
|
||||
};
|
||||
|
||||
console.log("💾 Deployment Info:");
|
||||
console.log(JSON.stringify(deploymentInfo, null, 2));
|
||||
|
||||
console.log("\n🔍 Verify contract on Celoscan:");
|
||||
console.log(`npx hardhat verify --network sepolia ${taskEscrowAddress} ${CUSD_SEPOLIA}`);
|
||||
|
||||
console.log("\n✨ Deployment completed successfully!\n");
|
||||
|
||||
// Return addresses for use in scripts
|
||||
return {
|
||||
taskEscrowAddress,
|
||||
cUSDAddress: CUSD_SEPOLIA,
|
||||
};
|
||||
}
|
||||
|
||||
// Execute deployment
|
||||
main()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error("❌ Deployment failed:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
72
dmtp/server/scripts/interact.js
Normal file
72
dmtp/server/scripts/interact.js
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const hardhat_1 = require("hardhat");
|
||||
async function main() {
|
||||
console.log("🔧 Interacting with TaskEscrow contract...\n");
|
||||
// Replace with your deployed contract address
|
||||
const TASK_ESCROW_ADDRESS = "YOUR_DEPLOYED_CONTRACT_ADDRESS";
|
||||
const CUSD_ADDRESS = "0x874069Fa1Eb16D44d622F2e0Ca25eeA172369bC1";
|
||||
const [deployer, worker] = await hardhat_1.ethers.getSigners();
|
||||
// Get contract instances
|
||||
const TaskEscrow = await hardhat_1.ethers.getContractAt("TaskEscrow", TASK_ESCROW_ADDRESS);
|
||||
const cUSD = await hardhat_1.ethers.getContractAt("IERC20", CUSD_ADDRESS);
|
||||
console.log("📝 Contract Address:", TASK_ESCROW_ADDRESS);
|
||||
console.log("👤 Deployer:", deployer.address);
|
||||
console.log("👷 Worker:", worker.address, "\n");
|
||||
// Check cUSD balance
|
||||
const balance = await cUSD.balanceOf(deployer.address);
|
||||
console.log("💰 Deployer cUSD Balance:", hardhat_1.ethers.formatEther(balance), "\n");
|
||||
// Example: Create a task
|
||||
console.log("📝 Creating a task...");
|
||||
const paymentAmount = hardhat_1.ethers.parseEther("5"); // 5 cUSD
|
||||
const durationInDays = 7;
|
||||
// Approve TaskEscrow to spend cUSD
|
||||
console.log("✅ Approving cUSD spending...");
|
||||
const approveTx = await cUSD.approve(TASK_ESCROW_ADDRESS, paymentAmount);
|
||||
await approveTx.wait();
|
||||
console.log("✅ Approved!\n");
|
||||
// Create task
|
||||
const createTx = await TaskEscrow.createTask(paymentAmount, durationInDays);
|
||||
const receipt = await createTx.wait();
|
||||
// Get taskId from event
|
||||
const event = receipt?.logs.find((log) => {
|
||||
try {
|
||||
return TaskEscrow.interface.parseLog(log)?.name === "TaskCreated";
|
||||
}
|
||||
catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
const parsedEvent = TaskEscrow.interface.parseLog(event);
|
||||
const taskId = parsedEvent?.args[0];
|
||||
console.log("✅ Task created! Task ID:", taskId.toString(), "\n");
|
||||
// Get task details
|
||||
const task = await TaskEscrow.getTask(taskId);
|
||||
console.log("📋 Task Details:");
|
||||
console.log("-----------------------------------");
|
||||
console.log("Task ID:", task.taskId.toString());
|
||||
console.log("Requester:", task.requester);
|
||||
console.log("Payment Amount:", hardhat_1.ethers.formatEther(task.paymentAmount), "cUSD");
|
||||
console.log("Status:", task.status);
|
||||
console.log("-----------------------------------\n");
|
||||
// Assign worker
|
||||
console.log("👷 Assigning worker...");
|
||||
const assignTx = await TaskEscrow.assignWorker(taskId, worker.address);
|
||||
await assignTx.wait();
|
||||
console.log("✅ Worker assigned!\n");
|
||||
// Approve submission (as owner)
|
||||
console.log("✅ Approving submission...");
|
||||
const approveTx2 = await TaskEscrow.approveSubmission(taskId);
|
||||
await approveTx2.wait();
|
||||
console.log("✅ Submission approved! Payment released.\n");
|
||||
// Check worker balance
|
||||
const workerBalance = await cUSD.balanceOf(worker.address);
|
||||
console.log("💰 Worker cUSD Balance:", hardhat_1.ethers.formatEther(workerBalance), "\n");
|
||||
console.log("✨ Interaction completed!");
|
||||
}
|
||||
main()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error("❌ Error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
87
dmtp/server/scripts/interact.ts
Normal file
87
dmtp/server/scripts/interact.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { ethers } from "hardhat";
|
||||
|
||||
async function main() {
|
||||
console.log("🔧 Interacting with TaskEscrow contract...\n");
|
||||
|
||||
// Replace with your deployed contract address
|
||||
const TASK_ESCROW_ADDRESS = "YOUR_DEPLOYED_CONTRACT_ADDRESS";
|
||||
const CUSD_ADDRESS = "0x874069Fa1Eb16D44d622F2e0Ca25eeA172369bC1";
|
||||
|
||||
const [deployer, worker] = await ethers.getSigners();
|
||||
|
||||
// Get contract instances
|
||||
const TaskEscrow = await ethers.getContractAt("TaskEscrow", TASK_ESCROW_ADDRESS);
|
||||
const cUSD = await ethers.getContractAt("IERC20", CUSD_ADDRESS);
|
||||
|
||||
console.log("📝 Contract Address:", TASK_ESCROW_ADDRESS);
|
||||
console.log("👤 Deployer:", deployer.address);
|
||||
console.log("👷 Worker:", worker.address, "\n");
|
||||
|
||||
// Check cUSD balance
|
||||
const balance = await cUSD.balanceOf(deployer.address);
|
||||
console.log("💰 Deployer cUSD Balance:", ethers.formatEther(balance), "\n");
|
||||
|
||||
// Example: Create a task
|
||||
console.log("📝 Creating a task...");
|
||||
const paymentAmount = ethers.parseEther("5"); // 5 cUSD
|
||||
const durationInDays = 7;
|
||||
|
||||
// Approve TaskEscrow to spend cUSD
|
||||
console.log("✅ Approving cUSD spending...");
|
||||
const approveTx = await cUSD.approve(TASK_ESCROW_ADDRESS, paymentAmount);
|
||||
await approveTx.wait();
|
||||
console.log("✅ Approved!\n");
|
||||
|
||||
// Create task
|
||||
const createTx = await TaskEscrow.createTask(paymentAmount, durationInDays);
|
||||
const receipt = await createTx.wait();
|
||||
|
||||
// Get taskId from event
|
||||
const event = receipt?.logs.find((log: any) => {
|
||||
try {
|
||||
return TaskEscrow.interface.parseLog(log)?.name === "TaskCreated";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
const parsedEvent = TaskEscrow.interface.parseLog(event as any);
|
||||
const taskId = parsedEvent?.args[0];
|
||||
|
||||
console.log("✅ Task created! Task ID:", taskId.toString(), "\n");
|
||||
|
||||
// Get task details
|
||||
const task = await TaskEscrow.getTask(taskId);
|
||||
console.log("📋 Task Details:");
|
||||
console.log("-----------------------------------");
|
||||
console.log("Task ID:", task.taskId.toString());
|
||||
console.log("Requester:", task.requester);
|
||||
console.log("Payment Amount:", ethers.formatEther(task.paymentAmount), "cUSD");
|
||||
console.log("Status:", task.status);
|
||||
console.log("-----------------------------------\n");
|
||||
|
||||
// Assign worker
|
||||
console.log("👷 Assigning worker...");
|
||||
const assignTx = await TaskEscrow.assignWorker(taskId, worker.address);
|
||||
await assignTx.wait();
|
||||
console.log("✅ Worker assigned!\n");
|
||||
|
||||
// Approve submission (as owner)
|
||||
console.log("✅ Approving submission...");
|
||||
const approveTx2 = await TaskEscrow.approveSubmission(taskId);
|
||||
await approveTx2.wait();
|
||||
console.log("✅ Submission approved! Payment released.\n");
|
||||
|
||||
// Check worker balance
|
||||
const workerBalance = await cUSD.balanceOf(worker.address);
|
||||
console.log("💰 Worker cUSD Balance:", ethers.formatEther(workerBalance), "\n");
|
||||
|
||||
console.log("✨ Interaction completed!");
|
||||
}
|
||||
|
||||
main()
|
||||
.then(() => process.exit(0))
|
||||
.catch((error) => {
|
||||
console.error("❌ Error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
129
dmtp/server/seedTasks.ts
Normal file
129
dmtp/server/seedTasks.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import 'dotenv/config';
|
||||
import { prisma } from './src/database/connections';
|
||||
import { blockchainService } from './src/services/blockchain.service';
|
||||
import { TaskType } from './src/types/database.types';
|
||||
|
||||
/**
|
||||
* Seeds multiple tasks with blockchain integration.
|
||||
*/
|
||||
async function seedTasks() {
|
||||
try {
|
||||
console.log('🚀 Starting blockchain task seeding...\n');
|
||||
|
||||
const requesterWallet = '0xA0e793E7257c065b30c46Ef6828F2B3C0de87A8E';
|
||||
|
||||
// Find or create requester
|
||||
let requester = await prisma.user.findUnique({
|
||||
where: { walletAddress: requesterWallet.toLowerCase() },
|
||||
});
|
||||
|
||||
if (!requester) {
|
||||
console.log('📝 Creating user...');
|
||||
requester = await prisma.user.create({
|
||||
data: {
|
||||
walletAddress: requesterWallet.toLowerCase(),
|
||||
reputationScore: 100,
|
||||
role: 'worker',
|
||||
},
|
||||
});
|
||||
console.log(`✅ User created: ${requester.id}\n`);
|
||||
} else {
|
||||
console.log(`✅ User found: ${requester.id}\n`);
|
||||
}
|
||||
|
||||
// Define tasks to seed
|
||||
const tasks = [
|
||||
{
|
||||
title: 'Grammar Check - Business Email',
|
||||
description: 'Review and verify that the business email text is grammatically correct and uses proper English.',
|
||||
taskType: TaskType.TEXT_VERIFICATION,
|
||||
paymentAmount: 0.01,
|
||||
verificationCriteria: {
|
||||
aiPrompt: 'Check if the text has correct grammar, proper spelling, and uses professional English language.',
|
||||
requiredFields: ['text'],
|
||||
},
|
||||
maxSubmissions: 3,
|
||||
expiresAt: new Date('2025-11-05T00:00:00Z'),
|
||||
},
|
||||
{
|
||||
title: 'English Text Verification - Blog Post',
|
||||
description: 'Verify that the blog post content follows proper English grammar rules and sentence structure.',
|
||||
taskType: TaskType.TEXT_VERIFICATION,
|
||||
paymentAmount: 0.015,
|
||||
verificationCriteria: {
|
||||
aiPrompt: 'Verify the text has correct grammar, punctuation, sentence structure, and uses proper English.',
|
||||
requiredFields: ['text'],
|
||||
},
|
||||
maxSubmissions: 2,
|
||||
expiresAt: new Date('2025-11-08T00:00:00Z'),
|
||||
},
|
||||
{
|
||||
title: 'Grammar Correction - Product Description',
|
||||
description: 'Check product description text for grammar errors and ensure it uses clear, proper English.',
|
||||
taskType: TaskType.TEXT_VERIFICATION,
|
||||
paymentAmount: 0.02,
|
||||
verificationCriteria: {
|
||||
aiPrompt: 'Ensure the text is grammatically correct, has proper punctuation, and uses clear professional English.',
|
||||
requiredFields: ['text'],
|
||||
},
|
||||
maxSubmissions: 5,
|
||||
expiresAt: new Date('2025-11-12T00:00:00Z'),
|
||||
},
|
||||
];
|
||||
|
||||
for (const taskData of tasks) {
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
console.log(`🧩 Creating task: ${taskData.title}`);
|
||||
console.log(`💰 Payment: ${taskData.paymentAmount} cUSD`);
|
||||
console.log(`⏰ Expires: ${taskData.expiresAt}\n`);
|
||||
|
||||
const durationMs = taskData.expiresAt.getTime() - Date.now();
|
||||
const durationInDays = Math.ceil(durationMs / (1000 * 60 * 60 * 24));
|
||||
|
||||
console.log('⛓️ Creating task on blockchain...');
|
||||
const blockchainResult = await blockchainService.createTask(
|
||||
taskData.paymentAmount.toString(),
|
||||
durationInDays
|
||||
);
|
||||
|
||||
console.log(`✅ Blockchain task created!`);
|
||||
console.log(` Contract Task ID: ${blockchainResult.taskId}`);
|
||||
console.log(` Tx Hash: ${blockchainResult.txHash}\n`);
|
||||
|
||||
const dbTask = await prisma.task.create({
|
||||
data: {
|
||||
requesterId: requester.id,
|
||||
title: taskData.title,
|
||||
description: taskData.description,
|
||||
taskType: taskData.taskType,
|
||||
paymentAmount: taskData.paymentAmount,
|
||||
verificationCriteria: taskData.verificationCriteria,
|
||||
maxSubmissions: taskData.maxSubmissions,
|
||||
expiresAt: taskData.expiresAt,
|
||||
contractTaskId: blockchainResult.taskId,
|
||||
status: 'open',
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`✅ Task stored in DB! ID: ${dbTask.id}`);
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n');
|
||||
}
|
||||
|
||||
// Update user stats
|
||||
await prisma.user.update({
|
||||
where: { id: requester.id },
|
||||
data: {
|
||||
totalTasksCreated: { increment: tasks.length },
|
||||
},
|
||||
});
|
||||
|
||||
console.log('✅ All tasks successfully created with blockchain integration!\n');
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('\n❌ Error seeding tasks:', error.message);
|
||||
console.error('Stack:', error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
seedTasks();
|
||||
99
dmtp/server/show-env-info.ts
Normal file
99
dmtp/server/show-env-info.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
import 'dotenv/config';
|
||||
import { blockchainService } from './src/services/blockchain.service';
|
||||
import { prisma } from './src/database/connections';
|
||||
|
||||
async function showEnvironmentInfo() {
|
||||
try {
|
||||
console.log('🔍 Environment & Contract Information\n');
|
||||
console.log('━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
|
||||
console.log('\n📁 Working Directory:');
|
||||
console.log(` ${process.cwd()}\n`);
|
||||
|
||||
console.log('🌐 Network Configuration:');
|
||||
console.log(` RPC URL: ${process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'}`);
|
||||
console.log(` Chain ID: ${process.env.CHAIN_ID || '11142220'}\n`);
|
||||
|
||||
console.log('📋 Contract Addresses:');
|
||||
const contractAddress = process.env.CONTRACT_ADDRESS;
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
console.log(` TaskEscrow: ${contractAddress || 'NOT SET'}`);
|
||||
console.log(` cUSD Token: ${cUSDAddress || 'NOT SET'}\n`);
|
||||
|
||||
if (!contractAddress) {
|
||||
console.log('❌ CONTRACT_ADDRESS not configured in .env!');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('⛓️ Blockchain Service:');
|
||||
const actualContract = blockchainService.getContractAddress();
|
||||
console.log(` Connected to: ${actualContract}`);
|
||||
console.log(` Match: ${actualContract.toLowerCase() === contractAddress.toLowerCase() ? '✅' : '❌'}\n`);
|
||||
|
||||
const taskCounter = await blockchainService.getTaskCounter();
|
||||
console.log(` Total tasks on blockchain: ${taskCounter}\n`);
|
||||
|
||||
console.log('💾 Database Statistics:');
|
||||
const totalTasks = await prisma.task.count();
|
||||
const tasksWithContract = await prisma.task.count({
|
||||
where: { contractTaskId: { not: null } },
|
||||
});
|
||||
const activeTasks = await prisma.task.count({
|
||||
where: {
|
||||
status: { in: ['open', 'in_progress'] },
|
||||
contractTaskId: { not: null },
|
||||
},
|
||||
});
|
||||
|
||||
console.log(` Total tasks in DB: ${totalTasks}`);
|
||||
console.log(` Tasks with blockchain: ${tasksWithContract}`);
|
||||
console.log(` Active blockchain tasks: ${activeTasks}\n`);
|
||||
|
||||
if (activeTasks > 0) {
|
||||
console.log('📝 Active Tasks:');
|
||||
const tasks = await prisma.task.findMany({
|
||||
where: {
|
||||
status: { in: ['open', 'in_progress'] },
|
||||
contractTaskId: { not: null },
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
contractTaskId: true,
|
||||
status: true,
|
||||
},
|
||||
take: 5,
|
||||
});
|
||||
|
||||
for (const task of tasks) {
|
||||
console.log(`\n Task: ${task.title.substring(0, 50)}...`);
|
||||
console.log(` DB ID: ${task.id}`);
|
||||
console.log(` Contract ID: ${task.contractTaskId}`);
|
||||
console.log(` Status: ${task.status}`);
|
||||
|
||||
try {
|
||||
const blockchainTask = await blockchainService.getTask(task.contractTaskId!);
|
||||
if (blockchainTask) {
|
||||
const statuses = ['Open', 'InProgress', 'Completed', 'Cancelled', 'Expired'];
|
||||
console.log(` Blockchain: ✅ ${statuses[blockchainTask.status]}`);
|
||||
} else {
|
||||
console.log(` Blockchain: ❌ NOT FOUND`);
|
||||
}
|
||||
} catch (error: any) {
|
||||
console.log(` Blockchain: ❌ Error - ${error.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━');
|
||||
console.log('\n✅ Environment check complete!\n');
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: any) {
|
||||
console.error('\n❌ Error:', error.message);
|
||||
console.error(error.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
showEnvironmentInfo();
|
||||
64
dmtp/server/src/app.ts
Normal file
64
dmtp/server/src/app.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import cors from 'cors';
|
||||
import express, { Application } from 'express';
|
||||
import helmet from 'helmet';
|
||||
import morgan from 'morgan';
|
||||
import { createQueueDashboard } from './dashboard';
|
||||
import { testConnection } from './database/connections';
|
||||
import { ErrorMiddleware } from './middlewares/error.middleware';
|
||||
import routes from './routes';
|
||||
import './workers'; // Start verification worker
|
||||
|
||||
export function createApp(): Application {
|
||||
const app = express();
|
||||
|
||||
// Security middleware
|
||||
app.use(helmet());
|
||||
|
||||
// CORS configuration
|
||||
app.use(
|
||||
cors({
|
||||
origin: [
|
||||
process.env.FRONTEND_URL || 'http://localhost:3000',
|
||||
'http://localhost:4000', // Admin dashboard
|
||||
],
|
||||
credentials: true,
|
||||
methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'],
|
||||
allowedHeaders: [
|
||||
'Content-Type',
|
||||
'Authorization',
|
||||
'X-Wallet-Address',
|
||||
'X-Signature',
|
||||
'X-Message',
|
||||
'X-Timestamp',
|
||||
],
|
||||
})
|
||||
);
|
||||
|
||||
// Body parsing middleware
|
||||
app.use(express.json({ limit: '10mb' }));
|
||||
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
|
||||
|
||||
// Logging middleware
|
||||
if (process.env.NODE_ENV !== 'test') {
|
||||
app.use(morgan('combined'));
|
||||
}
|
||||
|
||||
app.use('/admin/queues', createQueueDashboard());
|
||||
|
||||
// API routes
|
||||
app.use('/api/v1', routes);
|
||||
|
||||
// 404 handler
|
||||
app.use(ErrorMiddleware.notFound);
|
||||
|
||||
// Global error handler
|
||||
app.use(ErrorMiddleware.handle);
|
||||
|
||||
return app;
|
||||
}
|
||||
|
||||
// Test database connection on startup: fail fast and exit rather than
// serving requests without a reachable database.
testConnection().catch((error) => {
  console.error('Failed to connect to database:', error);
  process.exit(1);
});
|
||||
73
dmtp/server/src/config/ai.config.js
Normal file
73
dmtp/server/src/config/ai.config.js
Normal file
@@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.PROMPTS = exports.aiConfig = void 0;
|
||||
exports.aiConfig = {
|
||||
apiKey: process.env.GEMINI_API_KEY || 'AIzaSyBUjw754YS4sZZEWNYk9z2sC30YfiwubQI',
|
||||
model: process.env.GEMINI_MODEL || 'gemini-2.5-flash',
|
||||
temperature: 0, // Consistent results
|
||||
maxOutputTokens: 1024,
|
||||
cache: {
|
||||
enabled: process.env.REDIS_URL ? true : false,
|
||||
ttl: 3600, // 1 hour cache
|
||||
},
|
||||
retry: {
|
||||
maxRetries: 3,
|
||||
initialDelay: 1000, // 1 second
|
||||
maxDelay: 10000, // 10 seconds
|
||||
backoffMultiplier: 2,
|
||||
},
|
||||
rateLimit: {
|
||||
maxRequests: 60, // 60 requests per minute
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
},
|
||||
};
|
||||
// Prompt templates
|
||||
exports.PROMPTS = {
|
||||
TEXT_VERIFICATION: `You are a task verification assistant. Analyze the following submission against the criteria.
|
||||
|
||||
VERIFICATION CRITERIA:
|
||||
{verificationCriteria}
|
||||
|
||||
USER SUBMISSION:
|
||||
{submissionText}
|
||||
|
||||
TASK:
|
||||
1. Check if the submission meets ALL criteria
|
||||
2. Provide a verification score (0-100)
|
||||
3. List any violations or issues
|
||||
4. Give approval recommendation (APPROVE/REJECT)
|
||||
|
||||
OUTPUT FORMAT (JSON only):
|
||||
{
|
||||
"approved": boolean,
|
||||
"score": number,
|
||||
"violations": string[],
|
||||
"reasoning": string
|
||||
}
|
||||
|
||||
Be strict but fair. Only approve submissions that clearly meet criteria. Return ONLY valid JSON, no markdown or additional text.`,
|
||||
IMAGE_VERIFICATION: `You are an image verification expert. Analyze this image against the task requirements.
|
||||
|
||||
TASK DESCRIPTION:
|
||||
{taskDescription}
|
||||
|
||||
VERIFICATION CRITERIA:
|
||||
{verificationCriteria}
|
||||
|
||||
Analyze the image and determine:
|
||||
1. Does it match the task requirements?
|
||||
2. Is the image quality acceptable (not blurry, proper lighting)?
|
||||
3. Are there any inappropriate or irrelevant elements?
|
||||
4. Quality score (0-100)
|
||||
|
||||
OUTPUT FORMAT (JSON only):
|
||||
{
|
||||
"approved": boolean,
|
||||
"score": number,
|
||||
"image_quality": "excellent" | "good" | "poor",
|
||||
"issues": string[],
|
||||
"reasoning": string
|
||||
}
|
||||
|
||||
Return ONLY valid JSON, no markdown or additional text.`,
|
||||
};
|
||||
81
dmtp/server/src/config/ai.config.ts
Normal file
81
dmtp/server/src/config/ai.config.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
import { AIServiceConfig } from '../types/ai.types';
|
||||
|
||||
export const aiConfig: AIServiceConfig = {
|
||||
apiKey: process.env.GEMINI_API_KEY || 'AIzaSyBUjw754YS4sZZEWNYk9z2sC30YfiwubQI',
|
||||
model: process.env.GEMINI_MODEL || 'gemini-2.5-flash',
|
||||
temperature: 0, // Consistent results
|
||||
maxOutputTokens: 1024,
|
||||
|
||||
cache: {
|
||||
enabled: process.env.REDIS_URL ? true : false,
|
||||
ttl: 3600, // 1 hour cache
|
||||
},
|
||||
|
||||
retry: {
|
||||
maxRetries: 3,
|
||||
initialDelay: 1000, // 1 second
|
||||
maxDelay: 10000, // 10 seconds
|
||||
backoffMultiplier: 2,
|
||||
},
|
||||
|
||||
rateLimit: {
|
||||
maxRequests: 60, // 60 requests per minute
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
},
|
||||
};
|
||||
|
||||
// Prompt templates
|
||||
export const PROMPTS = {
|
||||
TEXT_VERIFICATION: `You are a task verification assistant. Analyze the following submission against the criteria.
|
||||
|
||||
VERIFICATION CRITERIA:
|
||||
{verificationCriteria}
|
||||
|
||||
USER SUBMISSION:
|
||||
{submissionText}
|
||||
|
||||
TASK:
|
||||
1. Check if the submission meets ALL criteria
|
||||
2. Provide a verification score (0-100)
|
||||
3. List any violations or issues
|
||||
4. Give approval recommendation (APPROVE/REJECT)
|
||||
|
||||
CRITICAL: Respond with ONLY a valid JSON object. No markdown, no code blocks, no explanations.
|
||||
|
||||
Required JSON format:
|
||||
{
|
||||
"approved": true,
|
||||
"score": 85,
|
||||
"violations": ["issue 1", "issue 2"],
|
||||
"reasoning": "Brief explanation"
|
||||
}
|
||||
|
||||
Your response:`,
|
||||
|
||||
IMAGE_VERIFICATION: `You are an image verification expert. Analyze this image against the task requirements.
|
||||
|
||||
TASK DESCRIPTION:
|
||||
{taskDescription}
|
||||
|
||||
VERIFICATION CRITERIA:
|
||||
{verificationCriteria}
|
||||
|
||||
Analyze the image and determine:
|
||||
1. Does it match the task requirements?
|
||||
2. Is the image quality acceptable (not blurry, proper lighting)?
|
||||
3. Are there any inappropriate or irrelevant elements?
|
||||
4. Quality score (0-100)
|
||||
|
||||
CRITICAL: Respond with ONLY a valid JSON object. No markdown, no code blocks, no explanations.
|
||||
|
||||
Required JSON format:
|
||||
{
|
||||
"approved": true,
|
||||
"score": 90,
|
||||
"image_quality": "excellent",
|
||||
"issues": ["issue 1"],
|
||||
"reasoning": "Brief explanation"
|
||||
}
|
||||
|
||||
Your response:`,
|
||||
};
|
||||
98
dmtp/server/src/config/moderation.config.js
Normal file
98
dmtp/server/src/config/moderation.config.js
Normal file
@@ -0,0 +1,98 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ALLOWLIST_PATTERNS = exports.BLOCKLIST_PATTERNS = exports.MODERATION_SYSTEM_PROMPT = exports.moderationConfig = void 0;
|
||||
const moderation_types_1 = require("../types/moderation.types");
|
||||
exports.moderationConfig = {
|
||||
// Confidence threshold for auto-rejection
|
||||
autoRejectThreshold: 85,
|
||||
// Confidence threshold for flagging
|
||||
flagThreshold: 60,
|
||||
// Enable pre-filtering (blocklist/allowlist)
|
||||
enablePreFiltering: true,
|
||||
// Log all moderation decisions
|
||||
enableLogging: true,
|
||||
// Safety settings for Gemini
|
||||
safetySettings: {
|
||||
HARM_CATEGORY_HARASSMENT: 'BLOCK_NONE',
|
||||
HARM_CATEGORY_HATE_SPEECH: 'BLOCK_NONE',
|
||||
HARM_CATEGORY_SEXUALLY_EXPLICIT: 'BLOCK_NONE',
|
||||
HARM_CATEGORY_DANGEROUS_CONTENT: 'BLOCK_NONE',
|
||||
},
|
||||
};
|
||||
// System prompt for content moderation
|
||||
exports.MODERATION_SYSTEM_PROMPT = `You are a content moderation AI. Your task is to detect violations in user submissions for a micro-task platform.
|
||||
|
||||
MODERATION CATEGORIES:
|
||||
1. Spam: Repetitive, nonsensical, or bot-generated content
|
||||
2. Toxic: Rude, disrespectful, offensive language
|
||||
3. Hate Speech: Targets protected characteristics (race, religion, gender, etc.)
|
||||
4. Fraud: Attempts to game the system or provide fake data
|
||||
5. Inappropriate: Sexual, violent, or illegal content
|
||||
|
||||
ANALYSIS STEPS:
|
||||
1. Classify the content across all categories
|
||||
2. Assign severity: LOW/MEDIUM/HIGH/CRITICAL
|
||||
3. For CRITICAL violations, auto-reject immediately
|
||||
4. For MEDIUM/HIGH, flag for manual review
|
||||
5. For LOW, approve with warning
|
||||
|
||||
SEVERITY GUIDELINES:
|
||||
- NONE: No violation detected
|
||||
- LOW: Minor violation, acceptable with warning
|
||||
- MEDIUM: Moderate violation, needs review
|
||||
- HIGH: Serious violation, likely rejection
|
||||
- CRITICAL: Severe violation, immediate auto-reject
|
||||
|
||||
CONFIDENCE LEVELS:
|
||||
- >85%: High confidence, can take automated action
|
||||
- 60-85%: Medium confidence, flag for review
|
||||
- <60%: Low confidence, approve but log
|
||||
|
||||
OUTPUT FORMAT (JSON only):
|
||||
{
|
||||
"flagged": boolean,
|
||||
"categories": {
|
||||
"spam": {"detected": boolean, "confidence": number (0-100), "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": ["specific phrase 1"]},
|
||||
"toxic": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
|
||||
"hate_speech": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
|
||||
"fraud": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
|
||||
"inappropriate": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []}
|
||||
},
|
||||
"action": "APPROVE" | "FLAG_REVIEW" | "AUTO_REJECT",
|
||||
"explanation": "Brief explanation of the decision"
|
||||
}
|
||||
|
||||
IMPORTANT:
|
||||
- Be precise and accurate
|
||||
- High confidence (>85%) is required for auto-rejection
|
||||
- Consider context (sarcasm, educational content, quotes)
|
||||
- Return ONLY valid JSON, no markdown or additional text`;
|
||||
// Blocklist patterns (instant rejection)
|
||||
exports.BLOCKLIST_PATTERNS = [
|
||||
{
|
||||
pattern: /\b(viagra|cialis|pharmacy)\b/gi,
|
||||
category: 'spam',
|
||||
severity: moderation_types_1.ModerationSeverity.CRITICAL,
|
||||
},
|
||||
{
|
||||
pattern: /\b(click here|buy now|limited time|act now)\b.*\b(http|www)\b/gi,
|
||||
category: 'spam',
|
||||
severity: moderation_types_1.ModerationSeverity.HIGH,
|
||||
},
|
||||
{
|
||||
pattern: /(.)\1{10,}/g, // Repeated characters (10+)
|
||||
category: 'spam',
|
||||
severity: moderation_types_1.ModerationSeverity.MEDIUM,
|
||||
},
|
||||
{
|
||||
pattern: /\b(kill yourself|kys)\b/gi,
|
||||
category: 'toxic',
|
||||
severity: moderation_types_1.ModerationSeverity.CRITICAL,
|
||||
},
|
||||
];
|
||||
// Allowlist patterns (skip AI check if matched)
|
||||
exports.ALLOWLIST_PATTERNS = [
|
||||
/^(yes|no|maybe|ok|okay)$/i,
|
||||
/^\d+$/, // Just numbers
|
||||
/^[a-z]{1,3}$/i, // Single letters or short codes
|
||||
];
|
||||
107
dmtp/server/src/config/moderation.config.ts
Normal file
107
dmtp/server/src/config/moderation.config.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { ModerationSeverity } from '../types/moderation.types';

// Tunable thresholds and switches for the content-moderation pipeline.
// Confidence values are on a 0-100 scale, matching the model's JSON output.
export const moderationConfig = {
  // Confidence threshold for auto-rejection
  autoRejectThreshold: 85,

  // Confidence threshold for flagging
  flagThreshold: 60,

  // Enable pre-filtering (blocklist/allowlist)
  enablePreFiltering: true,

  // Log all moderation decisions
  enableLogging: true,

  // Safety settings for Gemini
  // NOTE(review): all categories set to BLOCK_NONE — presumably so the
  // moderation prompt can inspect raw content rather than having Gemini's
  // built-in filter reject it first; confirm this is intentional.
  safetySettings: {
    HARM_CATEGORY_HARASSMENT: 'BLOCK_NONE',
    HARM_CATEGORY_HATE_SPEECH: 'BLOCK_NONE',
    HARM_CATEGORY_SEXUALLY_EXPLICIT: 'BLOCK_NONE',
    HARM_CATEGORY_DANGEROUS_CONTENT: 'BLOCK_NONE',
  },
};

// System prompt for content moderation
// The consumer relies on the model returning a bare JSON object in the
// format described below (no markdown fences).
export const MODERATION_SYSTEM_PROMPT = `You are a content moderation AI. Your task is to detect violations in user submissions for a micro-task platform.

MODERATION CATEGORIES:
1. Spam: Repetitive, nonsensical, or bot-generated content
2. Toxic: Rude, disrespectful, offensive language
3. Hate Speech: Targets protected characteristics (race, religion, gender, etc.)
4. Fraud: Attempts to game the system or provide fake data
5. Inappropriate: Sexual, violent, or illegal content

ANALYSIS STEPS:
1. Classify the content across all categories
2. Assign severity: LOW/MEDIUM/HIGH/CRITICAL
3. For CRITICAL violations, auto-reject immediately
4. For MEDIUM/HIGH, flag for manual review
5. For LOW, approve with warning

SEVERITY GUIDELINES:
- NONE: No violation detected
- LOW: Minor violation, acceptable with warning
- MEDIUM: Moderate violation, needs review
- HIGH: Serious violation, likely rejection
- CRITICAL: Severe violation, immediate auto-reject

CONFIDENCE LEVELS:
- >85%: High confidence, can take automated action
- 60-85%: Medium confidence, flag for review
- <60%: Low confidence, approve but log

OUTPUT FORMAT (JSON only):
{
"flagged": boolean,
"categories": {
"spam": {"detected": boolean, "confidence": number (0-100), "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": ["specific phrase 1"]},
"toxic": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
"hate_speech": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
"fraud": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []},
"inappropriate": {"detected": boolean, "confidence": number, "severity": "NONE|LOW|MEDIUM|HIGH|CRITICAL", "examples": []}
},
"action": "APPROVE" | "FLAG_REVIEW" | "AUTO_REJECT",
"explanation": "Brief explanation of the decision"
}

IMPORTANT:
- Be precise and accurate
- High confidence (>85%) is required for auto-rejection
- Consider context (sarcasm, educational content, quotes)
- Return ONLY valid JSON, no markdown or additional text`;

// Blocklist patterns (instant rejection)
// Regexes checked before any AI call; a match applies the listed severity.
export const BLOCKLIST_PATTERNS: Array<{
  pattern: RegExp;
  category: string;
  severity: ModerationSeverity;
}> = [
  {
    pattern: /\b(viagra|cialis|pharmacy)\b/gi,
    category: 'spam',
    severity: ModerationSeverity.CRITICAL,
  },
  {
    pattern: /\b(click here|buy now|limited time|act now)\b.*\b(http|www)\b/gi,
    category: 'spam',
    severity: ModerationSeverity.HIGH,
  },
  {
    pattern: /(.)\1{10,}/g, // Repeated characters (10+)
    category: 'spam',
    severity: ModerationSeverity.MEDIUM,
  },
  {
    pattern: /\b(kill yourself|kys)\b/gi,
    category: 'toxic',
    severity: ModerationSeverity.CRITICAL,
  },
];

// Allowlist patterns (skip AI check if matched)
// Trivial answers that are always safe to accept without moderation.
export const ALLOWLIST_PATTERNS: RegExp[] = [
  /^(yes|no|maybe|ok|okay)$/i,
  /^\d+$/, // Just numbers
  /^[a-z]{1,3}$/i, // Single letters or short codes
];
|
||||
95
dmtp/server/src/config/redis.config.js
Normal file
95
dmtp/server/src/config/redis.config.js
Normal file
@@ -0,0 +1,95 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.queueOptions = exports.redisConfig = void 0;
|
||||
exports.parseRedisUrl = parseRedisUrl;
|
||||
exports.testRedisConnection = testRedisConnection;
|
||||
// Parse Upstash Redis URL if provided
|
||||
function parseRedisUrl(url) {
|
||||
if (!url) {
|
||||
console.warn('⚠️ REDIS_URL not provided, using local Redis');
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const parsed = new URL(url);
|
||||
return {
|
||||
redis: {
|
||||
host: parsed.hostname,
|
||||
port: parseInt(parsed.port) || 6379,
|
||||
password: parsed.password,
|
||||
username: parsed.username || 'default',
|
||||
tls: parsed.protocol === 'rediss:' ? {
|
||||
rejectUnauthorized: false, // For Upstash
|
||||
} : undefined,
|
||||
enableOfflineQueue: true, // Changed to true
|
||||
maxRetriesPerRequest: null, // Important for Bull
|
||||
connectTimeout: 10000,
|
||||
retryStrategy(times) {
|
||||
const delay = Math.min(times * 50, 2000);
|
||||
return delay;
|
||||
},
|
||||
reconnectOnError(err) {
|
||||
const targetError = 'READONLY';
|
||||
if (err.message.includes(targetError)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
catch (error) {
|
||||
console.error('❌ Failed to parse Redis URL:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
// Use REDIS_URL from environment or fall back to local (for development)
|
||||
const REDIS_URL = "rediss://default:AVftAAIncDIyMjNiZDFmNDQ1ZjI0YmQ3YTllZDUwZmQ5YTE4ZWZlNXAyMjI1MDk@dear-rattler-22509.upstash.io:6379";
|
||||
const parsedConfig = parseRedisUrl(REDIS_URL);
|
||||
exports.redisConfig = parsedConfig || {
|
||||
redis: {
|
||||
host: process.env.REDIS_HOST || 'dear-rattler-22509.upstash.io',
|
||||
port: parseInt(process.env.REDIS_PORT || '6379'),
|
||||
password: process.env.REDIS_PASSWORD,
|
||||
enableOfflineQueue: true,
|
||||
maxRetriesPerRequest: null, // Important for Bull
|
||||
retryStrategy(times) {
|
||||
const delay = Math.min(times * 50, 2000);
|
||||
return delay;
|
||||
},
|
||||
},
|
||||
};
|
||||
// Queue options with retry and timeout
|
||||
exports.queueOptions = {
|
||||
redis: exports.redisConfig.redis,
|
||||
defaultJobOptions: {
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 2000,
|
||||
},
|
||||
timeout: 30000,
|
||||
removeOnComplete: 100,
|
||||
removeOnFail: 500,
|
||||
},
|
||||
};
|
||||
// Connection test helper
|
||||
async function testRedisConnection() {
|
||||
const Redis = require('ioredis');
|
||||
const client = new Redis(exports.redisConfig.redis);
|
||||
return new Promise((resolve, reject) => {
|
||||
client.on('connect', () => {
|
||||
console.log('✅ Redis connected successfully');
|
||||
client.quit();
|
||||
resolve(true);
|
||||
});
|
||||
client.on('error', (err) => {
|
||||
console.error('❌ Redis connection error:', err.message);
|
||||
client.quit();
|
||||
reject(err);
|
||||
});
|
||||
setTimeout(() => {
|
||||
client.quit();
|
||||
reject(new Error('Redis connection timeout'));
|
||||
}, 10000);
|
||||
});
|
||||
}
|
||||
100
dmtp/server/src/config/redis.config.ts
Normal file
100
dmtp/server/src/config/redis.config.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { QueueOptions } from 'bull';
|
||||
|
||||
// Parse Upstash Redis URL if provided
|
||||
export function parseRedisUrl(url?: string) {
|
||||
if (!url) {
|
||||
console.warn('⚠️ REDIS_URL not provided, using local Redis');
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = new URL(url);
|
||||
return {
|
||||
redis: {
|
||||
host: parsed.hostname,
|
||||
port: parseInt(parsed.port) || 6379,
|
||||
password: parsed.password,
|
||||
username: parsed.username || 'default',
|
||||
tls: parsed.protocol === 'rediss:' ? {
|
||||
rejectUnauthorized: false, // For Upstash
|
||||
} : undefined,
|
||||
enableOfflineQueue: true, // Changed to true
|
||||
maxRetriesPerRequest: null, // Important for Bull
|
||||
connectTimeout: 10000,
|
||||
retryStrategy(times) {
|
||||
const delay = Math.min(times * 50, 2000);
|
||||
return delay;
|
||||
},
|
||||
reconnectOnError(err) {
|
||||
const targetError = 'READONLY';
|
||||
if (err.message.includes(targetError)) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('❌ Failed to parse Redis URL:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Use REDIS_URL from environment or fall back to local (for development)
|
||||
const REDIS_URL = "rediss://default:AVftAAIncDIyMjNiZDFmNDQ1ZjI0YmQ3YTllZDUwZmQ5YTE4ZWZlNXAyMjI1MDk@dear-rattler-22509.upstash.io:6379";
|
||||
|
||||
const parsedConfig = parseRedisUrl(REDIS_URL);
|
||||
|
||||
export const redisConfig = parsedConfig || {
|
||||
redis: {
|
||||
host: process.env.REDIS_HOST || 'dear-rattler-22509.upstash.io',
|
||||
port: parseInt(process.env.REDIS_PORT || '6379'),
|
||||
password: process.env.REDIS_PASSWORD,
|
||||
enableOfflineQueue: true,
|
||||
maxRetriesPerRequest: null, // Important for Bull
|
||||
retryStrategy(times) {
|
||||
const delay = Math.min(times * 50, 2000);
|
||||
return delay;
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Queue options with retry and timeout
|
||||
export const queueOptions: QueueOptions = {
|
||||
redis: redisConfig.redis,
|
||||
defaultJobOptions: {
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 2000,
|
||||
},
|
||||
timeout: 30000,
|
||||
removeOnComplete: 100,
|
||||
removeOnFail: 500,
|
||||
},
|
||||
};
|
||||
|
||||
// Connection test helper
|
||||
export async function testRedisConnection() {
|
||||
const Redis = require('ioredis');
|
||||
const client = new Redis(redisConfig.redis);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
client.on('connect', () => {
|
||||
console.log('✅ Redis connected successfully');
|
||||
client.quit();
|
||||
resolve(true);
|
||||
});
|
||||
|
||||
client.on('error', (err: Error) => {
|
||||
console.error('❌ Redis connection error:', err.message);
|
||||
client.quit();
|
||||
reject(err);
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
client.quit();
|
||||
reject(new Error('Redis connection timeout'));
|
||||
}, 10000);
|
||||
});
|
||||
}
|
||||
451
dmtp/server/src/controllers/submission.controller.js
Normal file
451
dmtp/server/src/controllers/submission.controller.js
Normal file
@@ -0,0 +1,451 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SubmissionController = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const verification_queue_1 = require("../queues/verification.queue");
|
||||
const database_types_1 = require("../types/database.types");
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class SubmissionController {
|
||||
/**
|
||||
* POST /api/submissions/submit
|
||||
* Submit a task
|
||||
*/
|
||||
static async submitTask(req, res) {
|
||||
try {
|
||||
const submissionData = req.body;
|
||||
const workerId = req.user.userId;
|
||||
console.log(`\n📤 Worker ${workerId} submitting task ${submissionData.taskId}`);
|
||||
// Step 1: Get task details
|
||||
const task = await connections_1.prisma.task.findUnique({
|
||||
where: { id: submissionData.taskId },
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!task) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Task");
|
||||
return;
|
||||
}
|
||||
// Step 2: Validate task is still open
|
||||
if (task.status !== database_types_1.TaskStatus.OPEN) {
|
||||
response_util_1.ResponseUtil.error(res, "Task is not open for submissions", "TASK_NOT_OPEN", 400);
|
||||
return;
|
||||
}
|
||||
// Step 3: Check if task expired
|
||||
if (task.expiresAt < new Date()) {
|
||||
response_util_1.ResponseUtil.error(res, "Task has expired", "TASK_EXPIRED", 400);
|
||||
return;
|
||||
}
|
||||
// Step 4: Check if max submissions reached
|
||||
if (task._count.submissions >= task.maxSubmissions) {
|
||||
response_util_1.ResponseUtil.error(res, "Task has reached maximum submissions", "MAX_SUBMISSIONS_REACHED", 400);
|
||||
return;
|
||||
}
|
||||
// Step 5: Check if worker already submitted
|
||||
const existingSubmission = await connections_1.prisma.submission.findUnique({
|
||||
where: {
|
||||
taskId_workerId: {
|
||||
taskId: submissionData.taskId,
|
||||
workerId: workerId,
|
||||
},
|
||||
},
|
||||
});
|
||||
if (existingSubmission) {
|
||||
response_util_1.ResponseUtil.error(res, "You have already submitted for this task", "DUPLICATE_SUBMISSION", 400);
|
||||
return;
|
||||
}
|
||||
// Step 6: Check if worker is not the requester
|
||||
if (task.requesterId === workerId) {
|
||||
response_util_1.ResponseUtil.error(res, "You cannot submit to your own task", "SELF_SUBMISSION", 400);
|
||||
return;
|
||||
}
|
||||
// Step 7: Create submission
|
||||
const submission = await connections_1.prisma.submission.create({
|
||||
data: {
|
||||
taskId: submissionData.taskId,
|
||||
workerId: workerId,
|
||||
submissionData: submissionData.submissionData,
|
||||
verificationStatus: database_types_1.VerificationStatus.PENDING,
|
||||
},
|
||||
include: {
|
||||
task: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
paymentAmount: true,
|
||||
contractTaskId: true,
|
||||
},
|
||||
},
|
||||
worker: {
|
||||
select: {
|
||||
walletAddress: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
// Update task status to in_progress
|
||||
if (task.status === database_types_1.TaskStatus.OPEN) {
|
||||
await connections_1.prisma.task.update({
|
||||
where: { id: task.id },
|
||||
data: { status: database_types_1.TaskStatus.IN_PROGRESS },
|
||||
});
|
||||
}
|
||||
console.log(`✅ Submission created: ${submission.id}`);
|
||||
// Step 7.2: Create pending payment record
|
||||
try {
|
||||
await connections_1.prisma.payment.create({
|
||||
data: {
|
||||
taskId: submissionData.taskId,
|
||||
workerId: workerId,
|
||||
amount: submission.task.paymentAmount,
|
||||
transactionHash: "pending", // Placeholder until blockchain confirms
|
||||
status: "pending",
|
||||
},
|
||||
});
|
||||
console.log(`✅ Payment record created with pending status`);
|
||||
}
|
||||
catch (paymentError) {
|
||||
console.error("Failed to create payment record:", paymentError);
|
||||
// Don't fail the submission if payment record creation fails
|
||||
}
|
||||
// Step 7.5: Assign worker on blockchain
|
||||
if (submission.task.contractTaskId) {
|
||||
try {
|
||||
const { blockchainService } = await Promise.resolve().then(() => __importStar(require('../services/blockchain.service')));
|
||||
await blockchainService.assignWorker(submission.task.contractTaskId, submission.worker.walletAddress);
|
||||
console.log(`✅ Worker assigned on blockchain`);
|
||||
}
|
||||
catch (blockchainError) {
|
||||
console.error('Failed to assign worker on blockchain:', blockchainError);
|
||||
// Continue even if blockchain assignment fails - the verification will catch it
|
||||
}
|
||||
}
|
||||
// Step 8: Trigger async verification
|
||||
try {
|
||||
await (0, verification_queue_1.addVerificationJob)({
|
||||
submissionId: submission.id,
|
||||
taskId: submissionData.taskId,
|
||||
workerId: workerId,
|
||||
submissionData: submissionData.submissionData,
|
||||
verificationCriteria: task.verificationCriteria,
|
||||
taskType: task.taskType,
|
||||
});
|
||||
console.log(`✅ Submission added to verification queue`);
|
||||
}
|
||||
catch (queueError) {
|
||||
console.error("Failed to add job to queue:", queueError);
|
||||
}
|
||||
response_util_1.ResponseUtil.success(res, {
|
||||
submissionId: submission.id,
|
||||
status: "pending",
|
||||
message: "Submission received. AI verification in progress.",
|
||||
estimatedTime: "1-2 minutes",
|
||||
}, 201);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Submit task error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to submit task");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* POST /api/submissions/verify-webhook
|
||||
* Internal webhook for verification results (called by worker)
|
||||
*/
|
||||
static async verifyWebhook(req, res) {
|
||||
try {
|
||||
const webhookData = req.body;
|
||||
console.log(`\n🔔 Verification webhook for submission ${webhookData.submissionId}`);
|
||||
// Verify webhook secret (add to headers in production)
|
||||
const webhookSecret = req.headers["x-webhook-secret"];
|
||||
if (webhookSecret !== process.env.WEBHOOK_SECRET) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, "Invalid webhook secret");
|
||||
return;
|
||||
}
|
||||
const submission = await connections_1.prisma.submission.findUnique({
|
||||
where: { id: webhookData.submissionId },
|
||||
include: {
|
||||
task: true,
|
||||
worker: true,
|
||||
},
|
||||
});
|
||||
if (!submission) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Submission");
|
||||
return;
|
||||
}
|
||||
// Update submission with verification result
|
||||
await connections_1.prisma.submission.update({
|
||||
where: { id: webhookData.submissionId },
|
||||
data: {
|
||||
aiVerificationResult: webhookData.verificationResult,
|
||||
verificationStatus: webhookData.verificationResult.approved
|
||||
? database_types_1.VerificationStatus.APPROVED
|
||||
: database_types_1.VerificationStatus.REJECTED,
|
||||
},
|
||||
});
|
||||
// If approved, trigger payment processing
|
||||
if (webhookData.verificationResult.approved) {
|
||||
console.log(`✅ Submission approved! Processing payment for submission ${webhookData.submissionId}`);
|
||||
try {
|
||||
// Import payment service
|
||||
const { paymentService } = await Promise.resolve().then(() => __importStar(require("../services/payment.service")));
|
||||
// Check if contractTaskId exists
|
||||
if (!submission.task.contractTaskId) {
|
||||
console.warn(`⚠️ Contract task ID not found for task ${submission.taskId}. Processing local payment.`);
|
||||
try {
|
||||
// Update payment record directly without blockchain confirmation
|
||||
await connections_1.prisma.payment.updateMany({
|
||||
where: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
},
|
||||
data: {
|
||||
transactionHash: `local-${submission.taskId}`,
|
||||
status: "completed",
|
||||
},
|
||||
});
|
||||
console.log(`✅ Payment marked as completed with local reference`);
|
||||
}
|
||||
catch (localPaymentError) {
|
||||
console.error(`❌ Failed to process local payment:`, localPaymentError);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Release payment via blockchain
|
||||
const paymentResult = await paymentService.approveSubmissionWithRetry(submission.taskId, submission.id, submission.workerId, submission.task.contractTaskId, submission.task.paymentAmount.toString());
|
||||
if (paymentResult.success) {
|
||||
console.log(`💰 Payment released successfully! Tx: ${paymentResult.txHash}`);
|
||||
}
|
||||
else {
|
||||
console.error(`❌ Payment failed: ${paymentResult.error}`);
|
||||
}
|
||||
}
|
||||
// Update task status to completed
|
||||
await connections_1.prisma.task.update({
|
||||
where: { id: submission.taskId },
|
||||
data: { status: database_types_1.TaskStatus.COMPLETED },
|
||||
});
|
||||
// Update worker earnings
|
||||
await connections_1.prisma.user.update({
|
||||
where: { id: submission.workerId },
|
||||
data: {
|
||||
totalEarnings: {
|
||||
increment: submission.task.paymentAmount,
|
||||
},
|
||||
totalTasksCompleted: {
|
||||
increment: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(`✅ Worker ${submission.workerId} earnings updated`);
|
||||
}
|
||||
catch (paymentError) {
|
||||
console.error(`❌ Error processing payment:`, paymentError);
|
||||
// Don't fail the webhook response, just log the error
|
||||
}
|
||||
}
|
||||
response_util_1.ResponseUtil.success(res, { message: "Verification result processed" });
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Verify webhook error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to process verification");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/submissions/:submissionId/status
|
||||
* Get submission status
|
||||
*/
|
||||
static async getSubmissionStatus(req, res) {
|
||||
try {
|
||||
const { submissionId } = req.params;
|
||||
const userId = req.user.userId;
|
||||
const submission = await connections_1.prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
paymentAmount: true,
|
||||
requesterId: true,
|
||||
},
|
||||
},
|
||||
worker: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!submission) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Submission");
|
||||
return;
|
||||
}
|
||||
// Check if user has access to this submission
|
||||
const isOwner = submission.workerId === userId ||
|
||||
submission.task.requesterId === userId;
|
||||
if (!isOwner) {
|
||||
response_util_1.ResponseUtil.forbidden(res, "You do not have access to this submission");
|
||||
return;
|
||||
}
|
||||
// Get payment info if exists
|
||||
const payment = await connections_1.prisma.payment.findFirst({
|
||||
where: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
},
|
||||
});
|
||||
response_util_1.ResponseUtil.success(res, {
|
||||
submission: {
|
||||
id: submission.id,
|
||||
status: submission.verificationStatus,
|
||||
submittedAt: submission.createdAt,
|
||||
verificationResult: submission.aiVerificationResult,
|
||||
},
|
||||
task: submission.task,
|
||||
payment: payment || null,
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Get submission status error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch submission status");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/submissions/:submissionId/payment
|
||||
* Get detailed payment status for a submission
|
||||
*/
|
||||
static async getPaymentStatus(req, res) {
|
||||
try {
|
||||
const { submissionId } = req.params;
|
||||
const userId = req.user.userId;
|
||||
const submission = await connections_1.prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: {
|
||||
select: {
|
||||
id: true,
|
||||
requesterId: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!submission) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Submission");
|
||||
return;
|
||||
}
|
||||
// Check if user has access to this submission
|
||||
const isOwner = submission.workerId === userId ||
|
||||
submission.task.requesterId === userId;
|
||||
if (!isOwner) {
|
||||
response_util_1.ResponseUtil.forbidden(res, "You do not have access to this payment");
|
||||
return;
|
||||
}
|
||||
// Get payment info
|
||||
const payment = await connections_1.prisma.payment.findFirst({
|
||||
where: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
},
|
||||
});
|
||||
if (!payment) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Payment");
|
||||
return;
|
||||
}
|
||||
response_util_1.ResponseUtil.success(res, {
|
||||
payment: {
|
||||
id: payment.id,
|
||||
amount: payment.amount,
|
||||
status: payment.status,
|
||||
transactionHash: payment.transactionHash,
|
||||
createdAt: payment.createdAt,
|
||||
submissionId: submission.id,
|
||||
verificationStatus: submission.verificationStatus,
|
||||
},
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Get payment status error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch payment status");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/submissions/my-submissions
|
||||
* Get submissions by authenticated worker
|
||||
*/
|
||||
static async getMySubmissions(req, res) {
|
||||
try {
|
||||
const workerId = req.user.userId;
|
||||
const submissions = await connections_1.prisma.submission.findMany({
|
||||
where: { workerId },
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
task: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
paymentAmount: true,
|
||||
taskType: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
// Get payment info for each
|
||||
const submissionsWithPayments = await Promise.all(submissions.map(async (submission) => {
|
||||
const payment = await connections_1.prisma.payment.findFirst({
|
||||
where: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
},
|
||||
});
|
||||
return {
|
||||
...submission,
|
||||
payment: payment || null,
|
||||
};
|
||||
}));
|
||||
response_util_1.ResponseUtil.success(res, submissionsWithPayments);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Get my submissions error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch submissions");
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.SubmissionController = SubmissionController;
|
||||
242
dmtp/server/src/controllers/task.controller.js
Normal file
242
dmtp/server/src/controllers/task.controller.js
Normal file
@@ -0,0 +1,242 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TaskController = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const blockchain_service_1 = require("../services/blockchain.service");
|
||||
const database_types_1 = require("../types/database.types");
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class TaskController {
|
||||
/**
|
||||
* POST /api/tasks/create
|
||||
* Create a new task
|
||||
*/
|
||||
static async createTask(req, res) {
|
||||
try {
|
||||
const taskData = req.body;
|
||||
const walletAddress = req.user.walletAddress;
|
||||
console.log(`\n📝 Creating task for requester: ${walletAddress}`);
|
||||
// Step 1: Validate requester has sufficient cUSD balance
|
||||
const balance = await blockchain_service_1.blockchainService.getCUSDBalance(walletAddress);
|
||||
const balanceNum = parseFloat(balance);
|
||||
if (balanceNum < taskData.paymentAmount) {
|
||||
response_util_1.ResponseUtil.error(res, `Insufficient balance. Required: ${taskData.paymentAmount} cUSD, Available: ${balanceNum} cUSD`, "INSUFFICIENT_BALANCE", 400);
|
||||
return;
|
||||
}
|
||||
// Step 2: Calculate duration in days
|
||||
const expiresAt = new Date(taskData.expiresAt);
|
||||
const now = new Date();
|
||||
const durationMs = expiresAt.getTime() - now.getTime();
|
||||
const durationInDays = Math.ceil(durationMs / (1000 * 60 * 60 * 24));
|
||||
// Step 3: Create task on blockchain
|
||||
console.log("⛓️ Creating task on blockchain...");
|
||||
const blockchainResult = await blockchain_service_1.blockchainService.createTask(taskData.paymentAmount.toString(), durationInDays);
|
||||
// Step 4: Store task metadata in database
|
||||
console.log("💾 Storing task in database...");
|
||||
const task = await connections_1.prisma.task.create({
|
||||
data: {
|
||||
requesterId: req.user.userId,
|
||||
title: taskData.title,
|
||||
description: taskData.description,
|
||||
taskType: taskData.taskType,
|
||||
paymentAmount: taskData.paymentAmount,
|
||||
verificationCriteria: taskData.verificationCriteria,
|
||||
maxSubmissions: taskData.maxSubmissions,
|
||||
expiresAt: expiresAt,
|
||||
contractTaskId: blockchainResult.taskId, // ← This should now work
|
||||
status: database_types_1.TaskStatus.OPEN,
|
||||
},
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
// Update requester's task count
|
||||
await connections_1.prisma.user.update({
|
||||
where: { id: req.user.userId },
|
||||
data: {
|
||||
totalTasksCreated: {
|
||||
// ← This should now work
|
||||
increment: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(`✅ Task created successfully! ID: ${task.id}`);
|
||||
response_util_1.ResponseUtil.success(res, {
|
||||
task,
|
||||
blockchain: {
|
||||
taskId: blockchainResult.taskId,
|
||||
transactionHash: blockchainResult.txHash,
|
||||
},
|
||||
}, 201);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Create task error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, `Failed to create task: ${error}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/tasks/list
|
||||
* Get paginated list of tasks
|
||||
*/
|
||||
static async listTasks(req, res) {
|
||||
try {
|
||||
const query = {
|
||||
status: req.query.status,
|
||||
taskType: req.query.taskType,
|
||||
limit: parseInt(req.query.limit) || 20,
|
||||
offset: parseInt(req.query.offset) || 0,
|
||||
sortBy: req.query.sortBy || "paymentAmount",
|
||||
sortOrder: req.query.sortOrder || "desc",
|
||||
};
|
||||
// Build where clause
|
||||
const where = {};
|
||||
if (query.status) {
|
||||
where.status = query.status;
|
||||
}
|
||||
if (query.taskType) {
|
||||
where.taskType = query.taskType;
|
||||
}
|
||||
// Only show non-expired tasks
|
||||
where.expiresAt = {
|
||||
gt: new Date(),
|
||||
};
|
||||
// Build orderBy clause
|
||||
const orderBy = {};
|
||||
orderBy[query.sortBy] = query.sortOrder;
|
||||
// Query tasks
|
||||
const [tasks, total] = await Promise.all([
|
||||
connections_1.prisma.task.findMany({
|
||||
where,
|
||||
orderBy,
|
||||
skip: query.offset,
|
||||
take: query.limit,
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
},
|
||||
},
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
connections_1.prisma.task.count({ where }),
|
||||
]);
|
||||
// Add calculated fields
|
||||
const tasksWithExtras = tasks.map((task) => ({
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
timeRemaining: task.expiresAt.getTime() - Date.now(),
|
||||
isExpiringSoon: task.expiresAt.getTime() - Date.now() < 24 * 60 * 60 * 1000, // < 24 hours
|
||||
}));
|
||||
response_util_1.ResponseUtil.success(res, tasksWithExtras, 200, {
|
||||
page: Math.floor(query.offset / query.limit) + 1,
|
||||
limit: query.limit,
|
||||
total,
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
console.error("List tasks error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch tasks");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/tasks/:taskId
|
||||
* Get single task details
|
||||
*/
|
||||
static async getTask(req, res) {
|
||||
try {
|
||||
const { taskId } = req.params;
|
||||
const task = await connections_1.prisma.task.findUnique({
|
||||
where: { id: taskId },
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
totalTasksCreated: true,
|
||||
},
|
||||
},
|
||||
submissions: {
|
||||
select: {
|
||||
id: true,
|
||||
workerId: true,
|
||||
verificationStatus: true,
|
||||
createdAt: true,
|
||||
},
|
||||
},
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!task) {
|
||||
response_util_1.ResponseUtil.notFound(res, "Task");
|
||||
return;
|
||||
}
|
||||
// Add calculated fields
|
||||
const taskWithExtras = {
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
timeRemaining: task.expiresAt.getTime() - Date.now(),
|
||||
isExpired: task.expiresAt.getTime() < Date.now(),
|
||||
canSubmit: task.status === database_types_1.TaskStatus.OPEN &&
|
||||
task.expiresAt.getTime() > Date.now() &&
|
||||
task._count.submissions < task.maxSubmissions,
|
||||
};
|
||||
response_util_1.ResponseUtil.success(res, taskWithExtras);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Get task error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch task");
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/tasks/my-tasks
|
||||
* Get tasks created by authenticated user
|
||||
*/
|
||||
static async getMyTasks(req, res) {
|
||||
try {
|
||||
const userId = req.user.userId;
|
||||
const tasks = await connections_1.prisma.task.findMany({
|
||||
where: { requesterId: userId },
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
payments: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
const tasksWithStats = tasks.map((task) => ({
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
paymentCount: task._count.payments,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
}));
|
||||
response_util_1.ResponseUtil.success(res, tasksWithStats);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Get my tasks error:", error);
|
||||
response_util_1.ResponseUtil.internalError(res, "Failed to fetch tasks");
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.TaskController = TaskController;
|
||||
413
dmtp/server/src/controllers/task.controller.ts
Normal file
413
dmtp/server/src/controllers/task.controller.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
import { Response } from "express";
|
||||
import { prisma } from "../database/connections";
|
||||
import { blockchainService } from "../services/blockchain.service";
|
||||
import {
|
||||
AuthenticatedRequest,
|
||||
CreateTaskDto,
|
||||
TaskListQuery,
|
||||
} from "../types/api.types";
|
||||
import { TaskStatus } from "../types/database.types";
|
||||
import { ResponseUtil } from "../utils/response.util";
|
||||
|
||||
export class TaskController {
|
||||
/**
|
||||
* POST /api/tasks/create
|
||||
* Create a new task
|
||||
*/
|
||||
static async createTask(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
try {
|
||||
const taskData: CreateTaskDto = req.body;
|
||||
const walletAddress = req.user!.walletAddress;
|
||||
|
||||
console.log(`\n📝 Creating task for requester: ${walletAddress}`);
|
||||
|
||||
// Step 1: Validate requester has sufficient cUSD balance
|
||||
const balance = await blockchainService.getCUSDBalance(walletAddress);
|
||||
const balanceNum = parseFloat(balance);
|
||||
|
||||
if (balanceNum < taskData.paymentAmount) {
|
||||
ResponseUtil.error(
|
||||
res,
|
||||
`Insufficient balance. Required: ${taskData.paymentAmount} cUSD, Available: ${balanceNum} cUSD`,
|
||||
"INSUFFICIENT_BALANCE",
|
||||
400
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 2: Calculate duration in days
|
||||
const expiresAt = new Date(taskData.expiresAt);
|
||||
const now = new Date();
|
||||
const durationMs = expiresAt.getTime() - now.getTime();
|
||||
const durationInDays = Math.ceil(durationMs / (1000 * 60 * 60 * 24));
|
||||
|
||||
// Step 3: Create task on blockchain
|
||||
console.log("⛓️ Creating task on blockchain...");
|
||||
const blockchainResult = await blockchainService.createTask(
|
||||
taskData.paymentAmount.toString(),
|
||||
durationInDays
|
||||
);
|
||||
|
||||
|
||||
|
||||
// Step 4: Store task metadata in database
|
||||
console.log("💾 Storing task in database...");
|
||||
const task = await prisma.task.create({
|
||||
data: {
|
||||
requesterId: req.user!.userId!,
|
||||
title: taskData.title,
|
||||
description: taskData.description,
|
||||
taskType: taskData.taskType,
|
||||
paymentAmount: taskData.paymentAmount,
|
||||
verificationCriteria: taskData.verificationCriteria,
|
||||
maxSubmissions: taskData.maxSubmissions,
|
||||
expiresAt: expiresAt,
|
||||
contractTaskId: blockchainResult.taskId, // ← This should now work
|
||||
status: TaskStatus.OPEN,
|
||||
},
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Update requester's task count
|
||||
await prisma.user.update({
|
||||
where: { id: req.user!.userId },
|
||||
data: {
|
||||
totalTasksCreated: {
|
||||
// ← This should now work
|
||||
increment: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`✅ Task created successfully! ID: ${task.id}`);
|
||||
|
||||
ResponseUtil.success(
|
||||
res,
|
||||
{
|
||||
task,
|
||||
blockchain: {
|
||||
taskId: blockchainResult.taskId,
|
||||
transactionHash: blockchainResult.txHash,
|
||||
},
|
||||
},
|
||||
201
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Create task error:", error);
|
||||
ResponseUtil.internalError(res, `Failed to create task: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/tasks/list
|
||||
* Get paginated list of tasks
|
||||
*/
|
||||
static async listTasks(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
try {
|
||||
const query: TaskListQuery = {
|
||||
status: req.query.status as TaskStatus,
|
||||
taskType: req.query.taskType as any,
|
||||
limit: parseInt(req.query.limit as string) || 20,
|
||||
offset: parseInt(req.query.offset as string) || 0,
|
||||
sortBy: (req.query.sortBy as any) || "paymentAmount",
|
||||
sortOrder: (req.query.sortOrder as any) || "desc",
|
||||
};
|
||||
|
||||
// Build where clause
|
||||
const where: any = {};
|
||||
|
||||
if (query.status) {
|
||||
where.status = query.status;
|
||||
}
|
||||
|
||||
if (query.taskType) {
|
||||
where.taskType = query.taskType;
|
||||
}
|
||||
|
||||
// Only show non-expired tasks
|
||||
where.expiresAt = {
|
||||
gt: new Date(),
|
||||
};
|
||||
|
||||
// Build orderBy clause
|
||||
const orderBy: any = {};
|
||||
orderBy[query.sortBy!] = query.sortOrder;
|
||||
|
||||
// Query tasks
|
||||
const [tasks, total] = await Promise.all([
|
||||
prisma.task.findMany({
|
||||
where,
|
||||
orderBy,
|
||||
skip: query.offset,
|
||||
take: query.limit,
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
},
|
||||
},
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
prisma.task.count({ where }),
|
||||
]);
|
||||
|
||||
// Add calculated fields
|
||||
const tasksWithExtras = tasks.map((task) => ({
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
timeRemaining: task.expiresAt.getTime() - Date.now(),
|
||||
isExpiringSoon:
|
||||
task.expiresAt.getTime() - Date.now() < 24 * 60 * 60 * 1000, // < 24 hours
|
||||
}));
|
||||
|
||||
ResponseUtil.success(res, tasksWithExtras, 200, {
|
||||
page: Math.floor(query.offset! / query.limit!) + 1,
|
||||
limit: query.limit,
|
||||
total,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("List tasks error:", error);
|
||||
ResponseUtil.internalError(res, "Failed to fetch tasks");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/tasks/:taskId
|
||||
* Get single task details
|
||||
*/
|
||||
static async getTask(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
try {
|
||||
const { taskId } = req.params;
|
||||
|
||||
const task = await prisma.task.findUnique({
|
||||
where: { id: taskId },
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
totalTasksCreated: true,
|
||||
},
|
||||
},
|
||||
submissions: {
|
||||
select: {
|
||||
id: true,
|
||||
workerId: true,
|
||||
verificationStatus: true,
|
||||
createdAt: true,
|
||||
},
|
||||
},
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!task) {
|
||||
ResponseUtil.notFound(res, "Task");
|
||||
return;
|
||||
}
|
||||
|
||||
// Add calculated fields
|
||||
const taskWithExtras = {
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
timeRemaining: task.expiresAt.getTime() - Date.now(),
|
||||
isExpired: task.expiresAt.getTime() < Date.now(),
|
||||
canSubmit:
|
||||
task.status === TaskStatus.OPEN &&
|
||||
task.expiresAt.getTime() > Date.now() &&
|
||||
task._count.submissions < task.maxSubmissions,
|
||||
};
|
||||
|
||||
ResponseUtil.success(res, taskWithExtras);
|
||||
} catch (error) {
|
||||
console.error("Get task error:", error);
|
||||
ResponseUtil.internalError(res, "Failed to fetch task");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/tasks/my-tasks
|
||||
* Get tasks created by authenticated user
|
||||
*/
|
||||
static async getMyTasks(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
try {
|
||||
const userId = req.user!.userId!;
|
||||
|
||||
const tasks = await prisma.task.findMany({
|
||||
where: { requesterId: userId },
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
submissions: true,
|
||||
payments: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const tasksWithStats = tasks.map((task) => ({
|
||||
...task,
|
||||
submissionCount: task._count.submissions,
|
||||
paymentCount: task._count.payments,
|
||||
spotsRemaining: task.maxSubmissions - task._count.submissions,
|
||||
}));
|
||||
|
||||
ResponseUtil.success(res, tasksWithStats);
|
||||
} catch (error) {
|
||||
console.error("Get my tasks error:", error);
|
||||
ResponseUtil.internalError(res, "Failed to fetch tasks");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/tasks/sync
|
||||
* Sync task from blockchain admin dashboard to database
|
||||
*/
|
||||
static async syncTask(
|
||||
req: any,
|
||||
res: Response
|
||||
): Promise<void> {
|
||||
try {
|
||||
const {
|
||||
contractTaskId,
|
||||
transactionHash,
|
||||
paymentAmount,
|
||||
taskName,
|
||||
taskType,
|
||||
description,
|
||||
maxSubmissions,
|
||||
durationInDays,
|
||||
verificationCriteria,
|
||||
} = req.body;
|
||||
|
||||
if (!contractTaskId || !transactionHash) {
|
||||
ResponseUtil.error(
|
||||
res,
|
||||
"contractTaskId and transactionHash are required",
|
||||
"MISSING_REQUIRED_FIELDS",
|
||||
400
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`\n🔄 Syncing blockchain task ${contractTaskId} to database`
|
||||
);
|
||||
|
||||
// Check if task already exists
|
||||
const existingTask = await prisma.task.findFirst({
|
||||
where: { contractTaskId: parseInt(contractTaskId) },
|
||||
});
|
||||
|
||||
if (existingTask) {
|
||||
console.log(`✅ Task ${contractTaskId} already exists in database`);
|
||||
ResponseUtil.success(res, {
|
||||
message: "Task already synced",
|
||||
task: existingTask,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Find or create the system requester for blockchain-created tasks
|
||||
let adminUser = await prisma.user.findFirst({
|
||||
where: { walletAddress: "0xadmin_blockchain" },
|
||||
});
|
||||
|
||||
if (!adminUser) {
|
||||
adminUser = await prisma.user.create({
|
||||
data: {
|
||||
walletAddress: "0xadmin_blockchain",
|
||||
role: "requester",
|
||||
reputationScore: 100,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Calculate expiration date from durationInDays
|
||||
const expiresAt = new Date(
|
||||
Date.now() + (durationInDays || 30) * 24 * 60 * 60 * 1000
|
||||
);
|
||||
|
||||
// Create task in database with blockchain metadata
|
||||
const task = await prisma.task.create({
|
||||
data: {
|
||||
requesterId: adminUser.id,
|
||||
title: taskName || "Blockchain Task",
|
||||
description: description || "Created via blockchain admin dashboard",
|
||||
taskType: (taskType as any) || "text_verification",
|
||||
paymentAmount: parseFloat(paymentAmount) || 0,
|
||||
verificationCriteria: verificationCriteria || {
|
||||
transactionHash: transactionHash,
|
||||
blockchainCreated: true,
|
||||
},
|
||||
maxSubmissions: maxSubmissions || 10,
|
||||
contractTaskId: parseInt(contractTaskId),
|
||||
expiresAt: expiresAt,
|
||||
status: TaskStatus.OPEN,
|
||||
},
|
||||
include: {
|
||||
requester: {
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
reputationScore: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`✅ Task synced successfully! DB ID: ${task.id}`);
|
||||
|
||||
ResponseUtil.success(
|
||||
res,
|
||||
{
|
||||
message: "Task synced to database",
|
||||
task,
|
||||
blockchain: {
|
||||
contractTaskId: parseInt(contractTaskId),
|
||||
transactionHash,
|
||||
},
|
||||
},
|
||||
201
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Sync task error:", error);
|
||||
ResponseUtil.internalError(res, "Failed to sync task from blockchain");
|
||||
}
|
||||
}
|
||||
}
|
||||
148
dmtp/server/src/controllers/user.controller.js
Normal file
148
dmtp/server/src/controllers/user.controller.js
Normal file
@@ -0,0 +1,148 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.UserController = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class UserController {
|
||||
/**
|
||||
* POST /api/users/register
|
||||
* Register a new user
|
||||
*/
|
||||
static async register(req, res) {
|
||||
try {
|
||||
const userData = req.body;
|
||||
console.log(`\n👤 Registering user: ${userData.walletAddress}`);
|
||||
// Check if user already exists
|
||||
const existingUser = await connections_1.prisma.user.findUnique({
|
||||
where: { walletAddress: userData.walletAddress.toLowerCase() },
|
||||
});
|
||||
if (existingUser) {
|
||||
response_util_1.ResponseUtil.error(res, 'User already registered', 'USER_EXISTS', 409);
|
||||
return;
|
||||
}
|
||||
// Create user
|
||||
const user = await connections_1.prisma.user.create({
|
||||
data: {
|
||||
walletAddress: userData.walletAddress.toLowerCase(),
|
||||
phoneNumber: userData.phoneNumber,
|
||||
role: userData.role,
|
||||
reputationScore: 0,
|
||||
totalEarnings: 0,
|
||||
},
|
||||
});
|
||||
console.log(`✅ User registered: ${user.id}`);
|
||||
response_util_1.ResponseUtil.success(res, {
|
||||
id: user.id,
|
||||
walletAddress: user.walletAddress,
|
||||
role: user.role,
|
||||
createdAt: user.createdAt,
|
||||
}, 201);
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Register user error:', error);
|
||||
response_util_1.ResponseUtil.internalError(res, 'Failed to register user');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/users/profile
|
||||
* Get authenticated user's profile
|
||||
*/
|
||||
static async getProfile(req, res) {
|
||||
try {
|
||||
const userId = req.user.userId;
|
||||
const user = await connections_1.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
createdTasks: true,
|
||||
submissions: true,
|
||||
payments: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
if (!user) {
|
||||
response_util_1.ResponseUtil.notFound(res, 'User');
|
||||
return;
|
||||
}
|
||||
// Get additional stats
|
||||
const [approvedSubmissions, rejectedSubmissions, pendingSubmissions] = await Promise.all([
|
||||
connections_1.prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'approved',
|
||||
},
|
||||
}),
|
||||
connections_1.prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'rejected',
|
||||
},
|
||||
}),
|
||||
connections_1.prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'pending',
|
||||
},
|
||||
}),
|
||||
]);
|
||||
const profile = {
|
||||
id: user.id,
|
||||
walletAddress: user.walletAddress,
|
||||
phoneNumber: user.phoneNumber,
|
||||
role: user.role,
|
||||
reputationScore: user.reputationScore,
|
||||
totalEarnings: user.totalEarnings,
|
||||
createdAt: user.createdAt,
|
||||
stats: {
|
||||
tasksCreated: user._count.createdTasks,
|
||||
submissionsTotal: user._count.submissions,
|
||||
submissionsApproved: approvedSubmissions,
|
||||
submissionsRejected: rejectedSubmissions,
|
||||
submissionsPending: pendingSubmissions,
|
||||
paymentsReceived: user._count.payments,
|
||||
approvalRate: user._count.submissions > 0
|
||||
? ((approvedSubmissions / user._count.submissions) * 100).toFixed(2)
|
||||
: 0,
|
||||
},
|
||||
};
|
||||
response_util_1.ResponseUtil.success(res, profile);
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Get profile error:', error);
|
||||
response_util_1.ResponseUtil.internalError(res, 'Failed to fetch profile');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* GET /api/users/:walletAddress/public
|
||||
* Get public user profile (for displaying requester info)
|
||||
*/
|
||||
static async getPublicProfile(req, res) {
|
||||
try {
|
||||
const { walletAddress } = req.params;
|
||||
const user = await connections_1.prisma.user.findUnique({
|
||||
where: { walletAddress: walletAddress.toLowerCase() },
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
role: true,
|
||||
reputationScore: true,
|
||||
totalTasksCreated: true,
|
||||
totalTasksCompleted: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
if (!user) {
|
||||
response_util_1.ResponseUtil.notFound(res, 'User');
|
||||
return;
|
||||
}
|
||||
response_util_1.ResponseUtil.success(res, user);
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Get public profile error:', error);
|
||||
response_util_1.ResponseUtil.internalError(res, 'Failed to fetch user');
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.UserController = UserController;
|
||||
165
dmtp/server/src/controllers/user.controller.ts
Normal file
165
dmtp/server/src/controllers/user.controller.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import { Response } from 'express';
|
||||
import { prisma } from '../database/connections';
|
||||
import { AuthenticatedRequest, RegisterUserDto } from '../types/api.types';
|
||||
import { ResponseUtil } from '../utils/response.util';
|
||||
|
||||
export class UserController {
|
||||
/**
|
||||
* POST /api/users/register
|
||||
* Register a new user
|
||||
*/
|
||||
static async register(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||
try {
|
||||
const userData: RegisterUserDto = req.body;
|
||||
|
||||
console.log(`\n👤 Registering user: ${userData.walletAddress}`);
|
||||
|
||||
// Check if user already exists
|
||||
const existingUser = await prisma.user.findUnique({
|
||||
where: { walletAddress: userData.walletAddress.toLowerCase() },
|
||||
});
|
||||
|
||||
if (existingUser) {
|
||||
ResponseUtil.error(res, 'User already registered', 'USER_EXISTS', 409);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create user
|
||||
const user = await prisma.user.create({
|
||||
data: {
|
||||
walletAddress: userData.walletAddress.toLowerCase(),
|
||||
phoneNumber: userData.phoneNumber,
|
||||
role: userData.role,
|
||||
reputationScore: 0,
|
||||
totalEarnings: 0,
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`✅ User registered: ${user.id}`);
|
||||
|
||||
ResponseUtil.success(
|
||||
res,
|
||||
{
|
||||
id: user.id,
|
||||
walletAddress: user.walletAddress,
|
||||
role: user.role,
|
||||
createdAt: user.createdAt,
|
||||
},
|
||||
201
|
||||
);
|
||||
} catch (error) {
|
||||
console.error('Register user error:', error);
|
||||
ResponseUtil.internalError(res, 'Failed to register user');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/users/profile
|
||||
* Get authenticated user's profile
|
||||
*/
|
||||
static async getProfile(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||
try {
|
||||
const userId = req.user!.userId!;
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
include: {
|
||||
_count: {
|
||||
select: {
|
||||
createdTasks: true,
|
||||
submissions: true,
|
||||
payments: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
ResponseUtil.notFound(res, 'User');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get additional stats
|
||||
const [approvedSubmissions, rejectedSubmissions, pendingSubmissions] = await Promise.all([
|
||||
prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'approved',
|
||||
},
|
||||
}),
|
||||
prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'rejected',
|
||||
},
|
||||
}),
|
||||
prisma.submission.count({
|
||||
where: {
|
||||
workerId: userId,
|
||||
verificationStatus: 'pending',
|
||||
},
|
||||
}),
|
||||
]);
|
||||
|
||||
const profile = {
|
||||
id: user.id,
|
||||
walletAddress: user.walletAddress,
|
||||
phoneNumber: user.phoneNumber,
|
||||
role: user.role,
|
||||
reputationScore: user.reputationScore,
|
||||
totalEarnings: user.totalEarnings,
|
||||
createdAt: user.createdAt,
|
||||
stats: {
|
||||
tasksCreated: user._count.createdTasks,
|
||||
submissionsTotal: user._count.submissions,
|
||||
submissionsApproved: approvedSubmissions,
|
||||
submissionsRejected: rejectedSubmissions,
|
||||
submissionsPending: pendingSubmissions,
|
||||
paymentsReceived: user._count.payments,
|
||||
approvalRate:
|
||||
user._count.submissions > 0
|
||||
? ((approvedSubmissions / user._count.submissions) * 100).toFixed(2)
|
||||
: 0,
|
||||
},
|
||||
};
|
||||
|
||||
ResponseUtil.success(res, profile);
|
||||
} catch (error) {
|
||||
console.error('Get profile error:', error);
|
||||
ResponseUtil.internalError(res, 'Failed to fetch profile');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/users/:walletAddress/public
|
||||
* Get public user profile (for displaying requester info)
|
||||
*/
|
||||
static async getPublicProfile(req: AuthenticatedRequest, res: Response): Promise<void> {
|
||||
try {
|
||||
const { walletAddress } = req.params;
|
||||
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { walletAddress: walletAddress.toLowerCase() },
|
||||
select: {
|
||||
id: true,
|
||||
walletAddress: true,
|
||||
role: true,
|
||||
reputationScore: true,
|
||||
totalTasksCreated: true,
|
||||
totalTasksCompleted: true,
|
||||
createdAt: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
ResponseUtil.notFound(res, 'User');
|
||||
return;
|
||||
}
|
||||
|
||||
ResponseUtil.success(res, user);
|
||||
} catch (error) {
|
||||
console.error('Get public profile error:', error);
|
||||
ResponseUtil.internalError(res, 'Failed to fetch user');
|
||||
}
|
||||
}
|
||||
}
|
||||
22
dmtp/server/src/dashboard.ts
Normal file
22
dmtp/server/src/dashboard.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import { createBullBoard } from '@bull-board/api';
|
||||
import { BullAdapter } from '@bull-board/api/bullAdapter';
|
||||
import { ExpressAdapter } from '@bull-board/express';
|
||||
import express from 'express';
|
||||
import { verificationQueue } from './queues/verification.queue';
|
||||
|
||||
/**
|
||||
* Create Bull Board dashboard
|
||||
*/
|
||||
export function createQueueDashboard(): express.Router {
|
||||
const serverAdapter = new ExpressAdapter();
|
||||
serverAdapter.setBasePath('/admin/queues');
|
||||
|
||||
createBullBoard({
|
||||
queues: [new BullAdapter(verificationQueue)],
|
||||
serverAdapter: serverAdapter,
|
||||
});
|
||||
|
||||
console.log('📊 Bull Board dashboard available at: /admin/queues');
|
||||
|
||||
return serverAdapter.getRouter();
|
||||
}
|
||||
30
dmtp/server/src/database/connections.js
Normal file
30
dmtp/server/src/database/connections.js
Normal file
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.testConnection = exports.disconnectDB = exports.prisma = void 0;
|
||||
const client_1 = require("@prisma/client");
|
||||
// Singleton pattern for Prisma Client
|
||||
const globalForPrisma = global;
|
||||
exports.prisma = globalForPrisma.prisma ||
|
||||
new client_1.PrismaClient({
|
||||
log: process.env.NODE_ENV === 'development' ? ['query', 'error', 'warn'] : ['error'],
|
||||
});
|
||||
if (process.env.NODE_ENV !== 'production')
|
||||
globalForPrisma.prisma = exports.prisma;
|
||||
// Graceful shutdown
|
||||
const disconnectDB = async () => {
|
||||
await exports.prisma.$disconnect();
|
||||
};
|
||||
exports.disconnectDB = disconnectDB;
|
||||
// Test connection
|
||||
const testConnection = async () => {
|
||||
try {
|
||||
await exports.prisma.$connect();
|
||||
console.log('✅ Database connected successfully');
|
||||
return true;
|
||||
}
|
||||
catch (error) {
|
||||
console.error('❌ Database connection failed:', error);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
exports.testConnection = testConnection;
|
||||
29
dmtp/server/src/database/connections.ts
Normal file
29
dmtp/server/src/database/connections.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
|
||||
// Singleton pattern for Prisma Client
|
||||
const globalForPrisma = global as unknown as { prisma: PrismaClient };
|
||||
|
||||
export const prisma =
|
||||
globalForPrisma.prisma ||
|
||||
new PrismaClient({
|
||||
log: process.env.NODE_ENV === 'development' ? ['query', 'error', 'warn'] : ['error'],
|
||||
});
|
||||
|
||||
if (process.env.NODE_ENV !== 'production') globalForPrisma.prisma = prisma;
|
||||
|
||||
// Graceful shutdown
|
||||
export const disconnectDB = async () => {
|
||||
await prisma.$disconnect();
|
||||
};
|
||||
|
||||
// Test connection
|
||||
export const testConnection = async () => {
|
||||
try {
|
||||
await prisma.$connect();
|
||||
console.log('✅ Database connected successfully');
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('❌ Database connection failed:', error);
|
||||
return false;
|
||||
}
|
||||
};
|
||||
51
dmtp/server/src/database/seed.js
Normal file
51
dmtp/server/src/database/seed.js
Normal file
@@ -0,0 +1,51 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const database_types_1 = require("../types/database.types");
|
||||
const connections_1 = require("./connections");
|
||||
async function main() {
|
||||
console.log('🌱 Seeding database...');
|
||||
// Create test users
|
||||
const requester = await connections_1.prisma.user.create({
|
||||
data: {
|
||||
walletAddress: '0x1234567890123456789012345678901234567890',
|
||||
role: database_types_1.UserRole.REQUESTER,
|
||||
reputationScore: 100,
|
||||
},
|
||||
});
|
||||
const worker = await connections_1.prisma.user.create({
|
||||
data: {
|
||||
walletAddress: '0x0987654321098765432109876543210987654321',
|
||||
role: database_types_1.UserRole.WORKER,
|
||||
reputationScore: 50,
|
||||
totalEarnings: 100.50,
|
||||
},
|
||||
});
|
||||
// Create test task
|
||||
const task = await connections_1.prisma.task.create({
|
||||
data: {
|
||||
requesterId: requester.id,
|
||||
title: 'Label 100 images of cats and dogs',
|
||||
description: 'Please identify if the image contains a cat or dog',
|
||||
taskType: database_types_1.TaskType.IMAGE_LABELING,
|
||||
paymentAmount: 5.0,
|
||||
status: database_types_1.TaskStatus.OPEN,
|
||||
verificationCriteria: {
|
||||
requiredFields: ['label', 'confidence'],
|
||||
aiPrompt: 'Verify if the labeled animal matches the image',
|
||||
minConfidenceScore: 0.8,
|
||||
},
|
||||
maxSubmissions: 10,
|
||||
expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), // 7 days from now
|
||||
},
|
||||
});
|
||||
console.log('✅ Seed data created');
|
||||
console.log({ requester, worker, task });
|
||||
}
|
||||
main()
|
||||
.catch((e) => {
|
||||
console.error('❌ Seed failed:', e);
|
||||
process.exit(1);
|
||||
})
|
||||
.finally(async () => {
|
||||
await connections_1.prisma.$disconnect();
|
||||
});
|
||||
55
dmtp/server/src/database/seed.ts
Normal file
55
dmtp/server/src/database/seed.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
import { TaskStatus, TaskType, UserRole } from '../types/database.types';
|
||||
import { prisma } from './connections';
|
||||
|
||||
async function main() {
|
||||
console.log('🌱 Seeding database...');
|
||||
|
||||
// Create test users
|
||||
const requester = await prisma.user.create({
|
||||
data: {
|
||||
walletAddress: '0x1234567890123456789012345678901234567890',
|
||||
role: UserRole.REQUESTER,
|
||||
reputationScore: 100,
|
||||
},
|
||||
});
|
||||
|
||||
const worker = await prisma.user.create({
|
||||
data: {
|
||||
walletAddress: '0x0987654321098765432109876543210987654321',
|
||||
role: UserRole.WORKER,
|
||||
reputationScore: 50,
|
||||
totalEarnings: 100.50,
|
||||
},
|
||||
});
|
||||
|
||||
// Create test task
|
||||
const task = await prisma.task.create({
|
||||
data: {
|
||||
requesterId: requester.id,
|
||||
title: 'Label 100 images of cats and dogs',
|
||||
description: 'Please identify if the image contains a cat or dog',
|
||||
taskType: TaskType.IMAGE_LABELING,
|
||||
paymentAmount: 5.0,
|
||||
status: TaskStatus.OPEN,
|
||||
verificationCriteria: {
|
||||
requiredFields: ['label', 'confidence'],
|
||||
aiPrompt: 'Verify if the labeled animal matches the image',
|
||||
minConfidenceScore: 0.8,
|
||||
},
|
||||
maxSubmissions: 10,
|
||||
expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000), // 7 days from now
|
||||
},
|
||||
});
|
||||
|
||||
console.log('✅ Seed data created');
|
||||
console.log({ requester, worker, task });
|
||||
}
|
||||
|
||||
main()
|
||||
.catch((e) => {
|
||||
console.error('❌ Seed failed:', e);
|
||||
process.exit(1);
|
||||
})
|
||||
.finally(async () => {
|
||||
await prisma.$disconnect();
|
||||
});
|
||||
91
dmtp/server/src/middlewares/auth.middleware.js
Normal file
91
dmtp/server/src/middlewares/auth.middleware.js
Normal file
@@ -0,0 +1,91 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AuthMiddleware = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
const signature_util_1 = require("../utils/signature.util");
|
||||
class AuthMiddleware {
|
||||
/**
|
||||
* Verify wallet signature authentication
|
||||
*/
|
||||
static async verifyWallet(req, res, next) {
|
||||
try {
|
||||
// Get auth headers
|
||||
const walletAddress = req.headers['x-wallet-address'];
|
||||
const signature = req.headers['x-signature'];
|
||||
const encodedMessage = req.headers['x-message'];
|
||||
const timestamp = parseInt(req.headers['x-timestamp']);
|
||||
// Check if all required headers are present
|
||||
if (!walletAddress || !signature || !encodedMessage || !timestamp) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'Missing authentication headers');
|
||||
return;
|
||||
}
|
||||
// Decode the Base64-encoded message
|
||||
let message;
|
||||
try {
|
||||
message = decodeURIComponent(Buffer.from(encodedMessage, 'base64').toString());
|
||||
}
|
||||
catch (decodeError) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'Invalid message encoding');
|
||||
return;
|
||||
}
|
||||
// Validate timestamp (prevent replay attacks)
|
||||
if (!signature_util_1.SignatureUtil.isTimestampValid(timestamp)) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'Authentication expired. Please sign again.');
|
||||
return;
|
||||
}
|
||||
// Verify the expected message format
|
||||
const expectedMessage = signature_util_1.SignatureUtil.generateAuthMessage(walletAddress, timestamp);
|
||||
if (message !== expectedMessage) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'Invalid authentication message');
|
||||
return;
|
||||
}
|
||||
// Verify signature
|
||||
const isValid = signature_util_1.SignatureUtil.verifySignature(message, signature, walletAddress);
|
||||
if (!isValid) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'Invalid signature');
|
||||
return;
|
||||
}
|
||||
// Get user from database
|
||||
const user = await connections_1.prisma.user.findUnique({
|
||||
where: { walletAddress: walletAddress.toLowerCase() },
|
||||
});
|
||||
if (!user) {
|
||||
response_util_1.ResponseUtil.unauthorized(res, 'User not registered');
|
||||
return;
|
||||
}
|
||||
// Attach user to request
|
||||
req.user = {
|
||||
walletAddress: walletAddress.toLowerCase(),
|
||||
userId: user.id,
|
||||
};
|
||||
next();
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Auth middleware error:', error);
|
||||
response_util_1.ResponseUtil.internalError(res, 'Authentication failed');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Optional authentication (doesn't fail if not authenticated)
|
||||
*/
|
||||
static async optionalAuth(req, res, next) {
|
||||
try {
|
||||
const walletAddress = req.headers['x-wallet-address'];
|
||||
const signature = req.headers['x-signature'];
|
||||
if (walletAddress && signature) {
|
||||
// Try to authenticate
|
||||
await AuthMiddleware.verifyWallet(req, res, next);
|
||||
}
|
||||
else {
|
||||
// Continue without authentication
|
||||
next();
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// Continue without authentication
|
||||
next();
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.AuthMiddleware = AuthMiddleware;
|
||||
106
dmtp/server/src/middlewares/auth.middleware.ts
Normal file
106
dmtp/server/src/middlewares/auth.middleware.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { NextFunction, Response } from 'express';
|
||||
import { prisma } from '../database/connections';
|
||||
import { AuthenticatedRequest } from '../types/api.types';
|
||||
import { ResponseUtil } from '../utils/response.util';
|
||||
import { SignatureUtil } from '../utils/signature.util';
|
||||
|
||||
export class AuthMiddleware {
|
||||
/**
|
||||
* Verify wallet signature authentication
|
||||
*/
|
||||
static async verifyWallet(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
try {
|
||||
// Get auth headers
|
||||
const walletAddress = req.headers['x-wallet-address'] as string;
|
||||
const signature = req.headers['x-signature'] as string;
|
||||
const encodedMessage = req.headers['x-message'] as string;
|
||||
const timestamp = parseInt(req.headers['x-timestamp'] as string);
|
||||
|
||||
// Check if all required headers are present
|
||||
if (!walletAddress || !signature || !encodedMessage || !timestamp) {
|
||||
ResponseUtil.unauthorized(res, 'Missing authentication headers');
|
||||
return;
|
||||
}
|
||||
|
||||
// Decode the Base64-encoded message
|
||||
let message: string;
|
||||
try {
|
||||
message = decodeURIComponent(Buffer.from(encodedMessage, 'base64').toString());
|
||||
} catch (decodeError) {
|
||||
ResponseUtil.unauthorized(res, 'Invalid message encoding');
|
||||
return;
|
||||
}
|
||||
|
||||
// Validate timestamp (prevent replay attacks)
|
||||
if (!SignatureUtil.isTimestampValid(timestamp)) {
|
||||
ResponseUtil.unauthorized(res, 'Authentication expired. Please sign again.');
|
||||
return;
|
||||
}
|
||||
|
||||
// Verify the expected message format
|
||||
const expectedMessage = SignatureUtil.generateAuthMessage(walletAddress, timestamp);
|
||||
if (message !== expectedMessage) {
|
||||
ResponseUtil.unauthorized(res, 'Invalid authentication message');
|
||||
return;
|
||||
}
|
||||
|
||||
// Verify signature
|
||||
const isValid = SignatureUtil.verifySignature(message, signature, walletAddress);
|
||||
|
||||
if (!isValid) {
|
||||
ResponseUtil.unauthorized(res, 'Invalid signature');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get user from database
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { walletAddress: walletAddress.toLowerCase() },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
ResponseUtil.unauthorized(res, 'User not registered');
|
||||
return;
|
||||
}
|
||||
|
||||
// Attach user to request
|
||||
req.user = {
|
||||
walletAddress: walletAddress.toLowerCase(),
|
||||
userId: user.id,
|
||||
};
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
console.error('Auth middleware error:', error);
|
||||
ResponseUtil.internalError(res, 'Authentication failed');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Optional authentication (doesn't fail if not authenticated)
|
||||
*/
|
||||
static async optionalAuth(
|
||||
req: AuthenticatedRequest,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): Promise<void> {
|
||||
try {
|
||||
const walletAddress = req.headers['x-wallet-address'] as string;
|
||||
const signature = req.headers['x-signature'] as string;
|
||||
|
||||
if (walletAddress && signature) {
|
||||
// Try to authenticate
|
||||
await AuthMiddleware.verifyWallet(req, res, next);
|
||||
} else {
|
||||
// Continue without authentication
|
||||
next();
|
||||
}
|
||||
} catch (error) {
|
||||
// Continue without authentication
|
||||
next();
|
||||
}
|
||||
}
|
||||
}
|
||||
57
dmtp/server/src/middlewares/error.middleware.js
Normal file
57
dmtp/server/src/middlewares/error.middleware.js
Normal file
@@ -0,0 +1,57 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ErrorMiddleware = void 0;
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class ErrorMiddleware {
|
||||
/**
|
||||
* Global error handler
|
||||
*/
|
||||
static handle(error, req, res, next) {
|
||||
console.error('Error caught by global handler:', error);
|
||||
// Prisma errors
|
||||
if (error.code && error.code.startsWith('P')) {
|
||||
return ErrorMiddleware.handlePrismaError(error, res);
|
||||
}
|
||||
// Validation errors
|
||||
if (error.name === 'ValidationError') {
|
||||
return response_util_1.ResponseUtil.validationError(res, error.details);
|
||||
}
|
||||
// JWT errors
|
||||
if (error.name === 'JsonWebTokenError') {
|
||||
return response_util_1.ResponseUtil.unauthorized(res, 'Invalid token');
|
||||
}
|
||||
// Default error
|
||||
const statusCode = error.statusCode || 500;
|
||||
const message = error.message || 'Internal server error';
|
||||
const code = error.code || 'INTERNAL_ERROR';
|
||||
response_util_1.ResponseUtil.error(res, message, code, statusCode);
|
||||
}
|
||||
/**
|
||||
* Handle Prisma-specific errors
|
||||
*/
|
||||
static handlePrismaError(error, res) {
|
||||
switch (error.code) {
|
||||
case 'P2002':
|
||||
// Unique constraint violation
|
||||
response_util_1.ResponseUtil.error(res, 'Resource already exists', 'DUPLICATE_ENTRY', 409, { field: error.meta?.target });
|
||||
break;
|
||||
case 'P2025':
|
||||
// Record not found
|
||||
response_util_1.ResponseUtil.notFound(res, 'Resource');
|
||||
break;
|
||||
case 'P2003':
|
||||
// Foreign key constraint violation
|
||||
response_util_1.ResponseUtil.error(res, 'Invalid reference to related resource', 'INVALID_REFERENCE', 400);
|
||||
break;
|
||||
default:
|
||||
response_util_1.ResponseUtil.internalError(res, 'Database error');
|
||||
}
|
||||
}
|
||||
/**
|
||||
* 404 handler
|
||||
*/
|
||||
static notFound(req, res) {
|
||||
response_util_1.ResponseUtil.error(res, `Route ${req.method} ${req.path} not found`, 'ROUTE_NOT_FOUND', 404);
|
||||
}
|
||||
}
|
||||
exports.ErrorMiddleware = ErrorMiddleware;
|
||||
86
dmtp/server/src/middlewares/error.middleware.ts
Normal file
86
dmtp/server/src/middlewares/error.middleware.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ResponseUtil } from '../utils/response.util';
|
||||
|
||||
export class ErrorMiddleware {
|
||||
/**
|
||||
* Global error handler
|
||||
*/
|
||||
static handle(
|
||||
error: any,
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
): void | Response {
|
||||
console.error('Error caught by global handler:', error);
|
||||
|
||||
// Prisma errors
|
||||
if (error.code && error.code.startsWith('P')) {
|
||||
return ErrorMiddleware.handlePrismaError(error, res);
|
||||
}
|
||||
|
||||
// Validation errors
|
||||
if (error.name === 'ValidationError') {
|
||||
return ResponseUtil.validationError(res, error.details);
|
||||
}
|
||||
|
||||
// JWT errors
|
||||
if (error.name === 'JsonWebTokenError') {
|
||||
return ResponseUtil.unauthorized(res, 'Invalid token');
|
||||
}
|
||||
|
||||
// Default error
|
||||
const statusCode = error.statusCode || 500;
|
||||
const message = error.message || 'Internal server error';
|
||||
const code = error.code || 'INTERNAL_ERROR';
|
||||
|
||||
ResponseUtil.error(res, message, code, statusCode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle Prisma-specific errors
|
||||
*/
|
||||
private static handlePrismaError(error: any, res: Response): void {
|
||||
switch (error.code) {
|
||||
case 'P2002':
|
||||
// Unique constraint violation
|
||||
ResponseUtil.error(
|
||||
res,
|
||||
'Resource already exists',
|
||||
'DUPLICATE_ENTRY',
|
||||
409,
|
||||
{ field: error.meta?.target }
|
||||
);
|
||||
break;
|
||||
|
||||
case 'P2025':
|
||||
// Record not found
|
||||
ResponseUtil.notFound(res, 'Resource');
|
||||
break;
|
||||
|
||||
case 'P2003':
|
||||
// Foreign key constraint violation
|
||||
ResponseUtil.error(
|
||||
res,
|
||||
'Invalid reference to related resource',
|
||||
'INVALID_REFERENCE',
|
||||
400
|
||||
);
|
||||
break;
|
||||
|
||||
default:
|
||||
ResponseUtil.internalError(res, 'Database error');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 404 handler
|
||||
*/
|
||||
static notFound(req: Request, res: Response): void {
|
||||
ResponseUtil.error(
|
||||
res,
|
||||
`Route ${req.method} ${req.path} not found`,
|
||||
'ROUTE_NOT_FOUND',
|
||||
404
|
||||
);
|
||||
}
|
||||
}
|
||||
72
dmtp/server/src/middlewares/rate-limit.middleware.js
Normal file
72
dmtp/server/src/middlewares/rate-limit.middleware.js
Normal file
@@ -0,0 +1,72 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.rateLimiters = exports.RateLimitMiddleware = void 0;
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class RateLimitMiddleware {
|
||||
/**
|
||||
* Create rate limiter middleware
|
||||
*/
|
||||
static create(options) {
|
||||
return (req, res, next) => {
|
||||
const key = options.keyGenerator
|
||||
? options.keyGenerator(req)
|
||||
: req.ip || 'unknown';
|
||||
const now = Date.now();
|
||||
const limit = this.limits.get(key);
|
||||
// Create new entry if doesn't exist or expired
|
||||
if (!limit || now > limit.resetTime) {
|
||||
this.limits.set(key, {
|
||||
count: 1,
|
||||
resetTime: now + options.windowMs,
|
||||
});
|
||||
return next();
|
||||
}
|
||||
// Check if limit exceeded
|
||||
if (limit.count >= options.maxRequests) {
|
||||
const retryAfter = Math.ceil((limit.resetTime - now) / 1000);
|
||||
res.setHeader('Retry-After', retryAfter);
|
||||
return response_util_1.ResponseUtil.error(res, 'Too many requests. Please try again later.', 'RATE_LIMIT_EXCEEDED', 429);
|
||||
}
|
||||
// Increment count
|
||||
limit.count++;
|
||||
next();
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Cleanup expired entries (call periodically)
|
||||
*/
|
||||
static cleanup() {
|
||||
const now = Date.now();
|
||||
for (const [key, entry] of this.limits.entries()) {
|
||||
if (now > entry.resetTime) {
|
||||
this.limits.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.RateLimitMiddleware = RateLimitMiddleware;
|
||||
RateLimitMiddleware.limits = new Map();
|
||||
// Cleanup every 5 minutes
|
||||
setInterval(() => RateLimitMiddleware.cleanup(), 5 * 60 * 1000);
|
||||
// Pre-configured rate limiters
|
||||
exports.rateLimiters = {
|
||||
// General API: 100 requests per minute
|
||||
general: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 100,
|
||||
}),
|
||||
// Strict (auth, registration): 10 requests per minute
|
||||
strict: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 10,
|
||||
}),
|
||||
// Per wallet: 50 requests per minute
|
||||
perWallet: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 50,
|
||||
keyGenerator: (req) => {
|
||||
const walletAddress = req.headers['x-wallet-address'];
|
||||
return walletAddress || req.ip || 'unknown';
|
||||
},
|
||||
}),
|
||||
};
|
||||
94
dmtp/server/src/middlewares/rate-limit.middleware.ts
Normal file
94
dmtp/server/src/middlewares/rate-limit.middleware.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ResponseUtil } from '../utils/response.util';
|
||||
|
||||
interface RateLimitEntry {
|
||||
count: number;
|
||||
resetTime: number;
|
||||
}
|
||||
|
||||
export class RateLimitMiddleware {
|
||||
private static limits: Map<string, RateLimitEntry> = new Map();
|
||||
|
||||
/**
|
||||
* Create rate limiter middleware
|
||||
*/
|
||||
static create(options: {
|
||||
windowMs: number;
|
||||
maxRequests: number;
|
||||
keyGenerator?: (req: Request) => string;
|
||||
}) {
|
||||
return (req: Request, res: Response, next: NextFunction) => {
|
||||
const key = options.keyGenerator
|
||||
? options.keyGenerator(req)
|
||||
: req.ip || 'unknown';
|
||||
|
||||
const now = Date.now();
|
||||
const limit = this.limits.get(key);
|
||||
|
||||
// Create new entry if doesn't exist or expired
|
||||
if (!limit || now > limit.resetTime) {
|
||||
this.limits.set(key, {
|
||||
count: 1,
|
||||
resetTime: now + options.windowMs,
|
||||
});
|
||||
return next();
|
||||
}
|
||||
|
||||
// Check if limit exceeded
|
||||
if (limit.count >= options.maxRequests) {
|
||||
const retryAfter = Math.ceil((limit.resetTime - now) / 1000);
|
||||
res.setHeader('Retry-After', retryAfter);
|
||||
return ResponseUtil.error(
|
||||
res,
|
||||
'Too many requests. Please try again later.',
|
||||
'RATE_LIMIT_EXCEEDED',
|
||||
429
|
||||
);
|
||||
}
|
||||
|
||||
// Increment count
|
||||
limit.count++;
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup expired entries (call periodically)
|
||||
*/
|
||||
static cleanup(): void {
|
||||
const now = Date.now();
|
||||
for (const [key, entry] of this.limits.entries()) {
|
||||
if (now > entry.resetTime) {
|
||||
this.limits.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup every 5 minutes
|
||||
setInterval(() => RateLimitMiddleware.cleanup(), 5 * 60 * 1000);
|
||||
|
||||
// Pre-configured rate limiters
|
||||
export const rateLimiters = {
|
||||
// General API: 100 requests per minute
|
||||
general: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 100,
|
||||
}),
|
||||
|
||||
// Strict (auth, registration): 10 requests per minute
|
||||
strict: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 10,
|
||||
}),
|
||||
|
||||
// Per wallet: 50 requests per minute
|
||||
perWallet: RateLimitMiddleware.create({
|
||||
windowMs: 60 * 1000,
|
||||
maxRequests: 50,
|
||||
keyGenerator: (req) => {
|
||||
const walletAddress = req.headers['x-wallet-address'] as string;
|
||||
return walletAddress || req.ip || 'unknown';
|
||||
},
|
||||
}),
|
||||
};
|
||||
23
dmtp/server/src/middlewares/validation.middleware.js
Normal file
23
dmtp/server/src/middlewares/validation.middleware.js
Normal file
@@ -0,0 +1,23 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ValidationMiddleware = void 0;
|
||||
const express_validator_1 = require("express-validator");
|
||||
const response_util_1 = require("../utils/response.util");
|
||||
class ValidationMiddleware {
|
||||
/**
|
||||
* Validate request using express-validator
|
||||
*/
|
||||
static validate(validations) {
|
||||
return async (req, res, next) => {
|
||||
// Run all validations
|
||||
await Promise.all(validations.map((validation) => validation.run(req)));
|
||||
// Check for errors
|
||||
const errors = (0, express_validator_1.validationResult)(req);
|
||||
if (!errors.isEmpty()) {
|
||||
return response_util_1.ResponseUtil.validationError(res, errors.array());
|
||||
}
|
||||
next();
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.ValidationMiddleware = ValidationMiddleware;
|
||||
23
dmtp/server/src/middlewares/validation.middleware.ts
Normal file
23
dmtp/server/src/middlewares/validation.middleware.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { NextFunction, Request, Response } from 'express';
|
||||
import { ValidationChain, validationResult } from 'express-validator';
|
||||
import { ResponseUtil } from '../utils/response.util';
|
||||
|
||||
export class ValidationMiddleware {
|
||||
/**
|
||||
* Validate request using express-validator
|
||||
*/
|
||||
static validate(validations: ValidationChain[]) {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
// Run all validations
|
||||
await Promise.all(validations.map((validation) => validation.run(req)));
|
||||
|
||||
// Check for errors
|
||||
const errors = validationResult(req);
|
||||
if (!errors.isEmpty()) {
|
||||
return ResponseUtil.validationError(res, errors.array());
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
}
|
||||
2
dmtp/server/src/queues/indes.ts
Normal file
2
dmtp/server/src/queues/indes.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { addVerificationJob, getQueueStats, verificationQueue } from './verification.queue';
|
||||
|
||||
87
dmtp/server/src/queues/verification.queue.ts
Normal file
87
dmtp/server/src/queues/verification.queue.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import Queue, { Job } from 'bull';
|
||||
import { queueOptions } from '../config/redis.config';
|
||||
|
||||
// Job data interface
|
||||
export interface VerificationJobData {
|
||||
submissionId: string;
|
||||
taskId: string;
|
||||
workerId: string;
|
||||
submissionData: any;
|
||||
verificationCriteria: any;
|
||||
taskType: string;
|
||||
}
|
||||
|
||||
// Create verification queue
|
||||
export const verificationQueue = new Queue<VerificationJobData>(
|
||||
'verification-queue',
|
||||
queueOptions
|
||||
);
|
||||
|
||||
// Queue event listeners
|
||||
verificationQueue.on('waiting', (jobId) => {
|
||||
console.log(`🕐 Job ${jobId} is waiting...`);
|
||||
});
|
||||
|
||||
verificationQueue.on('active', (job: Job<VerificationJobData>) => {
|
||||
console.log(`🔄 Job ${job.id} started processing...`);
|
||||
});
|
||||
|
||||
verificationQueue.on('completed', (job: Job<VerificationJobData>, result: any) => {
|
||||
console.log(`✅ Job ${job.id} completed successfully`);
|
||||
});
|
||||
|
||||
verificationQueue.on('failed', (job: Job<VerificationJobData>, err: Error) => {
|
||||
console.error(`❌ Job ${job?.id} failed:`, err.message);
|
||||
});
|
||||
|
||||
verificationQueue.on('error', (error: Error) => {
|
||||
console.error('❌ Queue error:', error);
|
||||
});
|
||||
|
||||
verificationQueue.on('stalled', (job: Job<VerificationJobData>) => {
|
||||
console.warn(`⚠️ Job ${job.id} stalled`);
|
||||
});
|
||||
|
||||
/**
|
||||
* Add verification job to queue
|
||||
*/
|
||||
export async function addVerificationJob(data: VerificationJobData): Promise<Job<VerificationJobData>> {
|
||||
console.log(`\n➕ Adding verification job for submission ${data.submissionId}`);
|
||||
|
||||
const job = await verificationQueue.add('verify-submission', data, {
|
||||
jobId: `verify-${data.submissionId}`, // Unique job ID
|
||||
attempts: 3,
|
||||
backoff: {
|
||||
type: 'exponential',
|
||||
delay: 2000,
|
||||
},
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
console.log(`✅ Job added to queue: ${job.id}`);
|
||||
return job;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get queue statistics
|
||||
*/
|
||||
export async function getQueueStats() {
|
||||
const [waiting, active, completed, failed, delayed] = await Promise.all([
|
||||
verificationQueue.getWaitingCount(),
|
||||
verificationQueue.getActiveCount(),
|
||||
verificationQueue.getCompletedCount(),
|
||||
verificationQueue.getFailedCount(),
|
||||
verificationQueue.getDelayedCount(),
|
||||
]);
|
||||
|
||||
return {
|
||||
waiting,
|
||||
active,
|
||||
completed,
|
||||
failed,
|
||||
delayed,
|
||||
total: waiting + active + completed + failed + delayed,
|
||||
};
|
||||
}
|
||||
|
||||
export default verificationQueue;
|
||||
64
dmtp/server/src/routes/index.js
Normal file
64
dmtp/server/src/routes/index.js
Normal file
@@ -0,0 +1,64 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = require("express");
|
||||
const submission_routes_1 = __importDefault(require("./submission.routes"));
|
||||
const task_routes_1 = __importDefault(require("./task.routes"));
|
||||
const user_routes_1 = __importDefault(require("./user.routes"));
|
||||
const payment_routes_1 = __importDefault(require("./payment.routes"));
|
||||
const router = (0, express_1.Router)();
|
||||
// Health check
|
||||
router.get('/health', (req, res) => {
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Server is healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
});
|
||||
// API routes
|
||||
router.use('/tasks', task_routes_1.default);
|
||||
router.use('/submissions', submission_routes_1.default);
|
||||
router.use('/users', user_routes_1.default);
|
||||
router.use('/payments', payment_routes_1.default);
|
||||
// Test routes (development only)
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
Promise.resolve().then(() => __importStar(require('./test.routes'))).then((testRoutes) => {
|
||||
router.use('/test', testRoutes.default);
|
||||
});
|
||||
}
|
||||
exports.default = router;
|
||||
31
dmtp/server/src/routes/index.ts
Normal file
31
dmtp/server/src/routes/index.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { Router } from 'express';
|
||||
import submissionRoutes from './submission.routes';
|
||||
import taskRoutes from './task.routes';
|
||||
import userRoutes from './user.routes';
|
||||
import paymentRoutes from './payment.routes';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Health check
|
||||
router.get('/health', (req, res) => {
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Server is healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
});
|
||||
|
||||
// API routes
|
||||
router.use('/tasks', taskRoutes);
|
||||
router.use('/submissions', submissionRoutes);
|
||||
router.use('/users', userRoutes);
|
||||
router.use('/payments', paymentRoutes);
|
||||
|
||||
// Test routes (development only)
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
import('./test.routes').then((testRoutes) => {
|
||||
router.use('/test', testRoutes.default);
|
||||
});
|
||||
}
|
||||
|
||||
export default router;
|
||||
10
dmtp/server/src/routes/moderation.routes.js
Normal file
10
dmtp/server/src/routes/moderation.routes.js
Normal file
@@ -0,0 +1,10 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = require("express");
|
||||
const submission_controller_1 = require("../controllers/submission.controller");
|
||||
const router = (0, express_1.Router)();
|
||||
// Submit task (with moderation)
|
||||
router.post('/submissions', submission_controller_1.SubmissionController.submitTask);
|
||||
// Get moderation stats (admin only)
|
||||
router.get('/moderation/stats', submission_controller_1.SubmissionController.getModerationStats);
|
||||
exports.default = router;
|
||||
13
dmtp/server/src/routes/moderation.routes.ts
Normal file
13
dmtp/server/src/routes/moderation.routes.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { Router } from 'express';
|
||||
import { SubmissionController } from '../controllers/submission.controller';
|
||||
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Submit task (with moderation)
|
||||
router.post('/submissions', SubmissionController.submitTask);
|
||||
|
||||
// Get moderation stats (admin only)
|
||||
router.get('/moderation/stats', SubmissionController.getModerationStats);
|
||||
|
||||
export default router;
|
||||
71
dmtp/server/src/routes/payment.routes.js
Normal file
71
dmtp/server/src/routes/payment.routes.js
Normal file
@@ -0,0 +1,71 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = require("express");
|
||||
const submission_controller_1 = require("../controllers/submission.controller");
|
||||
const auth_middleware_1 = require("../middlewares/auth.middleware");
|
||||
const rate_limit_middleware_1 = require("../middlewares/rate-limit.middleware");
|
||||
const router = (0, express_1.Router)();
|
||||
/**
|
||||
* Payment Routes
|
||||
*/
|
||||
/**
|
||||
* GET /api/payments/:submissionId
|
||||
* Get detailed payment status for a submission
|
||||
*/
|
||||
router.get("/:submissionId", rate_limit_middleware_1.rateLimiters.general, auth_middleware_1.AuthMiddleware.verifyWallet, submission_controller_1.SubmissionController.getPaymentStatus);
|
||||
/**
|
||||
* GET /api/payments/stats/worker
|
||||
* Get worker payment statistics
|
||||
*/
|
||||
router.get("/stats/worker", rate_limit_middleware_1.rateLimiters.general, auth_middleware_1.AuthMiddleware.verifyWallet, async (req, res) => {
|
||||
try {
|
||||
const { paymentService } = await Promise.resolve().then(() => __importStar(require("../services/payment.service")));
|
||||
const userId = req.user.userId;
|
||||
const stats = await paymentService.getPaymentStats(userId);
|
||||
res.json({
|
||||
success: true,
|
||||
data: stats,
|
||||
});
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to fetch payment stats:", error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Failed to fetch payment statistics",
|
||||
});
|
||||
}
|
||||
});
|
||||
exports.default = router;
|
||||
53
dmtp/server/src/routes/payment.routes.ts
Normal file
53
dmtp/server/src/routes/payment.routes.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { Router } from "express";
|
||||
import { SubmissionController } from "../controllers/submission.controller";
|
||||
import { AuthMiddleware } from "../middlewares/auth.middleware";
|
||||
import { rateLimiters } from "../middlewares/rate-limit.middleware";
|
||||
import { ValidationMiddleware } from "../middlewares/validation.middleware";
|
||||
import { AuthenticatedRequest } from "../types/api.types";
|
||||
|
||||
const router = Router();
|
||||
|
||||
/**
|
||||
* Payment Routes
|
||||
*/
|
||||
|
||||
/**
|
||||
* GET /api/payments/:submissionId
|
||||
* Get detailed payment status for a submission
|
||||
*/
|
||||
router.get(
|
||||
"/:submissionId",
|
||||
rateLimiters.general,
|
||||
AuthMiddleware.verifyWallet,
|
||||
SubmissionController.getPaymentStatus
|
||||
);
|
||||
|
||||
/**
|
||||
* GET /api/payments/stats/worker
|
||||
* Get worker payment statistics
|
||||
*/
|
||||
router.get(
|
||||
"/stats/worker",
|
||||
rateLimiters.general,
|
||||
AuthMiddleware.verifyWallet,
|
||||
async (req: AuthenticatedRequest, res) => {
|
||||
try {
|
||||
const { paymentService } = await import("../services/payment.service");
|
||||
const userId = req.user!.userId!;
|
||||
|
||||
const stats = await paymentService.getPaymentStats(userId);
|
||||
res.json({
|
||||
success: true,
|
||||
data: stats,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch payment stats:", error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
error: "Failed to fetch payment statistics",
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
export default router;
|
||||
18
dmtp/server/src/routes/submission.routes.js
Normal file
18
dmtp/server/src/routes/submission.routes.js
Normal file
@@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = require("express");
|
||||
const submission_controller_1 = require("../controllers/submission.controller");
|
||||
const auth_middleware_1 = require("../middlewares/auth.middleware");
|
||||
const rate_limit_middleware_1 = require("../middlewares/rate-limit.middleware");
|
||||
const validation_middleware_1 = require("../middlewares/validation.middleware");
|
||||
const submission_validator_1 = require("../validators/submission.validator");
|
||||
const router = (0, express_1.Router)();
|
||||
// Submit task (authenticated, rate limited)
|
||||
router.post('/submit', rate_limit_middleware_1.rateLimiters.perWallet, auth_middleware_1.AuthMiddleware.verifyWallet, validation_middleware_1.ValidationMiddleware.validate(submission_validator_1.submissionValidators.submit), submission_controller_1.SubmissionController.submitTask);
|
||||
// Verification webhook (internal, strict rate limit)
|
||||
router.post('/verify-webhook', rate_limit_middleware_1.rateLimiters.strict, validation_middleware_1.ValidationMiddleware.validate(submission_validator_1.submissionValidators.verifyWebhook), submission_controller_1.SubmissionController.verifyWebhook);
|
||||
// Get submission status (authenticated)
|
||||
router.get('/:submissionId/status', rate_limit_middleware_1.rateLimiters.general, auth_middleware_1.AuthMiddleware.verifyWallet, validation_middleware_1.ValidationMiddleware.validate(submission_validator_1.submissionValidators.getStatus), submission_controller_1.SubmissionController.getSubmissionStatus);
|
||||
// Get my submissions (authenticated)
|
||||
router.get('/my/submissions', rate_limit_middleware_1.rateLimiters.perWallet, auth_middleware_1.AuthMiddleware.verifyWallet, submission_controller_1.SubmissionController.getMySubmissions);
|
||||
exports.default = router;
|
||||
44
dmtp/server/src/routes/submission.routes.ts
Normal file
44
dmtp/server/src/routes/submission.routes.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Router } from 'express';
|
||||
import { SubmissionController } from '../controllers/submission.controller';
|
||||
import { AuthMiddleware } from '../middlewares/auth.middleware';
|
||||
import { rateLimiters } from '../middlewares/rate-limit.middleware';
|
||||
import { ValidationMiddleware } from '../middlewares/validation.middleware';
|
||||
import { submissionValidators } from '../validators/submission.validator';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Submit task (authenticated, rate limited)
|
||||
router.post(
|
||||
'/submit',
|
||||
rateLimiters.perWallet,
|
||||
AuthMiddleware.verifyWallet,
|
||||
ValidationMiddleware.validate(submissionValidators.submit),
|
||||
SubmissionController.submitTask
|
||||
);
|
||||
|
||||
// Verification webhook (internal, strict rate limit)
|
||||
router.post(
|
||||
'/verify-webhook',
|
||||
rateLimiters.strict,
|
||||
ValidationMiddleware.validate(submissionValidators.verifyWebhook),
|
||||
SubmissionController.verifyWebhook
|
||||
);
|
||||
|
||||
// Get submission status (authenticated)
|
||||
router.get(
|
||||
'/:submissionId/status',
|
||||
rateLimiters.general,
|
||||
AuthMiddleware.verifyWallet,
|
||||
ValidationMiddleware.validate(submissionValidators.getStatus),
|
||||
SubmissionController.getSubmissionStatus
|
||||
);
|
||||
|
||||
// Get my submissions (authenticated)
|
||||
router.get(
|
||||
'/my/submissions',
|
||||
rateLimiters.perWallet,
|
||||
AuthMiddleware.verifyWallet,
|
||||
SubmissionController.getMySubmissions
|
||||
);
|
||||
|
||||
export default router;
|
||||
18
dmtp/server/src/routes/task.routes.js
Normal file
18
dmtp/server/src/routes/task.routes.js
Normal file
@@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const express_1 = require("express");
|
||||
const task_controller_1 = require("../controllers/task.controller");
|
||||
const auth_middleware_1 = require("../middlewares/auth.middleware");
|
||||
const rate_limit_middleware_1 = require("../middlewares/rate-limit.middleware");
|
||||
const validation_middleware_1 = require("../middlewares/validation.middleware");
|
||||
const task_validator_1 = require("../validators/task.validator");
|
||||
const router = (0, express_1.Router)();
|
||||
// Create task (authenticated, rate limited)
|
||||
router.post('/create', rate_limit_middleware_1.rateLimiters.perWallet, auth_middleware_1.AuthMiddleware.verifyWallet, validation_middleware_1.ValidationMiddleware.validate(task_validator_1.taskValidators.create), task_controller_1.TaskController.createTask);
|
||||
// List tasks (public, with optional auth)
|
||||
router.get('/list', rate_limit_middleware_1.rateLimiters.general, validation_middleware_1.ValidationMiddleware.validate(task_validator_1.taskValidators.list), task_controller_1.TaskController.listTasks);
|
||||
// Get single task (public)
|
||||
router.get('/:taskId', rate_limit_middleware_1.rateLimiters.general, validation_middleware_1.ValidationMiddleware.validate(task_validator_1.taskValidators.getById), task_controller_1.TaskController.getTask);
|
||||
// Get my tasks (authenticated)
|
||||
router.get('/my/tasks', rate_limit_middleware_1.rateLimiters.perWallet, auth_middleware_1.AuthMiddleware.verifyWallet, task_controller_1.TaskController.getMyTasks);
|
||||
exports.default = router;
|
||||
43
dmtp/server/src/routes/task.routes.ts
Normal file
43
dmtp/server/src/routes/task.routes.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { Router } from 'express';
|
||||
import { TaskController } from '../controllers/task.controller';
|
||||
import { AuthMiddleware } from '../middlewares/auth.middleware';
|
||||
import { rateLimiters } from '../middlewares/rate-limit.middleware';
|
||||
import { ValidationMiddleware } from '../middlewares/validation.middleware';
|
||||
import { taskValidators } from '../validators/task.validator';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Create task (authenticated, rate limited)
|
||||
router.post(
|
||||
'/create',
|
||||
rateLimiters.perWallet,
|
||||
AuthMiddleware.verifyWallet,
|
||||
ValidationMiddleware.validate(taskValidators.create),
|
||||
TaskController.createTask
|
||||
);
|
||||
|
||||
// List tasks (public, with optional auth)
|
||||
router.get(
|
||||
'/list',
|
||||
rateLimiters.general,
|
||||
ValidationMiddleware.validate(taskValidators.list),
|
||||
TaskController.listTasks
|
||||
);
|
||||
|
||||
// Get single task (public)
|
||||
router.get(
|
||||
'/:taskId',
|
||||
rateLimiters.general,
|
||||
ValidationMiddleware.validate(taskValidators.getById),
|
||||
TaskController.getTask
|
||||
);
|
||||
|
||||
// Get my tasks (authenticated)
|
||||
router.get(
|
||||
'/my/tasks',
|
||||
rateLimiters.perWallet,
|
||||
AuthMiddleware.verifyWallet,
|
||||
TaskController.getMyTasks
|
||||
);
|
||||
|
||||
export default router;
|
||||
159
dmtp/server/src/routes/test.routes.ts
Normal file
159
dmtp/server/src/routes/test.routes.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
import { Request, Response, Router } from 'express';
import { prisma } from '../database/connections';
import { addVerificationJob } from '../queues/verification.queue';
import { ResponseUtil } from '../utils/response.util';

const router = Router();

// Only enable in development — outside development this module exports an
// empty router, so mounting it is a no-op.
if (process.env.NODE_ENV === 'development') {
  /**
   * POST /api/test/create-submission
   * Create a test submission for testing the queue
   */
  router.post('/create-submission', async (req: Request, res: Response) => {
    try {
      // 1. Get or create test user (the fixed wallet address is the fixture key)
      let testUser = await prisma.user.findFirst({
        where: { walletAddress: '0xtest1234567890' },
      });

      if (!testUser) {
        testUser = await prisma.user.create({
          data: {
            walletAddress: '0xtest1234567890',
            role: 'worker',
            reputationScore: 0,
          },
        });
      }

      // 2. Get or create test task (looked up by its fixed title)
      let testTask = await prisma.task.findFirst({
        where: { title: 'Test Task for Queue' },
      });

      if (!testTask) {
        // Find a requester or create one
        let requester = await prisma.user.findFirst({
          where: { role: 'requester' },
        });

        if (!requester) {
          requester = await prisma.user.create({
            data: {
              walletAddress: '0xrequester123',
              role: 'requester',
            },
          });
        }

        testTask = await prisma.task.create({
          data: {
            requesterId: requester.id,
            title: 'Test Task for Queue',
            description: 'This is a test task',
            taskType: 'text_verification',
            paymentAmount: 5.0,
            verificationCriteria: {
              requiredFields: ['text'],
              aiPrompt: 'Verify if the text makes sense',
            },
            maxSubmissions: 10,
            // Task stays open for 7 days from creation.
            expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000),
            status: 'open',
          },
        });
      }

      // 3. Create test submission (caller may override the text via the body)
      const submission = await prisma.submission.create({
        data: {
          taskId: testTask.id,
          workerId: testUser.id,
          submissionData: {
            text: req.body.text || 'This is a test submission for the verification queue',
          },
          verificationStatus: 'pending',
        },
      });

      // 4. Add to queue
      await addVerificationJob({
        submissionId: submission.id,
        taskId: testTask.id,
        workerId: testUser.id,
        submissionData: submission.submissionData,
        verificationCriteria: testTask.verificationCriteria,
        taskType: testTask.taskType,
      });

      ResponseUtil.success(res, {
        message: 'Test submission created and added to queue',
        submissionId: submission.id,
        taskId: testTask.id,
      }, 201);
    } catch (error) {
      console.error('Test submission error:', error);
      ResponseUtil.internalError(res, `Failed to create test submission: ${error}`);
    }
  });

  /**
   * GET /api/test/queue-stats
   * Get queue statistics
   */
  router.get('/queue-stats', async (req: Request, res: Response) => {
    try {
      // Lazy import avoids loading queue internals unless the route is hit.
      const { getQueueStats } = await import('../queues/verification.queue');
      const stats = await getQueueStats();

      ResponseUtil.success(res, stats);
    } catch (error) {
      console.error('Queue stats error:', error);
      ResponseUtil.internalError(res, 'Failed to get queue stats');
    }
  });

  /**
   * POST /api/test/reprocess/:submissionId
   * Re-enqueue an existing approved/failed submission to finalize payment or retry processing
   */
  router.post('/reprocess/:submissionId', async (req: Request, res: Response) => {
    try {
      const { submissionId } = req.params;

      const submission = await prisma.submission.findUnique({
        where: { id: submissionId },
        include: { task: true },
      });

      if (!submission) {
        return ResponseUtil.notFound(res, 'Submission');
      }

      // Re-enqueue with the submission's original payload and criteria.
      await addVerificationJob({
        submissionId: submission.id,
        taskId: submission.taskId,
        workerId: submission.workerId,
        submissionData: submission.submissionData,
        verificationCriteria: submission.task.verificationCriteria,
        taskType: submission.task.taskType,
      });

      return ResponseUtil.success(res, {
        message: 'Submission re-enqueued for processing',
        submissionId: submission.id,
        taskId: submission.taskId,
        status: submission.verificationStatus,
      });
    } catch (error) {
      console.error('Reprocess submission error:', error);
      return ResponseUtil.internalError(res, 'Failed to re-enqueue submission');
    }
  });

  console.log('⚠️  Test routes enabled (development mode only)');
}

export default router;
|
||||
33
dmtp/server/src/routes/user.routes.ts
Normal file
33
dmtp/server/src/routes/user.routes.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Router } from 'express';
|
||||
import { UserController } from '../controllers/user.controller';
|
||||
import { AuthMiddleware } from '../middlewares/auth.middleware';
|
||||
import { rateLimiters } from '../middlewares/rate-limit.middleware';
|
||||
import { ValidationMiddleware } from '../middlewares/validation.middleware';
|
||||
import { userValidators } from '../validators/user.validator';
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Register user (strict rate limit)
|
||||
router.post(
|
||||
'/register',
|
||||
rateLimiters.strict,
|
||||
ValidationMiddleware.validate(userValidators.register),
|
||||
UserController.register
|
||||
);
|
||||
|
||||
// Get profile (authenticated)
|
||||
router.get(
|
||||
'/profile',
|
||||
rateLimiters.perWallet,
|
||||
AuthMiddleware.verifyWallet,
|
||||
UserController.getProfile
|
||||
);
|
||||
|
||||
// Get public profile
|
||||
router.get(
|
||||
'/:walletAddress/public',
|
||||
rateLimiters.general,
|
||||
UserController.getPublicProfile
|
||||
);
|
||||
|
||||
export default router;
|
||||
24
dmtp/server/src/server.ts
Normal file
24
dmtp/server/src/server.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
// Server entry point: loads .env, builds the Express app, and starts listening.
import 'dotenv/config';
import { createApp } from './app';

// Port from the environment, defaulting to 3001 for local development.
const PORT = process.env.PORT || 3001;

const app = createApp();

app.listen(PORT, () => {
  // Startup banner. NOTE(review): box-border alignment depends on the lengths
  // of PORT and NODE_ENV — purely cosmetic.
  console.log(`
╔═══════════════════════════════════════════════════════════╗
║ ║
║ 🚀 Celo Task Marketplace API Server ║
║ ║
║ 📡 Server running on: http://localhost:${PORT} ║
║ 🌍 Environment: ${process.env.NODE_ENV || 'development'} ║
║ ⛓️ Network: Celo Sepolia Testnet ║
║ ║
╚═══════════════════════════════════════════════════════════╝
`);

  if (process.env.NODE_ENV === 'development') {
    console.log('⚠️  Test routes enabled (development mode only)');
  }
});
|
||||
245
dmtp/server/src/services/ai-verification.service.ts
Normal file
245
dmtp/server/src/services/ai-verification.service.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
import { PROMPTS } from '../config/ai.config';
|
||||
import {
|
||||
GeminiResponse,
|
||||
ImageVerificationInput,
|
||||
TextVerificationInput,
|
||||
VerificationResult,
|
||||
} from '../types/ai.types';
|
||||
import { cacheService } from './cache.service';
|
||||
import { geminiService } from './gemini.service';
|
||||
|
||||
export class AIVerificationService {
|
||||
/**
|
||||
* Verify text-based task submission
|
||||
*/
|
||||
async verifyTextTask(input: TextVerificationInput): Promise<VerificationResult> {
|
||||
console.log('\n🔍 Starting text verification...');
|
||||
console.log(`Task Type: ${input.taskType || 'generic'}`);
|
||||
|
||||
// Check cache first
|
||||
const cacheKey = cacheService.generateKey('text_verification', input);
|
||||
const cached = await cacheService.get<VerificationResult>(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
console.log('✅ Returning cached verification result');
|
||||
return cached;
|
||||
}
|
||||
|
||||
try {
|
||||
// Build prompt from template
|
||||
const prompt = PROMPTS.TEXT_VERIFICATION
|
||||
.replace('{verificationCriteria}', input.verificationCriteria)
|
||||
.replace('{submissionText}', input.submissionText);
|
||||
|
||||
// Call Gemini API
|
||||
console.log('📤 Sending prompt to Gemini...');
|
||||
const responseText = await geminiService.generateText(prompt);
|
||||
console.log('📥 Received response from Gemini');
|
||||
console.log('Response length:', responseText.length, 'chars');
|
||||
|
||||
// Parse JSON response
|
||||
let geminiResponse: GeminiResponse;
|
||||
try {
|
||||
geminiResponse = geminiService.parseJsonResponse<GeminiResponse>(responseText);
|
||||
} catch (parseError) {
|
||||
console.error('❌ JSON parse failed, attempting recovery...');
|
||||
|
||||
// Attempt to extract key information from text response
|
||||
const approved = /approved['":\s]+true/i.test(responseText);
|
||||
const scoreMatch = responseText.match(/score['":\s]+(\d+)/i);
|
||||
const score = scoreMatch ? parseInt(scoreMatch[1]) : 0;
|
||||
|
||||
geminiResponse = {
|
||||
approved,
|
||||
score,
|
||||
reasoning: 'Auto-extracted from malformed response: ' + responseText.substring(0, 200),
|
||||
violations: [],
|
||||
};
|
||||
|
||||
console.log('⚠️ Using fallback parsing:', geminiResponse);
|
||||
}
|
||||
|
||||
// Build verification result
|
||||
const result: VerificationResult = {
|
||||
approved: geminiResponse.approved,
|
||||
score: geminiResponse.score,
|
||||
reasoning: geminiResponse.reasoning,
|
||||
violations: geminiResponse.violations || [],
|
||||
timestamp: new Date().toISOString(),
|
||||
geminiResponse: responseText,
|
||||
};
|
||||
|
||||
// Cache the result
|
||||
await cacheService.set(cacheKey, result);
|
||||
|
||||
console.log('✅ Text verification completed');
|
||||
console.log(`Result: ${result.approved ? 'APPROVED' : 'REJECTED'} (Score: ${result.score})`);
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error('❌ Text verification failed:', error);
|
||||
throw new Error(`Text verification failed: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify image-based task submission
|
||||
*/
|
||||
async verifyImageTask(input: ImageVerificationInput): Promise<VerificationResult> {
|
||||
console.log('\n🔍 Starting image verification...');
|
||||
console.log(`Image URL: ${input.imageUrl}`);
|
||||
|
||||
// Check cache first
|
||||
const cacheKey = cacheService.generateKey('image_verification', input);
|
||||
const cached = await cacheService.get<VerificationResult>(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
console.log('✅ Returning cached verification result');
|
||||
return cached;
|
||||
}
|
||||
|
||||
try {
|
||||
// Validate image URL
|
||||
if (!this.isValidImageUrl(input.imageUrl)) {
|
||||
throw new Error('Invalid image URL');
|
||||
}
|
||||
|
||||
// Build prompt from template
|
||||
const prompt = PROMPTS.IMAGE_VERIFICATION
|
||||
.replace('{taskDescription}', input.taskDescription)
|
||||
.replace('{verificationCriteria}', input.verificationCriteria);
|
||||
|
||||
// Call Gemini Vision API
|
||||
const responseText = await geminiService.generateFromImage(
|
||||
prompt,
|
||||
input.imageUrl
|
||||
);
|
||||
|
||||
// Parse JSON response
|
||||
const geminiResponse = geminiService.parseJsonResponse<GeminiResponse>(responseText);
|
||||
|
||||
// Build verification result
|
||||
const result: VerificationResult = {
|
||||
approved: geminiResponse.approved,
|
||||
score: geminiResponse.score,
|
||||
reasoning: geminiResponse.reasoning,
|
||||
issues: geminiResponse.issues || [],
|
||||
imageQuality: geminiResponse.imageQuality || 'good',
|
||||
timestamp: new Date().toISOString(),
|
||||
geminiResponse: responseText,
|
||||
};
|
||||
|
||||
// Cache the result
|
||||
await cacheService.set(cacheKey, result);
|
||||
|
||||
console.log('✅ Image verification completed');
|
||||
console.log(`Result: ${result.approved ? 'APPROVED' : 'REJECTED'} (Score: ${result.score})`);
|
||||
console.log(`Image Quality: ${result.imageQuality}`);
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error('❌ Image verification failed:', error);
|
||||
throw new Error(`Image verification failed: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify survey submission (convenience method)
|
||||
*/
|
||||
async verifySurveySubmission(
|
||||
answers: Record<string, any>,
|
||||
expectedFormat: string
|
||||
): Promise<VerificationResult> {
|
||||
return this.verifyTextTask({
|
||||
submissionText: JSON.stringify(answers, null, 2),
|
||||
verificationCriteria: `Survey must follow this format: ${expectedFormat}. Check if all required questions are answered and responses are valid.`,
|
||||
taskType: 'survey',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify content moderation task
|
||||
*/
|
||||
async verifyContentModeration(
|
||||
content: string,
|
||||
moderationGuidelines: string
|
||||
): Promise<VerificationResult> {
|
||||
return this.verifyTextTask({
|
||||
submissionText: content,
|
||||
verificationCriteria: `Moderation Guidelines: ${moderationGuidelines}. Check if the content violates any guidelines. Be thorough in identifying harmful, inappropriate, or spam content.`,
|
||||
taskType: 'content_moderation',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch verify multiple submissions (cost-optimized)
|
||||
*/
|
||||
async batchVerify(
|
||||
submissions: Array<TextVerificationInput | ImageVerificationInput>
|
||||
): Promise<VerificationResult[]> {
|
||||
console.log(`\n📦 Batch verifying ${submissions.length} submissions...`);
|
||||
|
||||
const results: VerificationResult[] = [];
|
||||
|
||||
for (const submission of submissions) {
|
||||
try {
|
||||
let result: VerificationResult;
|
||||
|
||||
if ('imageUrl' in submission) {
|
||||
result = await this.verifyImageTask(submission);
|
||||
} else {
|
||||
result = await this.verifyTextTask(submission);
|
||||
}
|
||||
|
||||
results.push(result);
|
||||
} catch (error) {
|
||||
console.error('Batch verification error:', error);
|
||||
// Push failed result
|
||||
results.push({
|
||||
approved: false,
|
||||
score: 0,
|
||||
reasoning: `Verification failed: ${error}`,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`✅ Batch verification completed: ${results.length} results`);
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get service health status
|
||||
*/
|
||||
async getHealthStatus() {
|
||||
const rateLimitStatus = geminiService.getRateLimitStatus();
|
||||
const cacheStats = cacheService.getStats();
|
||||
|
||||
return {
|
||||
status: 'healthy',
|
||||
rateLimit: rateLimitStatus,
|
||||
cache: cacheStats,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper: Validate image URL
|
||||
*/
|
||||
private isValidImageUrl(url: string): boolean {
|
||||
try {
|
||||
const parsed = new URL(url);
|
||||
const validProtocols = ['http:', 'https:'];
|
||||
const validExtensions = ['.jpg', '.jpeg', '.png', '.gif', '.webp'];
|
||||
|
||||
return (
|
||||
validProtocols.includes(parsed.protocol) &&
|
||||
validExtensions.some((ext) => parsed.pathname.toLowerCase().endsWith(ext))
|
||||
);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const aiVerificationService = new AIVerificationService();
|
||||
318
dmtp/server/src/services/blockchain.service.ts
Normal file
318
dmtp/server/src/services/blockchain.service.ts
Normal file
@@ -0,0 +1,318 @@
|
||||
import { ethers } from 'ethers';
|
||||
import { readFileSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
// TaskEscrow ABI loaded from the Hardhat build artifact at module load time.
// NOTE(review): readFileSync throws on import if the contract has not been
// compiled (artifacts/ is gitignored) — run `hardhat compile` first.
const TaskEscrowABI = JSON.parse(
  readFileSync(
    join(__dirname, '../../artifacts/contracts/TaskEscrow.sol/TaskEscrow.json'),
    'utf8'
  )
);
||||
|
||||
export class BlockchainService {
|
||||
private provider: ethers.JsonRpcProvider;
|
||||
private contract: ethers.Contract;
|
||||
private signer: ethers.Wallet;
|
||||
|
||||
constructor() {
|
||||
// Initialize provider
|
||||
this.provider = new ethers.JsonRpcProvider(
|
||||
process.env.CELO_RPC_URL || 'https://forno.celo-sepolia.celo-testnet.org'
|
||||
);
|
||||
|
||||
// Initialize signer
|
||||
const privateKey = process.env.PRIVATE_KEY;
|
||||
if (!privateKey) {
|
||||
throw new Error('PRIVATE_KEY not configured');
|
||||
}
|
||||
this.signer = new ethers.Wallet(privateKey, this.provider);
|
||||
|
||||
// Initialize contract
|
||||
const contractAddress = process.env.CONTRACT_ADDRESS;
|
||||
if (!contractAddress) {
|
||||
throw new Error('CONTRACT_ADDRESS not configured');
|
||||
}
|
||||
this.contract = new ethers.Contract(
|
||||
contractAddress,
|
||||
TaskEscrowABI.abi,
|
||||
this.signer
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create task on blockchain
|
||||
*/
|
||||
async createTask(paymentAmount: string, durationInDays: number): Promise<{
|
||||
taskId: number;
|
||||
txHash: string;
|
||||
}> {
|
||||
try {
|
||||
console.log(`Creating task on blockchain: ${paymentAmount} cUSD for ${durationInDays} days`);
|
||||
|
||||
const tx = await this.contract.createTask(
|
||||
ethers.parseEther(paymentAmount),
|
||||
durationInDays
|
||||
);
|
||||
|
||||
console.log(`Transaction sent: ${tx.hash}`);
|
||||
const receipt = await tx.wait();
|
||||
console.log(`Transaction confirmed in block ${receipt.blockNumber}`);
|
||||
|
||||
// Get taskId from event
|
||||
const event = receipt.logs.find((log: any) => {
|
||||
try {
|
||||
return this.contract.interface.parseLog(log)?.name === 'TaskCreated';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
const parsedEvent = this.contract.interface.parseLog(event as any);
|
||||
const taskId = Number(parsedEvent?.args[0]);
|
||||
|
||||
return {
|
||||
taskId,
|
||||
txHash: receipt.hash,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Blockchain createTask error:', error);
|
||||
throw new Error(`Failed to create task on blockchain: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Assign worker to a task
|
||||
*/
|
||||
async assignWorker(taskId: number, workerAddress: string): Promise<string> {
|
||||
try {
|
||||
console.log(`Assigning worker ${workerAddress} to task ${taskId} on blockchain`);
|
||||
|
||||
const tx = await this.contract.assignWorker(taskId, workerAddress);
|
||||
console.log(`Transaction sent: ${tx.hash}`);
|
||||
|
||||
const receipt = await tx.wait();
|
||||
console.log(`Worker assigned! Tx: ${receipt.hash}`);
|
||||
|
||||
return receipt.hash;
|
||||
} catch (error) {
|
||||
console.error('Blockchain assignWorker error:', error);
|
||||
throw new Error(`Failed to assign worker: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Approve submission and release payment
|
||||
*/
|
||||
  /**
   * Approve the submission for a task and release the escrowed cUSD payment.
   *
   * Pre-flight: reads the task struct from the contract first so that a task
   * created against a different contract address fails with an actionable
   * message instead of an opaque revert.
   *
   * @param taskId - on-chain task id
   * @returns the confirmed transaction hash
   * @throws Error when the task is missing on this contract or the tx fails
   */
  async approveSubmission(taskId: number): Promise<string> {
    try {
      console.log(`Approving submission for task ${taskId} on blockchain`);

      // First, verify the task exists on-chain
      try {
        const taskData = await this.contract.tasks(taskId);
        console.log('Task data from blockchain:', {
          taskId: taskData.taskId.toString(),
          requester: taskData.requester,
          worker: taskData.worker,
          status: taskData.status,
          paymentAmount: ethers.formatEther(taskData.paymentAmount),
        });

        // A zero requester address means the task slot is uninitialized.
        if (taskData.requester === ethers.ZeroAddress) {
          throw new Error(`Task ${taskId} does not exist on blockchain`);
        }
      } catch (error: any) {
        // NOTE: this catch also intercepts the 'does not exist' error thrown
        // just above and rewraps it with contract-address context.
        console.error('❌ Failed to fetch task data:', error.message);
        const contractAddr = this.contract.target;
        throw new Error(
          `Task ${taskId} not found on contract ${contractAddr}. ` +
          `This task may have been created on a different contract. ` +
          `Check CONTRACT_ADDRESS in .env matches the contract used to create this task.`
        );
      }

      const tx = await this.contract.approveSubmission(taskId);
      console.log(`Transaction sent: ${tx.hash}`);

      const receipt = await tx.wait();
      console.log(`Payment released! Tx: ${receipt.hash}`);

      return receipt.hash;
    } catch (error: any) {
      console.error('Blockchain approveSubmission error:', error);

      // Provide helpful error message
      const contractAddr = this.contract.target;
      let errorMsg = `Failed to approve submission for task ${taskId}. `;

      if (error.message?.includes('Task does not exist') || error.message?.includes('not found')) {
        errorMsg += `Task not found on contract ${contractAddr}. `;
        errorMsg += `Ensure the task was created with this contract address.`;
      } else {
        errorMsg += error.message || 'Unknown blockchain error';
      }

      throw new Error(errorMsg);
    }
  }
|
||||
|
||||
/**
|
||||
* Reject submission and refund requester
|
||||
*/
|
||||
async rejectSubmission(taskId: number): Promise<string> {
|
||||
try {
|
||||
console.log(`Rejecting submission for task ${taskId} on blockchain`);
|
||||
|
||||
const tx = await this.contract.rejectSubmission(taskId);
|
||||
const receipt = await tx.wait();
|
||||
|
||||
return receipt.hash;
|
||||
} catch (error) {
|
||||
console.error('Blockchain rejectSubmission error:', error);
|
||||
throw new Error(`Failed to reject submission: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check cUSD balance
|
||||
*/
|
||||
async getCUSDBalance(walletAddress: string): Promise<string> {
|
||||
try {
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
if (!cUSDAddress) {
|
||||
throw new Error('CUSD_SEPOLIA_ADDRESS not configured');
|
||||
}
|
||||
|
||||
const cUSDContract = new ethers.Contract(
|
||||
cUSDAddress,
|
||||
['function balanceOf(address) view returns (uint256)'],
|
||||
this.provider
|
||||
);
|
||||
|
||||
const balance = await cUSDContract.balanceOf(walletAddress);
|
||||
return ethers.formatEther(balance);
|
||||
} catch (error) {
|
||||
console.error('Get cUSD balance error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get task details from blockchain
|
||||
*/
|
||||
async getTask(taskId: number): Promise<{
|
||||
taskId: number;
|
||||
requester: string;
|
||||
worker: string;
|
||||
paymentAmount: string;
|
||||
status: number;
|
||||
createdAt: number;
|
||||
expiresAt: number;
|
||||
} | null> {
|
||||
try {
|
||||
const taskData = await this.contract.tasks(taskId);
|
||||
|
||||
// Check if task exists (requester is not zero address)
|
||||
if (taskData.requester === ethers.ZeroAddress) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
taskId: Number(taskData.taskId),
|
||||
requester: taskData.requester,
|
||||
worker: taskData.worker,
|
||||
paymentAmount: ethers.formatEther(taskData.paymentAmount),
|
||||
status: Number(taskData.status),
|
||||
createdAt: Number(taskData.createdAt),
|
||||
expiresAt: Number(taskData.expiresAt),
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Get task error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current task counter from blockchain
|
||||
*/
|
||||
async getTaskCounter(): Promise<number> {
|
||||
try {
|
||||
const counter = await this.contract.taskCounter();
|
||||
return Number(counter);
|
||||
} catch (error) {
|
||||
console.error('Get task counter error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * The escrow contract address this service is bound to.
 *
 * @returns The contract's target address.
 */
getContractAddress(): string {
  const boundAddress = this.contract.target as string;
  return boundAddress;
}
|
||||
|
||||
/**
|
||||
* Check if user has approved the contract to spend cUSD
|
||||
*/
|
||||
async checkAllowance(walletAddress: string): Promise<string> {
|
||||
try {
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
const contractAddress = process.env.CONTRACT_ADDRESS;
|
||||
|
||||
if (!cUSDAddress) {
|
||||
throw new Error('CUSD_SEPOLIA_ADDRESS not configured');
|
||||
}
|
||||
if (!contractAddress) {
|
||||
throw new Error('CONTRACT_ADDRESS not configured');
|
||||
}
|
||||
|
||||
const cUSDContract = new ethers.Contract(
|
||||
cUSDAddress,
|
||||
['function allowance(address owner, address spender) view returns (uint256)'],
|
||||
this.provider
|
||||
);
|
||||
|
||||
const allowance = await cUSDContract.allowance(walletAddress, contractAddress);
|
||||
return ethers.formatEther(allowance);
|
||||
} catch (error) {
|
||||
console.error('Check allowance error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Approve contract to spend cUSD tokens
|
||||
*/
|
||||
async approveCUSD(amount: string): Promise<string> {
|
||||
try {
|
||||
const cUSDAddress = process.env.CUSD_SEPOLIA_ADDRESS;
|
||||
const contractAddress = process.env.CONTRACT_ADDRESS;
|
||||
|
||||
if (!cUSDAddress) {
|
||||
throw new Error('CUSD_SEPOLIA_ADDRESS not configured');
|
||||
}
|
||||
if (!contractAddress) {
|
||||
throw new Error('CONTRACT_ADDRESS not configured');
|
||||
}
|
||||
|
||||
const cUSDContract = new ethers.Contract(
|
||||
cUSDAddress,
|
||||
['function approve(address spender, uint256 amount) returns (bool)'],
|
||||
this.signer
|
||||
);
|
||||
|
||||
console.log(`Approving ${amount} cUSD for TaskEscrow contract...`);
|
||||
const tx = await cUSDContract.approve(contractAddress, ethers.parseEther(amount));
|
||||
const receipt = await tx.wait();
|
||||
|
||||
console.log(`✅ Approval confirmed: ${receipt.hash}`);
|
||||
return receipt.hash;
|
||||
} catch (error) {
|
||||
console.error('Approve cUSD error:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const blockchainService = new BlockchainService();
|
||||
82
dmtp/server/src/services/cache.service.ts
Normal file
82
dmtp/server/src/services/cache.service.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { createHash } from 'crypto';
|
||||
import { CacheConfig } from '../types/ai.types';
|
||||
|
||||
// Simple in-memory cache (use Redis in production)
|
||||
class CacheService {
|
||||
private cache: Map<string, { data: any; expiresAt: number }> = new Map();
|
||||
private config: CacheConfig;
|
||||
|
||||
constructor(config: CacheConfig) {
|
||||
this.config = config;
|
||||
|
||||
// Cleanup expired entries every 5 minutes
|
||||
setInterval(() => this.cleanup(), 5 * 60 * 1000);
|
||||
}
|
||||
|
||||
generateKey(prefix: string, data: any): string {
|
||||
const hash = createHash('sha256')
|
||||
.update(JSON.stringify(data))
|
||||
.digest('hex');
|
||||
return `${prefix}:${hash}`;
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
if (!this.config.enabled) return null;
|
||||
|
||||
const entry = this.cache.get(key);
|
||||
if (!entry) return null;
|
||||
|
||||
if (Date.now() > entry.expiresAt) {
|
||||
this.cache.delete(key);
|
||||
return null;
|
||||
}
|
||||
|
||||
console.log(`✅ Cache HIT: ${key}`);
|
||||
return entry.data as T;
|
||||
}
|
||||
|
||||
async set(key: string, data: any, ttl?: number): Promise<void> {
|
||||
if (!this.config.enabled) return;
|
||||
|
||||
const expiresAt = Date.now() + (ttl || this.config.ttl) * 1000;
|
||||
this.cache.set(key, { data, expiresAt });
|
||||
console.log(`💾 Cache SET: ${key} (TTL: ${ttl || this.config.ttl}s)`);
|
||||
}
|
||||
|
||||
async delete(key: string): Promise<void> {
|
||||
this.cache.delete(key);
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
this.cache.clear();
|
||||
console.log('🗑️ Cache cleared');
|
||||
}
|
||||
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
let cleaned = 0;
|
||||
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
if (now > entry.expiresAt) {
|
||||
this.cache.delete(key);
|
||||
cleaned++;
|
||||
}
|
||||
}
|
||||
|
||||
if (cleaned > 0) {
|
||||
console.log(`🧹 Cleaned ${cleaned} expired cache entries`);
|
||||
}
|
||||
}
|
||||
|
||||
getStats(): { size: number; keys: string[] } {
|
||||
return {
|
||||
size: this.cache.size,
|
||||
keys: Array.from(this.cache.keys()),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const cacheService = new CacheService({
|
||||
enabled: true,
|
||||
ttl: 3600, // 1 hour
|
||||
});
|
||||
213
dmtp/server/src/services/content-moderation.service.js
Normal file
213
dmtp/server/src/services/content-moderation.service.js
Normal file
@@ -0,0 +1,213 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.contentModerationService = exports.ContentModerationService = void 0;
|
||||
const moderation_config_1 = require("../config/moderation.config");
|
||||
const moderation_types_1 = require("../types/moderation.types");
|
||||
const moderation_logger_1 = require("../utils/moderation-logger");
|
||||
const cache_service_1 = require("./cache.service");
|
||||
const gemini_service_1 = require("./gemini.service");
|
||||
const moderation_filters_service_1 = require("./moderation-filters.service");
|
||||
// NOTE(review): This file appears to be compiled CommonJS output (tsc emit) of
// the sibling content-moderation.service.ts, committed under src/. Hand-edits
// here are lost on the next build and the two copies can silently drift — the
// TypeScript file is the source of truth. Consider emitting builds to dist/
// (already gitignored) and removing this artifact from version control.
class ContentModerationService {
    /**
     * Main moderation function
     */
    // Pipeline: cache lookup -> allowlist -> blocklist -> Gemini AI, with a
    // flag-for-review fallback when the AI step throws.
    async moderateSubmission(input) {
        console.log('\n🛡️ Starting content moderation...');
        const content = input.content.trim();
        // Check cache first
        // NOTE(review): key is derived from content only, yet input.context is
        // interpolated into the AI prompt — different contexts share one cached
        // verdict. Fixed in the .ts source; regenerate this file.
        const cacheKey = cache_service_1.cacheService.generateKey('moderation', { content });
        const cached = await cache_service_1.cacheService.get(cacheKey);
        if (cached) {
            console.log('✅ Returning cached moderation result');
            return cached;
        }
        // Step 1: Check allowlist (instant approval)
        if (moderation_config_1.moderationConfig.enablePreFiltering && moderation_filters_service_1.moderationFiltersService.isAllowlisted(content)) {
            console.log('✅ Content is allowlisted - instant approval');
            const result = this.createSafeResult();
            await this.logAndCache(input, result, cacheKey);
            return result;
        }
        // Step 2: Check blocklist (instant rejection/flag)
        if (moderation_config_1.moderationConfig.enablePreFiltering) {
            const blocklistResult = moderation_filters_service_1.moderationFiltersService.checkBlocklist(content);
            if (blocklistResult) {
                await this.logAndCache(input, blocklistResult, cacheKey);
                return blocklistResult;
            }
        }
        // Step 3: AI-based moderation with Gemini
        try {
            const result = await this.moderateWithAI(content, input);
            await this.logAndCache(input, result, cacheKey);
            return result;
        }
        catch (error) {
            console.error('❌ Moderation error:', error);
            // Fallback: flag for manual review on error
            const fallbackResult = this.createFallbackResult(error);
            await this.logAndCache(input, fallbackResult, cacheKey);
            return fallbackResult;
        }
    }
    /**
     * Moderate content using Gemini AI
     */
    async moderateWithAI(content, input) {
        const prompt = `${moderation_config_1.MODERATION_SYSTEM_PROMPT}

CONTENT TO MODERATE:
"""
${content}
"""

${input.context ? `CONTEXT: Task Type: ${input.context.taskType}, Previous Violations: ${input.context.previousViolations || 0}` : ''}

Analyze this content and return your moderation decision in the specified JSON format.`;
        // Call Gemini API
        const responseText = await gemini_service_1.geminiService.generateText(prompt);
        // Parse response
        const geminiResponse = gemini_service_1.geminiService.parseJsonResponse(responseText);
        // Convert to our format
        const result = this.convertGeminiResponse(geminiResponse, responseText);
        console.log(`🤖 AI Moderation: ${result.action} (flagged: ${result.flagged})`);
        return result;
    }
    /**
     * Convert Gemini response to our ModerationResult format
     */
    convertGeminiResponse(response, rawResponse) {
        // Convert categories
        const categories = {
            spam: this.convertCategory(response.categories.spam),
            toxic: this.convertCategory(response.categories.toxic),
            hate_speech: this.convertCategory(response.categories.hate_speech),
            fraud: this.convertCategory(response.categories.fraud),
            inappropriate: this.convertCategory(response.categories.inappropriate),
        };
        // Determine action based on confidence and severity
        let action;
        const hasCritical = Object.values(categories).some((cat) => cat.severity === moderation_types_1.ModerationSeverity.CRITICAL);
        const highConfidence = Object.values(categories).some((cat) => cat.detected && cat.confidence >= moderation_config_1.moderationConfig.autoRejectThreshold);
        if (hasCritical && highConfidence) {
            action = moderation_types_1.ModerationAction.AUTO_REJECT;
        }
        else if (response.flagged) {
            action = moderation_types_1.ModerationAction.FLAG_REVIEW;
        }
        else {
            action = moderation_types_1.ModerationAction.APPROVE;
        }
        return {
            flagged: response.flagged,
            categories,
            action,
            explanation: response.explanation,
            timestamp: new Date().toISOString(),
            geminiResponse: rawResponse,
        };
    }
    /**
     * Convert individual category from Gemini format
     */
    convertCategory(category) {
        return {
            detected: category.detected,
            confidence: category.confidence,
            severity: this.parseSeverity(category.severity),
        };
    }
    /**
     * Parse severity string to enum
     */
    parseSeverity(severity) {
        const normalized = severity.toUpperCase();
        return (moderation_types_1.ModerationSeverity[normalized] ||
            moderation_types_1.ModerationSeverity.NONE);
    }
    /**
     * Create safe result (no violations)
     */
    createSafeResult() {
        return {
            flagged: false,
            categories: {
                spam: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                toxic: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                hate_speech: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                fraud: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                inappropriate: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
            },
            action: moderation_types_1.ModerationAction.APPROVE,
            explanation: 'Content is safe and approved.',
            timestamp: new Date().toISOString(),
        };
    }
    /**
     * Create fallback result on error
     */
    createFallbackResult(error) {
        return {
            flagged: true,
            categories: {
                spam: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                toxic: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                hate_speech: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                fraud: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
                inappropriate: { detected: false, confidence: 0, severity: moderation_types_1.ModerationSeverity.NONE },
            },
            action: moderation_types_1.ModerationAction.FLAG_REVIEW,
            explanation: `Moderation failed due to error. Flagged for manual review. Error: ${error.message}`,
            timestamp: new Date().toISOString(),
        };
    }
    /**
     * Log and cache result
     */
    async logAndCache(input, result, cacheKey) {
        // Log decision
        if (moderation_config_1.moderationConfig.enableLogging) {
            moderation_logger_1.moderationLogger.log(input, result);
        }
        // Cache result
        await cache_service_1.cacheService.set(cacheKey, result, 3600); // 1 hour cache
    }
    /**
     * Batch moderate multiple submissions
     */
    async batchModerate(inputs) {
        console.log(`\n🛡️ Batch moderating ${inputs.length} submissions...`);
        const results = [];
        for (const input of inputs) {
            try {
                const result = await this.moderateSubmission(input);
                results.push(result);
            }
            catch (error) {
                console.error('Batch moderation error:', error);
                results.push(this.createFallbackResult(error));
            }
        }
        return results;
    }
    /**
     * Get moderation statistics
     */
    getStats() {
        return moderation_logger_1.moderationLogger.getStats();
    }
    /**
     * Add custom blocklist pattern
     */
    addBlocklistPattern(pattern, category, severity) {
        moderation_filters_service_1.moderationFiltersService.addBlocklistPattern(pattern, category, severity);
    }
    /**
     * Add custom allowlist pattern
     */
    addAllowlistPattern(pattern) {
        moderation_filters_service_1.moderationFiltersService.addAllowlistPattern(pattern);
    }
}
exports.ContentModerationService = ContentModerationService;
exports.contentModerationService = new ContentModerationService();
|
||||
264
dmtp/server/src/services/content-moderation.service.ts
Normal file
264
dmtp/server/src/services/content-moderation.service.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import { MODERATION_SYSTEM_PROMPT, moderationConfig } from '../config/moderation.config';
|
||||
import {
|
||||
CategoryDetection,
|
||||
GeminiModerationResponse,
|
||||
ModerationAction,
|
||||
ModerationCategories,
|
||||
ModerationInput,
|
||||
ModerationResult,
|
||||
ModerationSeverity,
|
||||
} from '../types/moderation.types';
|
||||
import { moderationLogger } from '../utils/moderation-logger';
|
||||
import { cacheService } from './cache.service';
|
||||
import { geminiService } from './gemini.service';
|
||||
import { moderationFiltersService } from './moderation-filters.service';
|
||||
|
||||
export class ContentModerationService {
|
||||
/**
|
||||
* Main moderation function
|
||||
*/
|
||||
async moderateSubmission(input: ModerationInput): Promise<ModerationResult> {
|
||||
console.log('\n🛡️ Starting content moderation...');
|
||||
|
||||
const content = input.content.trim();
|
||||
|
||||
// Check cache first
|
||||
const cacheKey = cacheService.generateKey('moderation', { content });
|
||||
const cached = await cacheService.get<ModerationResult>(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
console.log('✅ Returning cached moderation result');
|
||||
return cached;
|
||||
}
|
||||
|
||||
// Step 1: Check allowlist (instant approval)
|
||||
if (moderationConfig.enablePreFiltering && moderationFiltersService.isAllowlisted(content)) {
|
||||
console.log('✅ Content is allowlisted - instant approval');
|
||||
const result = this.createSafeResult();
|
||||
await this.logAndCache(input, result, cacheKey);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Step 2: Check blocklist (instant rejection/flag)
|
||||
if (moderationConfig.enablePreFiltering) {
|
||||
const blocklistResult = moderationFiltersService.checkBlocklist(content);
|
||||
if (blocklistResult) {
|
||||
await this.logAndCache(input, blocklistResult, cacheKey);
|
||||
return blocklistResult;
|
||||
}
|
||||
}
|
||||
|
||||
// Step 3: AI-based moderation with Gemini
|
||||
try {
|
||||
const result = await this.moderateWithAI(content, input);
|
||||
await this.logAndCache(input, result, cacheKey);
|
||||
return result;
|
||||
} catch (error) {
|
||||
console.error('❌ Moderation error:', error);
|
||||
|
||||
// Fallback: flag for manual review on error
|
||||
const fallbackResult = this.createFallbackResult(error);
|
||||
await this.logAndCache(input, fallbackResult, cacheKey);
|
||||
return fallbackResult;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Moderate content using Gemini AI
|
||||
*/
|
||||
private async moderateWithAI(content: string, input: ModerationInput): Promise<ModerationResult> {
|
||||
const prompt = `${MODERATION_SYSTEM_PROMPT}
|
||||
|
||||
CONTENT TO MODERATE:
|
||||
"""
|
||||
${content}
|
||||
"""
|
||||
|
||||
${input.context ? `CONTEXT: Task Type: ${input.context.taskType}, Previous Violations: ${input.context.previousViolations || 0}` : ''}
|
||||
|
||||
Analyze this content and return your moderation decision in the specified JSON format.`;
|
||||
|
||||
// Call Gemini API
|
||||
const responseText = await geminiService.generateText(prompt);
|
||||
|
||||
// Parse response
|
||||
const geminiResponse = geminiService.parseJsonResponse<GeminiModerationResponse>(responseText);
|
||||
|
||||
// Convert to our format
|
||||
const result = this.convertGeminiResponse(geminiResponse, responseText);
|
||||
|
||||
console.log(`🤖 AI Moderation: ${result.action} (flagged: ${result.flagged})`);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Gemini response to our ModerationResult format
|
||||
*/
|
||||
private convertGeminiResponse(
|
||||
response: GeminiModerationResponse,
|
||||
rawResponse: string
|
||||
): ModerationResult {
|
||||
// Convert categories
|
||||
const categories: ModerationCategories = {
|
||||
spam: this.convertCategory(response.categories.spam),
|
||||
toxic: this.convertCategory(response.categories.toxic),
|
||||
hate_speech: this.convertCategory(response.categories.hate_speech),
|
||||
fraud: this.convertCategory(response.categories.fraud),
|
||||
inappropriate: this.convertCategory(response.categories.inappropriate),
|
||||
};
|
||||
|
||||
// Determine action based on confidence and severity
|
||||
let action: ModerationAction;
|
||||
|
||||
const hasCritical = Object.values(categories).some(
|
||||
(cat) => cat.severity === ModerationSeverity.CRITICAL
|
||||
);
|
||||
|
||||
const highConfidence = Object.values(categories).some(
|
||||
(cat) => cat.detected && cat.confidence >= moderationConfig.autoRejectThreshold
|
||||
);
|
||||
|
||||
if (hasCritical && highConfidence) {
|
||||
action = ModerationAction.AUTO_REJECT;
|
||||
} else if (response.flagged) {
|
||||
action = ModerationAction.FLAG_REVIEW;
|
||||
} else {
|
||||
action = ModerationAction.APPROVE;
|
||||
}
|
||||
|
||||
return {
|
||||
flagged: response.flagged,
|
||||
categories,
|
||||
action,
|
||||
explanation: response.explanation,
|
||||
timestamp: new Date().toISOString(),
|
||||
geminiResponse: rawResponse,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert individual category from Gemini format
|
||||
*/
|
||||
private convertCategory(category: {
|
||||
detected: boolean;
|
||||
confidence: number;
|
||||
severity: string;
|
||||
}): CategoryDetection {
|
||||
return {
|
||||
detected: category.detected,
|
||||
confidence: category.confidence,
|
||||
severity: this.parseSeverity(category.severity),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse severity string to enum
|
||||
*/
|
||||
private parseSeverity(severity: string): ModerationSeverity {
|
||||
const normalized = severity.toUpperCase();
|
||||
return (
|
||||
ModerationSeverity[normalized as keyof typeof ModerationSeverity] ||
|
||||
ModerationSeverity.NONE
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create safe result (no violations)
|
||||
*/
|
||||
private createSafeResult(): ModerationResult {
|
||||
return {
|
||||
flagged: false,
|
||||
categories: {
|
||||
spam: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
toxic: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
hate_speech: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
fraud: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
inappropriate: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
},
|
||||
action: ModerationAction.APPROVE,
|
||||
explanation: 'Content is safe and approved.',
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create fallback result on error
|
||||
*/
|
||||
private createFallbackResult(error: any): ModerationResult {
|
||||
return {
|
||||
flagged: true,
|
||||
categories: {
|
||||
spam: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
toxic: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
hate_speech: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
fraud: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
inappropriate: { detected: false, confidence: 0, severity: ModerationSeverity.NONE },
|
||||
},
|
||||
action: ModerationAction.FLAG_REVIEW,
|
||||
explanation: `Moderation failed due to error. Flagged for manual review. Error: ${error.message}`,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Log and cache result
|
||||
*/
|
||||
private async logAndCache(
|
||||
input: ModerationInput,
|
||||
result: ModerationResult,
|
||||
cacheKey: string
|
||||
): Promise<void> {
|
||||
// Log decision
|
||||
if (moderationConfig.enableLogging) {
|
||||
moderationLogger.log(input, result);
|
||||
}
|
||||
|
||||
// Cache result
|
||||
await cacheService.set(cacheKey, result, 3600); // 1 hour cache
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch moderate multiple submissions
|
||||
*/
|
||||
async batchModerate(inputs: ModerationInput[]): Promise<ModerationResult[]> {
|
||||
console.log(`\n🛡️ Batch moderating ${inputs.length} submissions...`);
|
||||
|
||||
const results: ModerationResult[] = [];
|
||||
|
||||
for (const input of inputs) {
|
||||
try {
|
||||
const result = await this.moderateSubmission(input);
|
||||
results.push(result);
|
||||
} catch (error) {
|
||||
console.error('Batch moderation error:', error);
|
||||
results.push(this.createFallbackResult(error));
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get moderation statistics
|
||||
*/
|
||||
getStats() {
|
||||
return moderationLogger.getStats();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom blocklist pattern
|
||||
*/
|
||||
addBlocklistPattern(pattern: RegExp, category: string, severity: ModerationSeverity): void {
|
||||
moderationFiltersService.addBlocklistPattern(pattern, category, severity);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom allowlist pattern
|
||||
*/
|
||||
addAllowlistPattern(pattern: RegExp): void {
|
||||
moderationFiltersService.addAllowlistPattern(pattern);
|
||||
}
|
||||
}
|
||||
|
||||
export const contentModerationService = new ContentModerationService();
|
||||
92
dmtp/server/src/services/gemini.service.js
Normal file
92
dmtp/server/src/services/gemini.service.js
Normal file
@@ -0,0 +1,92 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.geminiService = exports.GeminiService = void 0;
|
||||
const generative_ai_1 = require("@google/generative-ai");
|
||||
const ai_config_1 = require("../config/ai.config");
|
||||
const rate_limiter_1 = require("../utils/rate-limiter");
|
||||
const retry_1 = require("../utils/retry");
|
||||
// NOTE(review): This file appears to be compiled CommonJS output (tsc emit) of
// the sibling gemini.service.ts, committed under src/. It has already drifted:
// the .ts adds `responseMimeType: "application/json"` to both generation
// configs, but this emit does not have it. The TypeScript file is the source
// of truth — consider emitting builds to dist/ (already gitignored) and
// removing this artifact from version control.
class GeminiService {
    constructor() {
        // Fails fast when no API key is configured.
        if (!ai_config_1.aiConfig.apiKey) {
            throw new Error('GEMINI_API_KEY is not configured');
        }
        this.genAI = new generative_ai_1.GoogleGenerativeAI(ai_config_1.aiConfig.apiKey);
        this.rateLimiter = new rate_limiter_1.RateLimiter(ai_config_1.aiConfig.rateLimit);
    }
    // Generate text from a prompt, rate-limited and retried per aiConfig.
    async generateText(prompt) {
        // Check rate limit
        await this.rateLimiter.checkLimit();
        return (0, retry_1.withRetry)(async () => {
            const model = this.genAI.getGenerativeModel({
                model: ai_config_1.aiConfig.model,
                generationConfig: {
                    temperature: ai_config_1.aiConfig.temperature,
                    maxOutputTokens: ai_config_1.aiConfig.maxOutputTokens,
                },
            });
            console.log(`🤖 Calling Gemini API (${ai_config_1.aiConfig.model})...`);
            const result = await model.generateContent(prompt);
            const response = result.response;
            const text = response.text();
            console.log(`✅ Gemini API response received (${text.length} chars)`);
            return text;
        }, ai_config_1.aiConfig.retry, 'Gemini API call');
    }
    // Analyze an image (fetched from imageUrl) together with a text prompt.
    async generateFromImage(prompt, imageUrl) {
        // Check rate limit
        await this.rateLimiter.checkLimit();
        return (0, retry_1.withRetry)(async () => {
            // Use vision model for image analysis
            const model = this.genAI.getGenerativeModel({
                model: 'gemini-1.5-flash',
                generationConfig: {
                    temperature: ai_config_1.aiConfig.temperature,
                    maxOutputTokens: ai_config_1.aiConfig.maxOutputTokens,
                },
            });
            console.log(`🖼️ Calling Gemini Vision API...`);
            // Fetch image
            const imageResponse = await fetch(imageUrl);
            if (!imageResponse.ok) {
                throw new Error(`Failed to fetch image: ${imageResponse.statusText}`);
            }
            const imageBuffer = await imageResponse.arrayBuffer();
            const base64Image = Buffer.from(imageBuffer).toString('base64');
            // Get mime type
            const mimeType = imageResponse.headers.get('content-type') || 'image/jpeg';
            const imagePart = {
                inlineData: {
                    data: base64Image,
                    mimeType,
                },
            };
            const result = await model.generateContent([prompt, imagePart]);
            const response = result.response;
            const text = response.text();
            console.log(`✅ Gemini Vision API response received`);
            return text;
        }, ai_config_1.aiConfig.retry, 'Gemini Vision API call');
    }
    // Strip optional markdown code fences and parse the model output as JSON.
    parseJsonResponse(responseText) {
        try {
            // Remove markdown code blocks if present
            let cleaned = responseText.trim();
            if (cleaned.startsWith('```json')) {
                cleaned = cleaned.replace(/```json\n?/g, '').replace(/```\n?$/g, '');
            }
            else if (cleaned.startsWith('```')) {
                cleaned = cleaned.replace(/```\n?/g, '');
            }
            return JSON.parse(cleaned);
        }
        catch (error) {
            console.error('Failed to parse JSON response:', responseText);
            throw new Error(`Invalid JSON response from Gemini: ${error}`);
        }
    }
    getRateLimitStatus() {
        return this.rateLimiter.getStatus();
    }
}
exports.GeminiService = GeminiService;
exports.geminiService = new GeminiService();
|
||||
143
dmtp/server/src/services/gemini.service.ts
Normal file
143
dmtp/server/src/services/gemini.service.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import { GoogleGenerativeAI } from '@google/generative-ai';
|
||||
import { aiConfig } from '../config/ai.config';
|
||||
import { RateLimiter } from '../utils/rate-limiter';
|
||||
import { withRetry } from '../utils/retry';
|
||||
|
||||
export class GeminiService {
|
||||
// Underlying Google Generative AI client, created once per service instance.
private genAI: GoogleGenerativeAI;
// Shared limiter applied before every API call (configured via aiConfig).
private rateLimiter: RateLimiter;

// Fails fast when no API key is configured. NOTE(review): if this class is
// instantiated as a module-level singleton (as the compiled emit suggests),
// a missing GEMINI_API_KEY crashes at import/startup rather than on first
// use — confirm that is the intended behavior.
constructor() {
  if (!aiConfig.apiKey) {
    throw new Error('GEMINI_API_KEY is not configured');
  }

  this.genAI = new GoogleGenerativeAI(aiConfig.apiKey);
  this.rateLimiter = new RateLimiter(aiConfig.rateLimit);
}
|
||||
|
||||
/**
 * Generate text for a prompt via the configured Gemini model.
 *
 * Rate-limited via this.rateLimiter and wrapped in withRetry per
 * aiConfig.retry. The generation config forces a JSON response body
 * (responseMimeType), so callers should expect JSON output; a plain-text
 * caller would need a separate path.
 *
 * @param prompt Full prompt text, including any system instructions.
 * @returns Raw model output text (expected to be a JSON string).
 */
async generateText(prompt: string): Promise<string> {
  // Check rate limit
  await this.rateLimiter.checkLimit();

  return withRetry(
    async () => {
      const model = this.genAI.getGenerativeModel({
        model: aiConfig.model,
        generationConfig: {
          temperature: aiConfig.temperature,
          maxOutputTokens: aiConfig.maxOutputTokens,
          responseMimeType: "application/json", // Force JSON response
        },
      });

      console.log(`🤖 Calling Gemini API (${aiConfig.model})...`);
      const result = await model.generateContent(prompt);
      const response = result.response;
      const text = response.text();

      console.log(`✅ Gemini API response received (${text.length} chars)`);
      return text;
    },
    aiConfig.retry,
    'Gemini API call'
  );
}
|
||||
|
||||
async generateFromImage(
|
||||
prompt: string,
|
||||
imageUrl: string
|
||||
): Promise<string> {
|
||||
// Check rate limit
|
||||
await this.rateLimiter.checkLimit();
|
||||
|
||||
return withRetry(
|
||||
async () => {
|
||||
// Use vision model for image analysis
|
||||
const model = this.genAI.getGenerativeModel({
|
||||
model: 'gemini-1.5-flash',
|
||||
generationConfig: {
|
||||
temperature: aiConfig.temperature,
|
||||
maxOutputTokens: aiConfig.maxOutputTokens,
|
||||
responseMimeType: "application/json", // Force JSON response
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`🖼️ Calling Gemini Vision API...`);
|
||||
|
||||
// Fetch image
|
||||
const imageResponse = await fetch(imageUrl);
|
||||
if (!imageResponse.ok) {
|
||||
throw new Error(`Failed to fetch image: ${imageResponse.statusText}`);
|
||||
}
|
||||
|
||||
const imageBuffer = await imageResponse.arrayBuffer();
|
||||
const base64Image = Buffer.from(imageBuffer).toString('base64');
|
||||
|
||||
// Get mime type
|
||||
const mimeType = imageResponse.headers.get('content-type') || 'image/jpeg';
|
||||
|
||||
const imagePart = {
|
||||
inlineData: {
|
||||
data: base64Image,
|
||||
mimeType,
|
||||
},
|
||||
};
|
||||
|
||||
const result = await model.generateContent([prompt, imagePart]);
|
||||
const response = result.response;
|
||||
const text = response.text();
|
||||
|
||||
console.log(`✅ Gemini Vision API response received`);
|
||||
return text;
|
||||
},
|
||||
aiConfig.retry,
|
||||
'Gemini Vision API call'
|
||||
);
|
||||
}
|
||||
|
||||
parseJsonResponse<T>(responseText: string): T {
|
||||
try {
|
||||
// Remove markdown code blocks if present
|
||||
let cleaned = responseText.trim();
|
||||
|
||||
if (cleaned.startsWith('```json')) {
|
||||
cleaned = cleaned.replace(/```json\n?/g, '').replace(/```\n?$/g, '');
|
||||
} else if (cleaned.startsWith('```')) {
|
||||
cleaned = cleaned.replace(/```\n?/g, '');
|
||||
}
|
||||
|
||||
// Try to extract JSON object if there's extra text
|
||||
const jsonMatch = cleaned.match(/\{[\s\S]*\}/);
|
||||
if (jsonMatch) {
|
||||
cleaned = jsonMatch[0];
|
||||
}
|
||||
|
||||
// Additional cleanup - remove any trailing commas before closing braces
|
||||
cleaned = cleaned.replace(/,(\s*[}\]])/g, '$1');
|
||||
|
||||
const parsed = JSON.parse(cleaned);
|
||||
|
||||
// Validate that we got an object back
|
||||
if (typeof parsed !== 'object' || parsed === null) {
|
||||
throw new Error('Response is not a valid JSON object');
|
||||
}
|
||||
|
||||
return parsed;
|
||||
} catch (error) {
|
||||
console.error('Failed to parse JSON response. Raw response:', responseText);
|
||||
console.error('Parse error:', error);
|
||||
|
||||
// Log the first 500 chars for debugging
|
||||
console.error('Response preview:', responseText.substring(0, 500));
|
||||
|
||||
throw new Error(`Invalid JSON response from Gemini: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
getRateLimitStatus() {
|
||||
return this.rateLimiter.getStatus();
|
||||
}
|
||||
}
|
||||
|
||||
export const geminiService = new GeminiService();
|
||||
87
dmtp/server/src/services/moderation-filters.service.js
Normal file
87
dmtp/server/src/services/moderation-filters.service.js
Normal file
@@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.moderationFiltersService = exports.ModerationFiltersService = void 0;
|
||||
const moderation_config_1 = require("../config/moderation.config");
|
||||
const moderation_types_1 = require("../types/moderation.types");
|
||||
// Compiled output of moderation-filters.service.ts.
// Fast regex-based pre-moderation: allowlist hits skip the AI check,
// blocklist hits reject/flag immediately without an AI call.
class ModerationFiltersService {
    /**
     * Check if content matches allowlist (safe content, skip AI check).
     * @param {string} content raw user content
     * @returns {boolean} true when any allowlist pattern matches the trimmed content
     */
    isAllowlisted(content) {
        const trimmed = content.trim();
        return moderation_config_1.ALLOWLIST_PATTERNS.some((pattern) => pattern.test(trimmed));
    }
    /**
     * Check if content matches blocklist (instant rejection).
     * Returns the first matching rule's result; null when nothing matches.
     */
    checkBlocklist(content) {
        for (const rule of moderation_config_1.BLOCKLIST_PATTERNS) {
            const matches = content.match(rule.pattern);
            if (matches) {
                console.log(`🚫 Blocklist match: ${rule.category} (${rule.severity})`);
                return this.createBlocklistResult(rule.category, rule.severity, matches);
            }
        }
        return null;
    }
    /**
     * Create moderation result for blocklist match.
     * All categories start as empty detections; the matched one is overwritten.
     */
    createBlocklistResult(category, severity, matches) {
        const categories = {
            spam: this.createEmptyDetection(),
            toxic: this.createEmptyDetection(),
            hate_speech: this.createEmptyDetection(),
            fraud: this.createEmptyDetection(),
            inappropriate: this.createEmptyDetection(),
        };
        // Set the matched category (ignored silently if the rule's category
        // is not one of the five known keys).
        if (category in categories) {
            categories[category] = {
                detected: true,
                confidence: 100,
                severity,
                examples: matches.slice(0, 3), // Max 3 examples
            };
        }
        return {
            flagged: true,
            categories,
            // Critical severity is auto-rejected; everything else goes to review.
            action: severity === moderation_types_1.ModerationSeverity.CRITICAL
                ? moderation_types_1.ModerationAction.AUTO_REJECT
                : moderation_types_1.ModerationAction.FLAG_REVIEW,
            explanation: `Blocklist match: ${category} (${severity}). Matched pattern in content.`,
            timestamp: new Date().toISOString(),
        };
    }
    /**
     * Create empty category detection (nothing detected, zero confidence).
     */
    createEmptyDetection() {
        return {
            detected: false,
            confidence: 0,
            severity: moderation_types_1.ModerationSeverity.NONE,
        };
    }
    /**
     * Add custom blocklist pattern.
     * NOTE(review): mutates the shared config array at runtime — not persisted.
     */
    addBlocklistPattern(pattern, category, severity) {
        moderation_config_1.BLOCKLIST_PATTERNS.push({
            pattern,
            category,
            severity,
        });
        console.log(`✅ Added blocklist pattern for ${category}`);
    }
    /**
     * Add custom allowlist pattern.
     * NOTE(review): mutates the shared config array at runtime — not persisted.
     */
    addAllowlistPattern(pattern) {
        moderation_config_1.ALLOWLIST_PATTERNS.push(pattern);
        console.log(`✅ Added allowlist pattern`);
    }
}
// Compiled CommonJS exports: class plus a shared singleton instance.
exports.ModerationFiltersService = ModerationFiltersService;
exports.moderationFiltersService = new ModerationFiltersService();
|
||||
106
dmtp/server/src/services/moderation-filters.service.ts
Normal file
106
dmtp/server/src/services/moderation-filters.service.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { ALLOWLIST_PATTERNS, BLOCKLIST_PATTERNS } from '../config/moderation.config';
|
||||
import {
|
||||
CategoryDetection,
|
||||
ModerationAction,
|
||||
ModerationCategories,
|
||||
ModerationResult,
|
||||
ModerationSeverity,
|
||||
} from '../types/moderation.types';
|
||||
|
||||
export class ModerationFiltersService {
|
||||
/**
|
||||
* Check if content matches allowlist (safe content, skip AI check)
|
||||
*/
|
||||
isAllowlisted(content: string): boolean {
|
||||
const trimmed = content.trim();
|
||||
return ALLOWLIST_PATTERNS.some((pattern) => pattern.test(trimmed));
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if content matches blocklist (instant rejection)
|
||||
*/
|
||||
checkBlocklist(content: string): ModerationResult | null {
|
||||
for (const rule of BLOCKLIST_PATTERNS) {
|
||||
const matches = content.match(rule.pattern);
|
||||
|
||||
if (matches) {
|
||||
console.log(`🚫 Blocklist match: ${rule.category} (${rule.severity})`);
|
||||
|
||||
return this.createBlocklistResult(rule.category, rule.severity, matches);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create moderation result for blocklist match
|
||||
*/
|
||||
private createBlocklistResult(
|
||||
category: string,
|
||||
severity: ModerationSeverity,
|
||||
matches: RegExpMatchArray
|
||||
): ModerationResult {
|
||||
const categories: ModerationCategories = {
|
||||
spam: this.createEmptyDetection(),
|
||||
toxic: this.createEmptyDetection(),
|
||||
hate_speech: this.createEmptyDetection(),
|
||||
fraud: this.createEmptyDetection(),
|
||||
inappropriate: this.createEmptyDetection(),
|
||||
};
|
||||
|
||||
// Set the matched category
|
||||
if (category in categories) {
|
||||
categories[category as keyof ModerationCategories] = {
|
||||
detected: true,
|
||||
confidence: 100,
|
||||
severity,
|
||||
examples: matches.slice(0, 3), // Max 3 examples
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
flagged: true,
|
||||
categories,
|
||||
action:
|
||||
severity === ModerationSeverity.CRITICAL
|
||||
? ModerationAction.AUTO_REJECT
|
||||
: ModerationAction.FLAG_REVIEW,
|
||||
explanation: `Blocklist match: ${category} (${severity}). Matched pattern in content.`,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create empty category detection
|
||||
*/
|
||||
private createEmptyDetection(): CategoryDetection {
|
||||
return {
|
||||
detected: false,
|
||||
confidence: 0,
|
||||
severity: ModerationSeverity.NONE,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom blocklist pattern
|
||||
*/
|
||||
addBlocklistPattern(pattern: RegExp, category: string, severity: ModerationSeverity): void {
|
||||
BLOCKLIST_PATTERNS.push({
|
||||
pattern,
|
||||
category,
|
||||
severity,
|
||||
});
|
||||
console.log(`✅ Added blocklist pattern for ${category}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom allowlist pattern
|
||||
*/
|
||||
addAllowlistPattern(pattern: RegExp): void {
|
||||
ALLOWLIST_PATTERNS.push(pattern);
|
||||
console.log(`✅ Added allowlist pattern`);
|
||||
}
|
||||
}
|
||||
|
||||
export const moderationFiltersService = new ModerationFiltersService();
|
||||
149
dmtp/server/src/services/notification.service.js
Normal file
149
dmtp/server/src/services/notification.service.js
Normal file
@@ -0,0 +1,149 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.notificationService = exports.NotificationService = exports.NotificationType = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
// Compiled output of notification.service.ts.
// TS enum compiled to a bidirectional-free string-keyed namespace object.
var NotificationType;
(function (NotificationType) {
    NotificationType["SUBMISSION_APPROVED"] = "SUBMISSION_APPROVED";
    NotificationType["SUBMISSION_REJECTED"] = "SUBMISSION_REJECTED";
    NotificationType["PAYMENT_RELEASED"] = "PAYMENT_RELEASED";
    NotificationType["TASK_EXPIRED"] = "TASK_EXPIRED";
})(NotificationType || (exports.NotificationType = NotificationType = {}));
// Best-effort notification delivery: only the in-app channel is active,
// email/push/SMS are stubs. Errors are logged, never thrown to callers.
class NotificationService {
    /**
     * Send notification to user over all enabled channels.
     * Silently returns when the user does not exist.
     */
    async send(userId, data) {
        try {
            console.log(`\n📬 Sending notification to user ${userId}`);
            console.log(`Type: ${data.type}`);
            // Get user details
            const user = await connections_1.prisma.user.findUnique({
                where: { id: userId },
            });
            if (!user) {
                console.error('User not found for notification');
                return;
            }
            // Log notification
            this.logNotification(user.walletAddress, data);
            // Send via different channels
            await Promise.all([
                this.sendInAppNotification(userId, data),
                // Add more channels as needed:
                // this.sendEmail(user.email, data),
                // this.sendPushNotification(user.fcmToken, data),
                // this.sendSMS(user.phoneNumber, data),
            ]);
            console.log(`✅ Notification sent successfully`);
        }
        catch (error) {
            console.error('Failed to send notification:', error);
            // Don't throw - notifications are not critical
        }
    }
    /**
     * Save in-app notification (can be fetched by frontend).
     * NOTE(review): currently only logs — persistence is still a TODO.
     */
    async sendInAppNotification(userId, data) {
        // Store in database for in-app display
        const notification = {
            userId,
            type: data.type,
            title: this.getNotificationTitle(data.type),
            message: this.getNotificationMessage(data),
            data: JSON.stringify(data),
            read: false,
            createdAt: new Date(),
        };
        console.log('📱 In-app notification created:', notification);
        // TODO: Store in notifications table (create migration if needed)
        // await prisma.notification.create({ data: notification });
    }
    /**
     * Send email notification (placeholder — logs only).
     */
    async sendEmail(email, data) {
        if (!email)
            return;
        console.log(`📧 Email notification to ${email}:`, {
            subject: this.getNotificationTitle(data.type),
            body: this.getNotificationMessage(data),
        });
        // TODO: Integrate with email service (SendGrid, AWS SES, etc.)
        // await emailService.send({
        //   to: email,
        //   subject: this.getNotificationTitle(data.type),
        //   html: this.getEmailTemplate(data),
        // });
    }
    /**
     * Send push notification (placeholder — logs only).
     */
    async sendPushNotification(fcmToken, data) {
        if (!fcmToken)
            return;
        console.log(`🔔 Push notification:`, {
            token: fcmToken,
            title: this.getNotificationTitle(data.type),
            body: this.getNotificationMessage(data),
        });
        // TODO: Integrate with Firebase Cloud Messaging
        // await admin.messaging().send({
        //   token: fcmToken,
        //   notification: {
        //     title: this.getNotificationTitle(data.type),
        //     body: this.getNotificationMessage(data),
        //   },
        //   data: data,
        // });
    }
    /**
     * Get notification title for a given type.
     */
    getNotificationTitle(type) {
        const titles = {
            [NotificationType.SUBMISSION_APPROVED]: '✅ Submission Approved!',
            [NotificationType.SUBMISSION_REJECTED]: '❌ Submission Rejected',
            [NotificationType.PAYMENT_RELEASED]: '💰 Payment Released!',
            [NotificationType.TASK_EXPIRED]: '⏰ Task Expired',
        };
        return titles[type];
    }
    /**
     * Get human-readable notification body for a given payload.
     */
    getNotificationMessage(data) {
        switch (data.type) {
            case NotificationType.SUBMISSION_APPROVED:
                return `Your submission has been approved! Payment of ${data.amount} cUSD is being processed.`;
            case NotificationType.SUBMISSION_REJECTED:
                return `Your submission was rejected. Reason: ${data.result?.reasoning || 'Did not meet criteria'}`;
            case NotificationType.PAYMENT_RELEASED:
                return `Payment of ${data.amount} cUSD has been sent to your wallet. Tx: ${data.txHash}`;
            case NotificationType.TASK_EXPIRED:
                return `Task has expired without completion.`;
            default:
                return 'You have a new notification';
        }
    }
    /**
     * Log notification for debugging.
     */
    logNotification(walletAddress, data) {
        console.log(`\n📋 Notification Log:`);
        console.log(`To: ${walletAddress}`);
        console.log(`Type: ${data.type}`);
        console.log(`Data:`, JSON.stringify(data, null, 2));
    }
    /**
     * Batch send notifications (all in parallel; individual failures are
     * swallowed by send()).
     */
    async sendBatch(notifications) {
        console.log(`\n📬 Sending batch of ${notifications.length} notifications...`);
        await Promise.all(notifications.map((notification) => this.send(notification.userId, notification.data)));
        console.log(`✅ Batch notifications sent`);
    }
}
// Compiled CommonJS exports: class plus a shared singleton instance.
exports.NotificationService = NotificationService;
exports.notificationService = new NotificationService();
|
||||
187
dmtp/server/src/services/notification.service.ts
Normal file
187
dmtp/server/src/services/notification.service.ts
Normal file
@@ -0,0 +1,187 @@
|
||||
import { prisma } from '../database/connections';
|
||||
|
||||
/** Notification event types emitted by the task/payment pipeline. */
export enum NotificationType {
  SUBMISSION_APPROVED = 'SUBMISSION_APPROVED',
  SUBMISSION_REJECTED = 'SUBMISSION_REJECTED',
  PAYMENT_RELEASED = 'PAYMENT_RELEASED',
  TASK_EXPIRED = 'TASK_EXPIRED',
}

/**
 * Payload attached to every notification.
 * Optional fields are populated depending on `type`.
 */
export interface NotificationData {
  type: NotificationType;
  taskId: string;
  submissionId: string;
  // Verification/error detail — used for rejection messages (result.reasoning).
  result?: any;
  // Payment amount in cUSD; used by approval/payment messages.
  amount?: number;
  // Blockchain transaction hash; set when a payment was released.
  txHash?: string;
}
|
||||
|
||||
export class NotificationService {
|
||||
/**
|
||||
* Send notification to user
|
||||
*/
|
||||
async send(userId: string, data: NotificationData): Promise<void> {
|
||||
try {
|
||||
console.log(`\n📬 Sending notification to user ${userId}`);
|
||||
console.log(`Type: ${data.type}`);
|
||||
|
||||
// Get user details
|
||||
const user = await prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
});
|
||||
|
||||
if (!user) {
|
||||
console.error('User not found for notification');
|
||||
return;
|
||||
}
|
||||
|
||||
// Log notification
|
||||
this.logNotification(user.walletAddress, data);
|
||||
|
||||
// Send via different channels
|
||||
await Promise.all([
|
||||
this.sendInAppNotification(userId, data),
|
||||
// Add more channels as needed:
|
||||
// this.sendEmail(user.email, data),
|
||||
// this.sendPushNotification(user.fcmToken, data),
|
||||
// this.sendSMS(user.phoneNumber, data),
|
||||
]);
|
||||
|
||||
console.log(`✅ Notification sent successfully`);
|
||||
} catch (error) {
|
||||
console.error('Failed to send notification:', error);
|
||||
// Don't throw - notifications are not critical
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save in-app notification (can be fetched by frontend)
|
||||
*/
|
||||
private async sendInAppNotification(
|
||||
userId: string,
|
||||
data: NotificationData
|
||||
): Promise<void> {
|
||||
// Store in database for in-app display
|
||||
const notification = {
|
||||
userId,
|
||||
type: data.type,
|
||||
title: this.getNotificationTitle(data.type),
|
||||
message: this.getNotificationMessage(data),
|
||||
data: JSON.stringify(data),
|
||||
read: false,
|
||||
createdAt: new Date(),
|
||||
};
|
||||
|
||||
console.log('📱 In-app notification created:', notification);
|
||||
|
||||
// TODO: Store in notifications table (create migration if needed)
|
||||
// await prisma.notification.create({ data: notification });
|
||||
}
|
||||
|
||||
/**
|
||||
* Send email notification (placeholder)
|
||||
*/
|
||||
private async sendEmail(email: string | null, data: NotificationData): Promise<void> {
|
||||
if (!email) return;
|
||||
|
||||
console.log(`📧 Email notification to ${email}:`, {
|
||||
subject: this.getNotificationTitle(data.type),
|
||||
body: this.getNotificationMessage(data),
|
||||
});
|
||||
|
||||
// TODO: Integrate with email service (SendGrid, AWS SES, etc.)
|
||||
// await emailService.send({
|
||||
// to: email,
|
||||
// subject: this.getNotificationTitle(data.type),
|
||||
// html: this.getEmailTemplate(data),
|
||||
// });
|
||||
}
|
||||
|
||||
/**
|
||||
* Send push notification (placeholder)
|
||||
*/
|
||||
private async sendPushNotification(
|
||||
fcmToken: string | null,
|
||||
data: NotificationData
|
||||
): Promise<void> {
|
||||
if (!fcmToken) return;
|
||||
|
||||
console.log(`🔔 Push notification:`, {
|
||||
token: fcmToken,
|
||||
title: this.getNotificationTitle(data.type),
|
||||
body: this.getNotificationMessage(data),
|
||||
});
|
||||
|
||||
// TODO: Integrate with Firebase Cloud Messaging
|
||||
// await admin.messaging().send({
|
||||
// token: fcmToken,
|
||||
// notification: {
|
||||
// title: this.getNotificationTitle(data.type),
|
||||
// body: this.getNotificationMessage(data),
|
||||
// },
|
||||
// data: data,
|
||||
// });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get notification title
|
||||
*/
|
||||
private getNotificationTitle(type: NotificationType): string {
|
||||
const titles: Record<NotificationType, string> = {
|
||||
[NotificationType.SUBMISSION_APPROVED]: '✅ Submission Approved!',
|
||||
[NotificationType.SUBMISSION_REJECTED]: '❌ Submission Rejected',
|
||||
[NotificationType.PAYMENT_RELEASED]: '💰 Payment Released!',
|
||||
[NotificationType.TASK_EXPIRED]: '⏰ Task Expired',
|
||||
};
|
||||
return titles[type];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get notification message
|
||||
*/
|
||||
private getNotificationMessage(data: NotificationData): string {
|
||||
switch (data.type) {
|
||||
case NotificationType.SUBMISSION_APPROVED:
|
||||
return `Your submission has been approved! Payment of ${data.amount} cUSD is being processed.`;
|
||||
|
||||
case NotificationType.SUBMISSION_REJECTED:
|
||||
return `Your submission was rejected. Reason: ${data.result?.reasoning || 'Did not meet criteria'}`;
|
||||
|
||||
case NotificationType.PAYMENT_RELEASED:
|
||||
return `Payment of ${data.amount} cUSD has been sent to your wallet. Tx: ${data.txHash}`;
|
||||
|
||||
case NotificationType.TASK_EXPIRED:
|
||||
return `Task has expired without completion.`;
|
||||
|
||||
default:
|
||||
return 'You have a new notification';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Log notification for debugging
|
||||
*/
|
||||
private logNotification(walletAddress: string, data: NotificationData): void {
|
||||
console.log(`\n📋 Notification Log:`);
|
||||
console.log(`To: ${walletAddress}`);
|
||||
console.log(`Type: ${data.type}`);
|
||||
console.log(`Data:`, JSON.stringify(data, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch send notifications
|
||||
*/
|
||||
async sendBatch(notifications: Array<{ userId: string; data: NotificationData }>): Promise<void> {
|
||||
console.log(`\n📬 Sending batch of ${notifications.length} notifications...`);
|
||||
|
||||
await Promise.all(
|
||||
notifications.map((notification) =>
|
||||
this.send(notification.userId, notification.data)
|
||||
)
|
||||
);
|
||||
|
||||
console.log(`✅ Batch notifications sent`);
|
||||
}
|
||||
}
|
||||
|
||||
export const notificationService = new NotificationService();
|
||||
189
dmtp/server/src/services/payment.service.js
Normal file
189
dmtp/server/src/services/payment.service.js
Normal file
@@ -0,0 +1,189 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.paymentService = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const blockchain_service_1 = require("./blockchain.service");
|
||||
const notification_service_1 = require("./notification.service");
|
||||
const MAX_RETRY_ATTEMPTS = 3;
|
||||
const RETRY_DELAY_MS = 5000; // 5 seconds
|
||||
/**
|
||||
* Payment Service
|
||||
* Handles payment processing, retries, and error recovery
|
||||
*/
|
||||
// Compiled output of payment.service.ts. All members are static.
class PaymentService {
    /**
     * Sleep helper function — resolves after `ms` milliseconds; used to
     * space out payment retries.
     */
    static sleep(ms) {
        return new Promise((resolve) => setTimeout(resolve, ms));
    }
    /**
     * Approve submission with retry logic and error handling.
     * On success: releases funds on-chain, marks the payment completed,
     * records the tx hash on the submission, and notifies the worker.
     * On total failure: marks the payment failed and notifies the worker.
     * Never throws — outcome is reported in the returned object.
     */
    static async approveSubmissionWithRetry(taskId, submissionId, workerId, contractTaskId, paymentAmount) {
        let lastError = null;
        let txHash = null;
        for (let attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; attempt++) {
            try {
                console.log(`\n💳 Approving submission (Attempt ${attempt}/${MAX_RETRY_ATTEMPTS})`);
                // Call blockchain to release payment
                txHash = await blockchain_service_1.blockchainService.approveSubmission(contractTaskId);
                console.log(`✅ Blockchain approval successful: ${txHash}`);
                // Update payment record with transaction hash.
                // NOTE(review): keyed on (taskId, workerId) — assumes one payment
                // row per pair; verify against the schema.
                await connections_1.prisma.payment.updateMany({
                    where: {
                        taskId,
                        workerId,
                    },
                    data: {
                        transactionHash: txHash,
                        status: "completed",
                    },
                });
                console.log(`✅ Payment record updated with transaction hash`);
                // Update submission with transaction hash
                await connections_1.prisma.submission.update({
                    where: { id: submissionId },
                    data: {
                        paymentTransactionHash: txHash,
                    },
                });
                // Notify worker of successful payment
                await notification_service_1.notificationService.send(workerId, {
                    type: notification_service_1.NotificationType.PAYMENT_RELEASED,
                    taskId,
                    submissionId,
                    amount: Number(paymentAmount),
                    txHash: txHash,
                });
                return {
                    success: true,
                    txHash,
                    attempts: attempt,
                };
            }
            catch (error) {
                // NOTE(review): a failure AFTER the on-chain call succeeded
                // (e.g. DB update throws) will retry the on-chain approval too.
                lastError = error;
                console.error(`⚠️ Attempt ${attempt} failed:`, error.message);
                // If this is not the last attempt, wait before retrying
                if (attempt < MAX_RETRY_ATTEMPTS) {
                    console.log(`⏳ Retrying in ${RETRY_DELAY_MS}ms...`);
                    await this.sleep(RETRY_DELAY_MS);
                }
            }
        }
        // All retries failed - update payment status to failed
        console.error(`❌ All ${MAX_RETRY_ATTEMPTS} attempts failed`);
        try {
            await connections_1.prisma.payment.updateMany({
                where: {
                    taskId,
                    workerId,
                },
                data: {
                    status: "failed",
                },
            });
            // Notify worker of payment failure
            await notification_service_1.notificationService.send(workerId, {
                type: notification_service_1.NotificationType.SUBMISSION_REJECTED,
                taskId,
                submissionId,
                result: { error: lastError?.message || "Payment processing failed" },
            });
        }
        catch (updateError) {
            console.error("Failed to update payment status:", updateError);
        }
        return {
            success: false,
            error: lastError?.message,
            attempts: MAX_RETRY_ATTEMPTS,
        };
    }
    /**
     * Rollback payment on verification failure — deletes the pending
     * payment record(s) for (taskId, workerId). Throws on DB failure.
     */
    static async rollbackPayment(taskId, workerId) {
        try {
            console.log(`🔙 Rolling back payment for task ${taskId}, worker ${workerId}`);
            // Delete the pending payment record
            const deletedPayment = await connections_1.prisma.payment.deleteMany({
                where: {
                    taskId,
                    workerId,
                    status: "pending",
                },
            });
            if (deletedPayment.count > 0) {
                console.log(`✅ Deleted ${deletedPayment.count} pending payment record(s)`);
            }
            else {
                console.log(`⚠️ No pending payment records found to delete`);
            }
        }
        catch (error) {
            console.error("Failed to rollback payment:", error);
            throw new Error(`Payment rollback failed: ${error}`);
        }
    }
    /**
     * Get payment status by submission ID.
     * Throws when the submission does not exist; returns null when it has
     * no payment record yet.
     */
    static async getPaymentStatus(submissionId) {
        const submission = await connections_1.prisma.submission.findUnique({
            where: { id: submissionId },
            include: {
                task: {
                    select: { id: true },
                },
            },
        });
        if (!submission) {
            throw new Error("Submission not found");
        }
        const payment = await connections_1.prisma.payment.findFirst({
            where: {
                taskId: submission.task.id,
                workerId: submission.workerId,
            },
        });
        return payment || null;
    }
    /**
     * Get all payments for a worker, newest first, with task summary.
     */
    static async getWorkerPayments(workerId) {
        return await connections_1.prisma.payment.findMany({
            where: { workerId },
            orderBy: { createdAt: "desc" },
            include: {
                task: {
                    select: {
                        id: true,
                        title: true,
                        paymentAmount: true,
                    },
                },
            },
        });
    }
    /**
     * Get payment statistics for a worker (earnings total + status counts).
     */
    static async getPaymentStats(workerId) {
        const payments = await connections_1.prisma.payment.findMany({
            where: { workerId },
        });
        return {
            totalEarnings: payments
                .filter((p) => p.status === "completed")
                .reduce((sum, p) => sum + Number(p.amount), 0),
            completedPayments: payments.filter((p) => p.status === "completed")
                .length,
            pendingPayments: payments.filter((p) => p.status === "pending").length,
            failedPayments: payments.filter((p) => p.status === "failed").length,
        };
    }
}
// NOTE(review): exports the class itself (all-static members), unlike the
// other services which export an instance.
exports.paymentService = PaymentService;
|
||||
230
dmtp/server/src/services/payment.service.ts
Normal file
230
dmtp/server/src/services/payment.service.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
import { prisma } from "../database/connections";
|
||||
import { blockchainService } from "./blockchain.service";
|
||||
import { notificationService, NotificationType } from "./notification.service";
|
||||
|
||||
const MAX_RETRY_ATTEMPTS = 3;
|
||||
const RETRY_DELAY_MS = 5000; // 5 seconds
|
||||
|
||||
/**
|
||||
* Payment Service
|
||||
* Handles payment processing, retries, and error recovery
|
||||
*/
|
||||
export class PaymentService {
|
||||
/**
|
||||
* Sleep helper function
|
||||
*/
|
||||
private static sleep(ms: number): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
/**
|
||||
* Approve submission with retry logic and error handling
|
||||
*/
|
||||
  /**
   * Approve a submission and release its escrowed payment, retrying up to
   * MAX_RETRY_ATTEMPTS times with RETRY_DELAY_MS between attempts.
   *
   * Happy path per attempt: release funds on-chain, mark the payment
   * completed with the tx hash, stamp the submission, notify the worker.
   * After all attempts fail: mark the payment failed and notify the worker.
   * Never throws — the outcome is reported in the returned object.
   */
  static async approveSubmissionWithRetry(
    taskId: string,
    submissionId: string,
    workerId: string,
    contractTaskId: number,
    paymentAmount: string
  ): Promise<{
    success: boolean;
    txHash?: string;
    error?: string;
    attempts: number;
  }> {
    let lastError: any = null;
    let txHash: string | null = null;

    for (let attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; attempt++) {
      try {
        console.log(
          `\n💳 Approving submission (Attempt ${attempt}/${MAX_RETRY_ATTEMPTS})`
        );

        // Call blockchain to release payment
        txHash = await blockchainService.approveSubmission(contractTaskId);
        console.log(`✅ Blockchain approval successful: ${txHash}`);

        // Update payment record with transaction hash.
        // NOTE(review): keyed on (taskId, workerId) — assumes one payment
        // row per pair; verify against the schema.
        await prisma.payment.updateMany({
          where: {
            taskId,
            workerId,
          },
          data: {
            transactionHash: txHash,
            status: "completed",
          },
        });

        console.log(`✅ Payment record updated with transaction hash`);

        // Update submission with transaction hash
        await prisma.submission.update({
          where: { id: submissionId },
          data: {
            paymentTransactionHash: txHash,
          },
        });

        // Notify worker of successful payment
        await notificationService.send(workerId, {
          type: NotificationType.PAYMENT_RELEASED,
          taskId,
          submissionId,
          amount: Number(paymentAmount),
          txHash: txHash,
        });

        return {
          success: true,
          txHash,
          attempts: attempt,
        };
      } catch (error: any) {
        // NOTE(review): a failure AFTER the on-chain call succeeded (e.g.
        // the DB update throws) will retry the on-chain approval as well.
        lastError = error;
        console.error(`⚠️ Attempt ${attempt} failed:`, error.message);

        // If this is not the last attempt, wait before retrying
        if (attempt < MAX_RETRY_ATTEMPTS) {
          console.log(`⏳ Retrying in ${RETRY_DELAY_MS}ms...`);
          await this.sleep(RETRY_DELAY_MS);
        }
      }
    }

    // All retries failed - update payment status to failed
    console.error(`❌ All ${MAX_RETRY_ATTEMPTS} attempts failed`);

    try {
      await prisma.payment.updateMany({
        where: {
          taskId,
          workerId,
        },
        data: {
          status: "failed",
        },
      });

      // Notify worker of payment failure
      await notificationService.send(workerId, {
        type: NotificationType.SUBMISSION_REJECTED,
        taskId,
        submissionId,
        result: { error: lastError?.message || "Payment processing failed" },
      });
    } catch (updateError) {
      console.error("Failed to update payment status:", updateError);
    }

    return {
      success: false,
      error: lastError?.message,
      attempts: MAX_RETRY_ATTEMPTS,
    };
  }
|
||||
|
||||
/**
|
||||
* Rollback payment on verification failure
|
||||
*/
|
||||
static async rollbackPayment(taskId: string, workerId: string): Promise<void> {
|
||||
try {
|
||||
console.log(
|
||||
`🔙 Rolling back payment for task ${taskId}, worker ${workerId}`
|
||||
);
|
||||
|
||||
// Delete the pending payment record
|
||||
const deletedPayment = await prisma.payment.deleteMany({
|
||||
where: {
|
||||
taskId,
|
||||
workerId,
|
||||
status: "pending",
|
||||
},
|
||||
});
|
||||
|
||||
if (deletedPayment.count > 0) {
|
||||
console.log(
|
||||
`✅ Deleted ${deletedPayment.count} pending payment record(s)`
|
||||
);
|
||||
} else {
|
||||
console.log(`⚠️ No pending payment records found to delete`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to rollback payment:", error);
|
||||
throw new Error(`Payment rollback failed: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get payment status by submission ID
|
||||
*/
|
||||
static async getPaymentStatus(submissionId: string): Promise<any | null> {
|
||||
const submission = await prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: {
|
||||
select: { id: true },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (!submission) {
|
||||
throw new Error("Submission not found");
|
||||
}
|
||||
|
||||
const payment = await prisma.payment.findFirst({
|
||||
where: {
|
||||
taskId: submission.task.id,
|
||||
workerId: submission.workerId,
|
||||
},
|
||||
});
|
||||
|
||||
return payment || null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all payments for a worker
|
||||
*/
|
||||
static async getWorkerPayments(workerId: string): Promise<any[]> {
|
||||
return await prisma.payment.findMany({
|
||||
where: { workerId },
|
||||
orderBy: { createdAt: "desc" },
|
||||
include: {
|
||||
task: {
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
paymentAmount: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Get payment statistics for a worker
|
||||
*/
|
||||
static async getPaymentStats(workerId: string): Promise<{
|
||||
totalEarnings: number;
|
||||
completedPayments: number;
|
||||
pendingPayments: number;
|
||||
failedPayments: number;
|
||||
}> {
|
||||
const payments = await prisma.payment.findMany({
|
||||
where: { workerId },
|
||||
});
|
||||
|
||||
return {
|
||||
totalEarnings: payments
|
||||
.filter((p) => p.status === "completed")
|
||||
.reduce((sum, p) => sum + Number(p.amount), 0),
|
||||
completedPayments: payments.filter((p) => p.status === "completed")
|
||||
.length,
|
||||
pendingPayments: payments.filter((p) => p.status === "pending").length,
|
||||
failedPayments: payments.filter((p) => p.status === "failed").length,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const paymentService = PaymentService;
|
||||
134
dmtp/server/src/services/verification-worker.service.js
Normal file
134
dmtp/server/src/services/verification-worker.service.js
Normal file
@@ -0,0 +1,134 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.VerificationWorkerService = void 0;
|
||||
const connections_1 = require("../database/connections");
|
||||
const database_types_1 = require("../types/database.types");
|
||||
const moderation_types_1 = require("../types/moderation.types");
|
||||
const ai_verification_service_1 = require("./ai-verification.service");
|
||||
const blockchain_service_1 = require("./blockchain.service");
|
||||
const content_moderation_service_1 = require("./content-moderation.service");
|
||||
class VerificationWorkerService {
|
||||
/**
|
||||
* Process submission verification asynchronously
|
||||
*/
|
||||
static async processSubmission(submissionId) {
|
||||
try {
|
||||
console.log(`\n🔄 Processing submission ${submissionId}...`);
|
||||
// Get submission with task details
|
||||
const submission = await connections_1.prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: true,
|
||||
worker: true,
|
||||
},
|
||||
});
|
||||
if (!submission) {
|
||||
throw new Error(`Submission ${submissionId} not found`);
|
||||
}
|
||||
// Step 1: Content moderation
|
||||
console.log('🛡️ Running content moderation...');
|
||||
const moderationResult = await content_moderation_service_1.contentModerationService.moderateSubmission({
|
||||
content: JSON.stringify(submission.submissionData),
|
||||
context: {
|
||||
taskType: submission.task.taskType,
|
||||
userId: submission.workerId,
|
||||
},
|
||||
submissionId,
|
||||
});
|
||||
// Auto-reject if flagged as critical
|
||||
if (moderationResult.action === moderation_types_1.ModerationAction.AUTO_REJECT) {
|
||||
console.log('🚫 Submission auto-rejected by moderation');
|
||||
await connections_1.prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
verificationStatus: database_types_1.VerificationStatus.REJECTED,
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
moderation: moderationResult,
|
||||
rejected: true,
|
||||
reason: 'Content moderation failed',
|
||||
})), // ← FIX: Cast to any after JSON conversion
|
||||
},
|
||||
});
|
||||
// TODO: Notify worker
|
||||
return;
|
||||
}
|
||||
// Step 2: AI verification (if moderation passed)
|
||||
console.log('🤖 Running AI verification...');
|
||||
const verificationResult = await ai_verification_service_1.aiVerificationService.verifyTextTask({
|
||||
submissionText: JSON.stringify(submission.submissionData),
|
||||
verificationCriteria: JSON.stringify(submission.task.verificationCriteria),
|
||||
taskType: submission.task.taskType,
|
||||
});
|
||||
// Update submission with verification result
|
||||
await connections_1.prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
moderation: moderationResult,
|
||||
verification: verificationResult,
|
||||
})), // ← FIX: Cast to any after JSON conversion
|
||||
verificationStatus: verificationResult.approved
|
||||
? database_types_1.VerificationStatus.APPROVED
|
||||
: database_types_1.VerificationStatus.REJECTED,
|
||||
},
|
||||
});
|
||||
// Step 3: If approved, release payment via smart contract
|
||||
if (verificationResult.approved) {
|
||||
console.log('✅ Submission approved! Releasing payment...');
|
||||
// Check if contractTaskId exists
|
||||
if (!submission.task.contractTaskId) {
|
||||
throw new Error('Contract task ID not found');
|
||||
}
|
||||
const txHash = await blockchain_service_1.blockchainService.approveSubmission(submission.task.contractTaskId // ← FIX: Now this field exists
|
||||
);
|
||||
// Update payment record
|
||||
await connections_1.prisma.payment.create({
|
||||
data: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
amount: submission.task.paymentAmount,
|
||||
transactionHash: txHash,
|
||||
status: 'completed',
|
||||
},
|
||||
});
|
||||
// Update task status
|
||||
await connections_1.prisma.task.update({
|
||||
where: { id: submission.taskId },
|
||||
data: { status: 'completed' },
|
||||
});
|
||||
// Update worker earnings and task count
|
||||
await connections_1.prisma.user.update({
|
||||
where: { id: submission.workerId },
|
||||
data: {
|
||||
totalEarnings: {
|
||||
increment: submission.task.paymentAmount,
|
||||
},
|
||||
totalTasksCompleted: {
|
||||
increment: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
console.log(`💰 Payment released! Tx: ${txHash}`);
|
||||
}
|
||||
else {
|
||||
console.log('❌ Submission rejected by AI verification');
|
||||
}
|
||||
console.log(`✅ Verification complete for submission ${submissionId}`);
|
||||
}
|
||||
catch (error) {
|
||||
console.error(`❌ Verification failed for submission ${submissionId}:`, error);
|
||||
// Mark as pending for manual review
|
||||
await connections_1.prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
verificationStatus: database_types_1.VerificationStatus.PENDING,
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
error: String(error),
|
||||
timestamp: new Date().toISOString(),
|
||||
})), // ← FIX: Cast to any after JSON conversion
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.VerificationWorkerService = VerificationWorkerService;
|
||||
147
dmtp/server/src/services/verification-worker.service.ts
Normal file
147
dmtp/server/src/services/verification-worker.service.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { prisma } from '../database/connections';
|
||||
import { VerificationStatus } from '../types/database.types';
|
||||
import { ModerationAction } from '../types/moderation.types';
|
||||
import { aiVerificationService } from './ai-verification.service';
|
||||
import { blockchainService } from './blockchain.service';
|
||||
import { contentModerationService } from './content-moderation.service';
|
||||
|
||||
export class VerificationWorkerService {
|
||||
/**
|
||||
* Process submission verification asynchronously
|
||||
*/
|
||||
static async processSubmission(submissionId: string): Promise<void> {
|
||||
try {
|
||||
console.log(`\n🔄 Processing submission ${submissionId}...`);
|
||||
|
||||
// Get submission with task details
|
||||
const submission = await prisma.submission.findUnique({
|
||||
where: { id: submissionId },
|
||||
include: {
|
||||
task: true,
|
||||
worker: true,
|
||||
},
|
||||
});
|
||||
|
||||
if (!submission) {
|
||||
throw new Error(`Submission ${submissionId} not found`);
|
||||
}
|
||||
|
||||
// Step 1: Content moderation
|
||||
console.log('🛡️ Running content moderation...');
|
||||
const moderationResult = await contentModerationService.moderateSubmission({
|
||||
content: JSON.stringify(submission.submissionData),
|
||||
context: {
|
||||
taskType: submission.task.taskType,
|
||||
userId: submission.workerId,
|
||||
},
|
||||
submissionId,
|
||||
});
|
||||
|
||||
// Auto-reject if flagged as critical
|
||||
if (moderationResult.action === ModerationAction.AUTO_REJECT) {
|
||||
console.log('🚫 Submission auto-rejected by moderation');
|
||||
|
||||
await prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
verificationStatus: VerificationStatus.REJECTED,
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
moderation: moderationResult,
|
||||
rejected: true,
|
||||
reason: 'Content moderation failed',
|
||||
})) as any, // ← FIX: Cast to any after JSON conversion
|
||||
},
|
||||
});
|
||||
|
||||
// TODO: Notify worker
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 2: AI verification (if moderation passed)
|
||||
console.log('🤖 Running AI verification...');
|
||||
const verificationResult = await aiVerificationService.verifyTextTask({
|
||||
submissionText: JSON.stringify(submission.submissionData),
|
||||
verificationCriteria: JSON.stringify(submission.task.verificationCriteria),
|
||||
taskType: submission.task.taskType,
|
||||
});
|
||||
|
||||
// Update submission with verification result
|
||||
await prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
moderation: moderationResult,
|
||||
verification: verificationResult,
|
||||
})) as any, // ← FIX: Cast to any after JSON conversion
|
||||
verificationStatus: verificationResult.approved
|
||||
? VerificationStatus.APPROVED
|
||||
: VerificationStatus.REJECTED,
|
||||
},
|
||||
});
|
||||
|
||||
// Step 3: If approved, release payment via smart contract
|
||||
if (verificationResult.approved) {
|
||||
console.log('✅ Submission approved! Releasing payment...');
|
||||
|
||||
// Check if contractTaskId exists
|
||||
if (!submission.task.contractTaskId) {
|
||||
throw new Error('Contract task ID not found');
|
||||
}
|
||||
|
||||
const txHash = await blockchainService.approveSubmission(
|
||||
submission.task.contractTaskId // ← FIX: Now this field exists
|
||||
);
|
||||
|
||||
// Update payment record
|
||||
await prisma.payment.create({
|
||||
data: {
|
||||
taskId: submission.taskId,
|
||||
workerId: submission.workerId,
|
||||
amount: submission.task.paymentAmount,
|
||||
transactionHash: txHash,
|
||||
status: 'completed',
|
||||
},
|
||||
});
|
||||
|
||||
// Update task status
|
||||
await prisma.task.update({
|
||||
where: { id: submission.taskId },
|
||||
data: { status: 'completed' },
|
||||
});
|
||||
|
||||
// Update worker earnings and task count
|
||||
await prisma.user.update({
|
||||
where: { id: submission.workerId },
|
||||
data: {
|
||||
totalEarnings: {
|
||||
increment: submission.task.paymentAmount,
|
||||
},
|
||||
totalTasksCompleted: { // ← FIX: Now this field exists
|
||||
increment: 1,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`💰 Payment released! Tx: ${txHash}`);
|
||||
} else {
|
||||
console.log('❌ Submission rejected by AI verification');
|
||||
}
|
||||
|
||||
console.log(`✅ Verification complete for submission ${submissionId}`);
|
||||
} catch (error) {
|
||||
console.error(`❌ Verification failed for submission ${submissionId}:`, error);
|
||||
|
||||
// Mark as pending for manual review
|
||||
await prisma.submission.update({
|
||||
where: { id: submissionId },
|
||||
data: {
|
||||
verificationStatus: VerificationStatus.PENDING,
|
||||
aiVerificationResult: JSON.parse(JSON.stringify({
|
||||
error: String(error),
|
||||
timestamp: new Date().toISOString(),
|
||||
})) as any, // ← FIX: Cast to any after JSON conversion
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
158
dmtp/server/src/services/webhook.service.js
Normal file
158
dmtp/server/src/services/webhook.service.js
Normal file
@@ -0,0 +1,158 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.webhookService = exports.WebhookEvent = void 0;
|
||||
const axios_1 = __importDefault(require("axios"));
|
||||
var WebhookEvent;
|
||||
(function (WebhookEvent) {
|
||||
WebhookEvent["PAYMENT_COMPLETED"] = "payment.completed";
|
||||
WebhookEvent["PAYMENT_FAILED"] = "payment.failed";
|
||||
WebhookEvent["SUBMISSION_APPROVED"] = "submission.approved";
|
||||
WebhookEvent["SUBMISSION_REJECTED"] = "submission.rejected";
|
||||
WebhookEvent["TASK_COMPLETED"] = "task.completed";
|
||||
WebhookEvent["WORKER_ASSIGNED"] = "worker.assigned";
|
||||
})(WebhookEvent || (exports.WebhookEvent = WebhookEvent = {}));
|
||||
const MAX_WEBHOOK_RETRIES = 3;
|
||||
const WEBHOOK_TIMEOUT_MS = 10000;
|
||||
const RETRY_DELAY_MS = 5000;
|
||||
/**
|
||||
* Webhook Service
|
||||
* Handles outbound webhooks for payment and submission events
|
||||
*/
|
||||
class WebhookService {
|
||||
/**
|
||||
* Send webhook with retry logic
|
||||
*/
|
||||
static async sendWebhook(event, data, webhookUrl) {
|
||||
// Get webhook URL from environment or parameter
|
||||
const url = webhookUrl || process.env.WEBHOOK_ENDPOINT || process.env.NOTIFICATION_WEBHOOK_URL;
|
||||
if (!url) {
|
||||
console.warn("⚠️ No webhook URL configured, skipping webhook");
|
||||
return {
|
||||
success: true,
|
||||
attempts: 0,
|
||||
};
|
||||
}
|
||||
const payload = {
|
||||
event,
|
||||
timestamp: new Date().toISOString(),
|
||||
data,
|
||||
};
|
||||
let lastError = null;
|
||||
for (let attempt = 1; attempt <= MAX_WEBHOOK_RETRIES; attempt++) {
|
||||
try {
|
||||
console.log(`🪝 Sending webhook [${event}] (Attempt ${attempt}/${MAX_WEBHOOK_RETRIES})`);
|
||||
const response = await this.client.post(url, {
|
||||
...payload,
|
||||
attemptNumber: attempt,
|
||||
});
|
||||
if (response.status >= 200 && response.status < 300) {
|
||||
console.log(`✅ Webhook sent successfully: ${response.status}`);
|
||||
return {
|
||||
success: true,
|
||||
attempts: attempt,
|
||||
};
|
||||
}
|
||||
lastError = new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
catch (error) {
|
||||
lastError = error;
|
||||
console.error(`⚠️ Webhook attempt ${attempt} failed:`, lastError.message);
|
||||
// If not the last attempt, wait before retrying
|
||||
if (attempt < MAX_WEBHOOK_RETRIES) {
|
||||
console.log(`⏳ Retrying in ${RETRY_DELAY_MS}ms...`);
|
||||
await this.sleep(RETRY_DELAY_MS);
|
||||
}
|
||||
}
|
||||
}
|
||||
console.error(`❌ Webhook failed after ${MAX_WEBHOOK_RETRIES} attempts: ${lastError?.message}`);
|
||||
return {
|
||||
success: false,
|
||||
attempts: MAX_WEBHOOK_RETRIES,
|
||||
error: lastError?.message,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Send payment completed webhook
|
||||
*/
|
||||
static async sendPaymentCompleted(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.PAYMENT_COMPLETED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send payment completed webhook:", error);
|
||||
// Don't throw - webhooks are non-critical
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Send payment failed webhook
|
||||
*/
|
||||
static async sendPaymentFailed(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.PAYMENT_FAILED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send payment failed webhook:", error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Send submission approved webhook
|
||||
*/
|
||||
static async sendSubmissionApproved(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.SUBMISSION_APPROVED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send submission approved webhook:", error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Send submission rejected webhook
|
||||
*/
|
||||
static async sendSubmissionRejected(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.SUBMISSION_REJECTED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send submission rejected webhook:", error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Send task completed webhook
|
||||
*/
|
||||
static async sendTaskCompleted(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.TASK_COMPLETED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send task completed webhook:", error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Send worker assigned webhook
|
||||
*/
|
||||
static async sendWorkerAssigned(data) {
|
||||
try {
|
||||
await this.sendWebhook(WebhookEvent.WORKER_ASSIGNED, data);
|
||||
}
|
||||
catch (error) {
|
||||
console.error("Failed to send worker assigned webhook:", error);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sleep helper
|
||||
*/
|
||||
static sleep(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
}
|
||||
WebhookService.client = axios_1.default.create({
|
||||
timeout: WEBHOOK_TIMEOUT_MS,
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "TaskEscrow-WebhookService/1.0",
|
||||
},
|
||||
});
|
||||
exports.webhookService = WebhookService;
|
||||
2
dmtp/server/src/types/ai.types.js
Normal file
2
dmtp/server/src/types/ai.types.js
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
67
dmtp/server/src/types/ai.types.ts
Normal file
67
dmtp/server/src/types/ai.types.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
// Verification result interfaces
|
||||
export interface VerificationResult {
|
||||
approved: boolean;
|
||||
score: number;
|
||||
reasoning: string;
|
||||
violations?: string[];
|
||||
issues?: string[];
|
||||
imageQuality?: 'excellent' | 'good' | 'poor';
|
||||
timestamp: string;
|
||||
geminiResponse?: string;
|
||||
}
|
||||
|
||||
// Text verification input
|
||||
export interface TextVerificationInput {
|
||||
submissionText: string;
|
||||
verificationCriteria: string;
|
||||
taskType?: string;
|
||||
}
|
||||
|
||||
// Image verification input
|
||||
export interface ImageVerificationInput {
|
||||
imageUrl: string;
|
||||
taskDescription: string;
|
||||
verificationCriteria: string;
|
||||
submissionData?: any;
|
||||
}
|
||||
|
||||
// Gemini API response structure
|
||||
export interface GeminiResponse {
|
||||
approved: boolean;
|
||||
score: number;
|
||||
violations?: string[];
|
||||
issues?: string[];
|
||||
reasoning: string;
|
||||
imageQuality?: 'excellent' | 'good' | 'poor';
|
||||
}
|
||||
|
||||
// Cache configuration
|
||||
export interface CacheConfig {
|
||||
enabled: boolean;
|
||||
ttl: number; // Time to live in seconds
|
||||
}
|
||||
|
||||
// Retry configuration
|
||||
export interface RetryConfig {
|
||||
maxRetries: number;
|
||||
initialDelay: number;
|
||||
maxDelay: number;
|
||||
backoffMultiplier: number;
|
||||
}
|
||||
|
||||
// Rate limit configuration
|
||||
export interface RateLimitConfig {
|
||||
maxRequests: number;
|
||||
windowMs: number;
|
||||
}
|
||||
|
||||
// AI Service configuration
|
||||
export interface AIServiceConfig {
|
||||
apiKey: string;
|
||||
model: string;
|
||||
temperature: number;
|
||||
maxOutputTokens: number;
|
||||
cache: CacheConfig;
|
||||
retry: RetryConfig;
|
||||
rateLimit: RateLimitConfig;
|
||||
}
|
||||
2
dmtp/server/src/types/api.types.js
Normal file
2
dmtp/server/src/types/api.types.js
Normal file
@@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
86
dmtp/server/src/types/api.types.ts
Normal file
86
dmtp/server/src/types/api.types.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { Request } from 'express';
|
||||
import { TaskStatus, TaskType } from './database.types';
|
||||
|
||||
// Extend Express Request to include authenticated user
|
||||
export interface AuthenticatedRequest extends Request {
|
||||
user?: {
|
||||
walletAddress: string;
|
||||
userId?: string;
|
||||
};
|
||||
}
|
||||
|
||||
// API Response types
|
||||
export interface ApiResponse<T = any> {
|
||||
success: boolean;
|
||||
data?: T;
|
||||
error?: {
|
||||
message: string;
|
||||
code: string;
|
||||
details?: any;
|
||||
};
|
||||
meta?: {
|
||||
page?: number;
|
||||
limit?: number;
|
||||
total?: number;
|
||||
};
|
||||
}
|
||||
|
||||
// Task DTOs
|
||||
export interface CreateTaskDto {
|
||||
title: string;
|
||||
description: string;
|
||||
taskType: TaskType;
|
||||
paymentAmount: number;
|
||||
verificationCriteria: {
|
||||
requiredFields: string[];
|
||||
aiPrompt: string;
|
||||
minConfidenceScore?: number;
|
||||
};
|
||||
maxSubmissions: number;
|
||||
expiresAt: string; // ISO date string
|
||||
}
|
||||
|
||||
export interface TaskListQuery {
|
||||
status?: TaskStatus;
|
||||
taskType?: TaskType;
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
sortBy?: 'payment' | 'createdAt' | 'expiresAt';
|
||||
sortOrder?: 'asc' | 'desc';
|
||||
}
|
||||
|
||||
// Submission DTOs
|
||||
export interface CreateSubmissionDto {
|
||||
taskId: string;
|
||||
submissionData: {
|
||||
text?: string;
|
||||
imageUrls?: string[];
|
||||
answers?: Record<string, any>;
|
||||
metadata?: Record<string, any>;
|
||||
};
|
||||
}
|
||||
|
||||
export interface VerificationWebhookDto {
|
||||
submissionId: string;
|
||||
verificationResult: {
|
||||
approved: boolean;
|
||||
score: number;
|
||||
reasoning: string;
|
||||
violations?: string[];
|
||||
};
|
||||
}
|
||||
|
||||
// User DTOs
|
||||
export interface RegisterUserDto {
|
||||
walletAddress: string;
|
||||
phoneNumber?: string;
|
||||
role: 'requester' | 'worker';
|
||||
}
|
||||
|
||||
// Wallet signature verification
|
||||
export interface SignatureVerificationDto {
|
||||
walletAddress: string;
|
||||
signature: string;
|
||||
message: string;
|
||||
timestamp: number;
|
||||
}
|
||||
35
dmtp/server/src/types/database.types.js
Normal file
35
dmtp/server/src/types/database.types.js
Normal file
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.PaymentStatus = exports.VerificationStatus = exports.TaskStatus = exports.TaskType = exports.UserRole = void 0;
|
||||
// Enums
|
||||
var UserRole;
|
||||
(function (UserRole) {
|
||||
UserRole["REQUESTER"] = "requester";
|
||||
UserRole["WORKER"] = "worker";
|
||||
})(UserRole || (exports.UserRole = UserRole = {}));
|
||||
var TaskType;
|
||||
(function (TaskType) {
|
||||
TaskType["TEXT_VERIFICATION"] = "text_verification";
|
||||
TaskType["IMAGE_LABELING"] = "image_labeling";
|
||||
TaskType["SURVEY"] = "survey";
|
||||
TaskType["CONTENT_MODERATION"] = "content_moderation";
|
||||
})(TaskType || (exports.TaskType = TaskType = {}));
|
||||
var TaskStatus;
|
||||
(function (TaskStatus) {
|
||||
TaskStatus["OPEN"] = "open";
|
||||
TaskStatus["IN_PROGRESS"] = "in_progress";
|
||||
TaskStatus["COMPLETED"] = "completed";
|
||||
TaskStatus["EXPIRED"] = "expired";
|
||||
})(TaskStatus || (exports.TaskStatus = TaskStatus = {}));
|
||||
var VerificationStatus;
|
||||
(function (VerificationStatus) {
|
||||
VerificationStatus["PENDING"] = "pending";
|
||||
VerificationStatus["APPROVED"] = "approved";
|
||||
VerificationStatus["REJECTED"] = "rejected";
|
||||
})(VerificationStatus || (exports.VerificationStatus = VerificationStatus = {}));
|
||||
var PaymentStatus;
|
||||
(function (PaymentStatus) {
|
||||
PaymentStatus["PENDING"] = "pending";
|
||||
PaymentStatus["COMPLETED"] = "completed";
|
||||
PaymentStatus["FAILED"] = "failed";
|
||||
})(PaymentStatus || (exports.PaymentStatus = PaymentStatus = {}));
|
||||
153
dmtp/server/src/types/database.types.ts
Normal file
153
dmtp/server/src/types/database.types.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
// Enums
|
||||
export enum UserRole {
|
||||
REQUESTER = 'requester',
|
||||
WORKER = 'worker',
|
||||
}
|
||||
|
||||
export enum TaskType {
|
||||
TEXT_VERIFICATION = 'text_verification',
|
||||
IMAGE_LABELING = 'image_labeling',
|
||||
SURVEY = 'survey',
|
||||
CONTENT_MODERATION = 'content_moderation',
|
||||
}
|
||||
|
||||
export enum TaskStatus {
|
||||
OPEN = 'open',
|
||||
IN_PROGRESS = 'in_progress',
|
||||
COMPLETED = 'completed',
|
||||
EXPIRED = 'expired',
|
||||
}
|
||||
|
||||
export enum VerificationStatus {
|
||||
PENDING = 'pending',
|
||||
APPROVED = 'approved',
|
||||
REJECTED = 'rejected',
|
||||
}
|
||||
|
||||
export enum PaymentStatus {
|
||||
PENDING = 'pending',
|
||||
COMPLETED = 'completed',
|
||||
FAILED = 'failed',
|
||||
}
|
||||
|
||||
// User Interface
|
||||
export interface User {
|
||||
id: string;
|
||||
walletAddress: string;
|
||||
phoneNumber?: string | null;
|
||||
role: UserRole;
|
||||
reputationScore: number;
|
||||
totalEarnings: number;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
// Task Interface
|
||||
export interface Task {
|
||||
id: string;
|
||||
requesterId: string;
|
||||
title: string;
|
||||
description: string;
|
||||
taskType: TaskType;
|
||||
paymentAmount: number;
|
||||
status: TaskStatus;
|
||||
verificationCriteria: VerificationCriteria;
|
||||
maxSubmissions: number;
|
||||
expiresAt: Date;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
// Verification Criteria (stored as JSONB)
|
||||
export interface VerificationCriteria {
|
||||
requiredFields: string[];
|
||||
aiPrompt: string;
|
||||
minConfidenceScore?: number;
|
||||
customRules?: Record<string, any>;
|
||||
}
|
||||
|
||||
// Submission Interface
|
||||
export interface Submission {
|
||||
id: string;
|
||||
taskId: string;
|
||||
workerId: string;
|
||||
submissionData: SubmissionData;
|
||||
aiVerificationResult?: AIVerificationResult | null;
|
||||
verificationStatus: VerificationStatus;
|
||||
paymentTransactionHash?: string | null;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
}
|
||||
|
||||
// Submission Data (stored as JSONB - flexible structure)
|
||||
export interface SubmissionData {
|
||||
text?: string;
|
||||
imageUrls?: string[];
|
||||
answers?: Record<string, any>;
|
||||
metadata?: Record<string, any>;
|
||||
}
|
||||
|
||||
// AI Verification Result (Gemini API response)
|
||||
export interface AIVerificationResult {
|
||||
verified: boolean;
|
||||
confidenceScore: number;
|
||||
reasoning: string;
|
||||
geminiResponse: string;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
// Payment Interface
|
||||
export interface Payment {
|
||||
id: string;
|
||||
taskId: string;
|
||||
workerId: string;
|
||||
amount: number;
|
||||
transactionHash: string;
|
||||
status: PaymentStatus;
|
||||
createdAt: Date;
|
||||
}
|
||||
|
||||
// DTOs (Data Transfer Objects) for API
|
||||
|
||||
export interface CreateUserDto {
|
||||
walletAddress: string;
|
||||
phoneNumber?: string;
|
||||
role: UserRole;
|
||||
}
|
||||
|
||||
export interface CreateTaskDto {
|
||||
requesterId: string;
|
||||
title: string;
|
||||
description: string;
|
||||
taskType: TaskType;
|
||||
paymentAmount: number;
|
||||
verificationCriteria: VerificationCriteria;
|
||||
maxSubmissions: number;
|
||||
expiresAt: Date;
|
||||
}
|
||||
|
||||
export interface CreateSubmissionDto {
|
||||
taskId: string;
|
||||
workerId: string;
|
||||
submissionData: SubmissionData;
|
||||
}
|
||||
|
||||
export interface CreatePaymentDto {
|
||||
taskId: string;
|
||||
workerId: string;
|
||||
amount: number;
|
||||
transactionHash: string;
|
||||
}
|
||||
|
||||
// Query Filter Types
|
||||
export interface TaskFilters {
|
||||
status?: TaskStatus;
|
||||
taskType?: TaskType;
|
||||
requesterId?: string;
|
||||
}
|
||||
|
||||
export interface SubmissionFilters {
|
||||
taskId?: string;
|
||||
workerId?: string;
|
||||
verificationStatus?: VerificationStatus;
|
||||
}
|
||||
19
dmtp/server/src/types/moderation.types.js
Normal file
19
dmtp/server/src/types/moderation.types.js
Normal file
@@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ModerationAction = exports.ModerationSeverity = void 0;
|
||||
// Severity levels
|
||||
var ModerationSeverity;
|
||||
(function (ModerationSeverity) {
|
||||
ModerationSeverity["NONE"] = "NONE";
|
||||
ModerationSeverity["LOW"] = "LOW";
|
||||
ModerationSeverity["MEDIUM"] = "MEDIUM";
|
||||
ModerationSeverity["HIGH"] = "HIGH";
|
||||
ModerationSeverity["CRITICAL"] = "CRITICAL";
|
||||
})(ModerationSeverity || (exports.ModerationSeverity = ModerationSeverity = {}));
|
||||
// Moderation actions
|
||||
var ModerationAction;
|
||||
(function (ModerationAction) {
|
||||
ModerationAction["APPROVE"] = "APPROVE";
|
||||
ModerationAction["FLAG_REVIEW"] = "FLAG_REVIEW";
|
||||
ModerationAction["AUTO_REJECT"] = "AUTO_REJECT";
|
||||
})(ModerationAction || (exports.ModerationAction = ModerationAction = {}));
|
||||
86
dmtp/server/src/types/moderation.types.ts
Normal file
86
dmtp/server/src/types/moderation.types.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
// Severity levels
/** Severity ladder for a moderation finding, from benign (NONE) to worst (CRITICAL). */
export enum ModerationSeverity {
  NONE = 'NONE',
  LOW = 'LOW',
  MEDIUM = 'MEDIUM',
  HIGH = 'HIGH',
  CRITICAL = 'CRITICAL',
}

// Moderation actions
/** What the pipeline should do with the content after analysis. */
export enum ModerationAction {
  APPROVE = 'APPROVE',
  FLAG_REVIEW = 'FLAG_REVIEW',
  AUTO_REJECT = 'AUTO_REJECT',
}

// Category detection result
/** Outcome of checking one moderation category against the content. */
export interface CategoryDetection {
  detected: boolean;
  confidence: number; // 0-100
  severity: ModerationSeverity;
  examples?: string[]; // Specific examples found
}

// All moderation categories
/** One {@link CategoryDetection} per category the moderator evaluates. */
export interface ModerationCategories {
  spam: CategoryDetection;
  toxic: CategoryDetection;
  hate_speech: CategoryDetection;
  fraud: CategoryDetection;
  inappropriate: CategoryDetection;
}

// Main moderation result
/** Aggregated decision produced for one piece of content. */
export interface ModerationResult {
  flagged: boolean;
  categories: ModerationCategories;
  action: ModerationAction;
  explanation: string;
  timestamp: string; // presumably ISO-8601 — TODO confirm against the producer
  submissionId?: string;
  geminiResponse?: string; // raw model output, presumably kept for auditing — TODO confirm
}

// Input for moderation
/** Payload handed to the moderation service. */
export interface ModerationInput {
  content: string;
  contentType?: 'text' | 'image' | 'mixed';
  context?: {
    taskType?: string;
    userId?: string;
    previousViolations?: number;
  };
  submissionId?: string;
}

// Gemini moderation response structure
/** Raw JSON shape expected back from the Gemini moderation prompt. */
export interface GeminiModerationResponse {
  flagged: boolean;
  categories: {
    spam: { detected: boolean; confidence: number; severity: string };
    toxic: { detected: boolean; confidence: number; severity: string };
    hate_speech: { detected: boolean; confidence: number; severity: string };
    fraud: { detected: boolean; confidence: number; severity: string };
    inappropriate: { detected: boolean; confidence: number; severity: string };
  };
  action: 'APPROVE' | 'FLAG_REVIEW' | 'AUTO_REJECT';
  explanation: string;
}

// Filter rules
/** A static pattern-based rule applied by the filtering layer. */
export interface FilterRule {
  pattern: string | RegExp;
  category: keyof ModerationCategories;
  severity: ModerationSeverity;
  description: string;
}

// Moderation statistics
/** Aggregate counters over past moderation decisions. */
export interface ModerationStats {
  totalChecks: number;
  approved: number;
  flagged: number;
  rejected: number;
  byCategory: Record<keyof ModerationCategories, number>;
}
|
||||
132
dmtp/server/src/utils/moderation-logger.js
Normal file
132
dmtp/server/src/utils/moderation-logger.js
Normal file
@@ -0,0 +1,132 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.moderationLogger = exports.ModerationLogger = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
class ModerationLogger {
|
||||
constructor(logDirectory = './logs') {
|
||||
// Create logs directory if it doesn't exist
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
fs.mkdirSync(logDirectory, { recursive: true });
|
||||
}
|
||||
this.logFile = path.join(logDirectory, 'moderation.log');
|
||||
}
|
||||
/**
|
||||
* Log moderation decision
|
||||
*/
|
||||
log(input, result) {
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
submissionId: input.submissionId,
|
||||
action: result.action,
|
||||
flagged: result.flagged,
|
||||
categories: result.categories,
|
||||
contentPreview: this.sanitizeContent(input.content),
|
||||
explanation: result.explanation,
|
||||
};
|
||||
const logLine = JSON.stringify(logEntry) + '\n';
|
||||
// Append to log file
|
||||
fs.appendFileSync(this.logFile, logLine, 'utf8');
|
||||
// Also log to console
|
||||
this.consoleLog(result);
|
||||
}
|
||||
/**
|
||||
* Console log with colors
|
||||
*/
|
||||
consoleLog(result) {
|
||||
const emoji = result.flagged ? '🚨' : '✅';
|
||||
const action = result.action;
|
||||
console.log(`\n${emoji} Moderation Result: ${action}`);
|
||||
if (result.flagged) {
|
||||
console.log('📋 Violations detected:');
|
||||
Object.entries(result.categories).forEach(([category, detection]) => {
|
||||
if (detection.detected) {
|
||||
console.log(` - ${category}: ${detection.severity} (${detection.confidence}% confidence)`);
|
||||
}
|
||||
});
|
||||
}
|
||||
console.log(`💬 ${result.explanation}\n`);
|
||||
}
|
||||
/**
|
||||
* Sanitize content for logging (hide sensitive info)
|
||||
*/
|
||||
sanitizeContent(content) {
|
||||
// Truncate to 100 chars
|
||||
const truncated = content.substring(0, 100);
|
||||
// Remove emails and URLs
|
||||
return truncated
|
||||
.replace(/[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}/g, '[EMAIL]')
|
||||
.replace(/https?:\/\/[^\s]+/g, '[URL]');
|
||||
}
|
||||
/**
|
||||
* Get recent logs
|
||||
*/
|
||||
getRecentLogs(limit = 100) {
|
||||
try {
|
||||
const content = fs.readFileSync(this.logFile, 'utf8');
|
||||
const lines = content.trim().split('\n').slice(-limit);
|
||||
return lines.map((line) => JSON.parse(line));
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Error reading logs:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get moderation statistics
|
||||
*/
|
||||
getStats() {
|
||||
const logs = this.getRecentLogs(1000);
|
||||
const stats = {
|
||||
total: logs.length,
|
||||
approved: logs.filter((log) => log.action === 'APPROVE').length,
|
||||
flagged: logs.filter((log) => log.action === 'FLAG_REVIEW').length,
|
||||
rejected: logs.filter((log) => log.action === 'AUTO_REJECT').length,
|
||||
byCategory: {},
|
||||
};
|
||||
// Count by category
|
||||
logs.forEach((log) => {
|
||||
Object.entries(log.categories).forEach(([category, detection]) => {
|
||||
if (detection.detected) {
|
||||
stats.byCategory[category] = (stats.byCategory[category] || 0) + 1;
|
||||
}
|
||||
});
|
||||
});
|
||||
return stats;
|
||||
}
|
||||
}
|
||||
exports.ModerationLogger = ModerationLogger;
|
||||
exports.moderationLogger = new ModerationLogger();
|
||||
115
dmtp/server/src/utils/moderation-logger.ts
Normal file
115
dmtp/server/src/utils/moderation-logger.ts
Normal file
@@ -0,0 +1,115 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { ModerationInput, ModerationResult } from '../types/moderation.types';
|
||||
|
||||
export class ModerationLogger {
|
||||
private logFile: string;
|
||||
|
||||
constructor(logDirectory: string = './logs') {
|
||||
// Create logs directory if it doesn't exist
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
fs.mkdirSync(logDirectory, { recursive: true });
|
||||
}
|
||||
|
||||
this.logFile = path.join(logDirectory, 'moderation.log');
|
||||
}
|
||||
|
||||
/**
|
||||
* Log moderation decision
|
||||
*/
|
||||
log(input: ModerationInput, result: ModerationResult): void {
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
submissionId: input.submissionId,
|
||||
action: result.action,
|
||||
flagged: result.flagged,
|
||||
categories: result.categories,
|
||||
contentPreview: this.sanitizeContent(input.content),
|
||||
explanation: result.explanation,
|
||||
};
|
||||
|
||||
const logLine = JSON.stringify(logEntry) + '\n';
|
||||
|
||||
// Append to log file
|
||||
fs.appendFileSync(this.logFile, logLine, 'utf8');
|
||||
|
||||
// Also log to console
|
||||
this.consoleLog(result);
|
||||
}
|
||||
|
||||
/**
|
||||
* Console log with colors
|
||||
*/
|
||||
private consoleLog(result: ModerationResult): void {
|
||||
const emoji = result.flagged ? '🚨' : '✅';
|
||||
const action = result.action;
|
||||
|
||||
console.log(`\n${emoji} Moderation Result: ${action}`);
|
||||
|
||||
if (result.flagged) {
|
||||
console.log('📋 Violations detected:');
|
||||
Object.entries(result.categories).forEach(([category, detection]) => {
|
||||
if (detection.detected) {
|
||||
console.log(` - ${category}: ${detection.severity} (${detection.confidence}% confidence)`);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
console.log(`💬 ${result.explanation}\n`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize content for logging (hide sensitive info)
|
||||
*/
|
||||
private sanitizeContent(content: string): string {
|
||||
// Truncate to 100 chars
|
||||
const truncated = content.substring(0, 100);
|
||||
|
||||
// Remove emails and URLs
|
||||
return truncated
|
||||
.replace(/[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}/g, '[EMAIL]')
|
||||
.replace(/https?:\/\/[^\s]+/g, '[URL]');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get recent logs
|
||||
*/
|
||||
getRecentLogs(limit: number = 100): any[] {
|
||||
try {
|
||||
const content = fs.readFileSync(this.logFile, 'utf8');
|
||||
const lines = content.trim().split('\n').slice(-limit);
|
||||
return lines.map((line) => JSON.parse(line));
|
||||
} catch (error) {
|
||||
console.error('Error reading logs:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get moderation statistics
|
||||
*/
|
||||
getStats(): any {
|
||||
const logs = this.getRecentLogs(1000);
|
||||
|
||||
const stats = {
|
||||
total: logs.length,
|
||||
approved: logs.filter((log) => log.action === 'APPROVE').length,
|
||||
flagged: logs.filter((log) => log.action === 'FLAG_REVIEW').length,
|
||||
rejected: logs.filter((log) => log.action === 'AUTO_REJECT').length,
|
||||
byCategory: {} as Record<string, number>,
|
||||
};
|
||||
|
||||
// Count by category
|
||||
logs.forEach((log) => {
|
||||
Object.entries(log.categories).forEach(([category, detection]: [string, any]) => {
|
||||
if (detection.detected) {
|
||||
stats.byCategory[category] = (stats.byCategory[category] || 0) + 1;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return stats;
|
||||
}
|
||||
}
|
||||
|
||||
export const moderationLogger = new ModerationLogger();
|
||||
117
dmtp/server/src/utils/queue-logger.js
Normal file
117
dmtp/server/src/utils/queue-logger.js
Normal file
@@ -0,0 +1,117 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.queueLogger = exports.QueueLogger = void 0;
|
||||
const fs = __importStar(require("fs"));
|
||||
const path = __importStar(require("path"));
|
||||
class QueueLogger {
|
||||
constructor(logDirectory = './logs') {
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
fs.mkdirSync(logDirectory, { recursive: true });
|
||||
}
|
||||
this.logFile = path.join(logDirectory, 'queue.log');
|
||||
this.metricsFile = path.join(logDirectory, 'queue-metrics.json');
|
||||
}
|
||||
log(level, message, data) {
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
level,
|
||||
message,
|
||||
...(data && { data }),
|
||||
};
|
||||
const logLine = JSON.stringify(logEntry) + '\n';
|
||||
fs.appendFileSync(this.logFile, logLine, 'utf8');
|
||||
// Console log with colors
|
||||
const emoji = {
|
||||
info: '📘',
|
||||
warn: '⚠️',
|
||||
error: '❌',
|
||||
}[level];
|
||||
console.log(`${emoji} [${level.toUpperCase()}] ${message}`, data || '');
|
||||
}
|
||||
logJobStart(jobId, jobData) {
|
||||
this.log('info', `Job started: ${jobId}`, { jobData });
|
||||
}
|
||||
logJobComplete(jobId, duration, result) {
|
||||
this.log('info', `Job completed: ${jobId}`, { duration: `${duration}ms`, result });
|
||||
this.updateMetrics('completed', duration);
|
||||
}
|
||||
logJobFailed(jobId, error, attemptsMade) {
|
||||
this.log('error', `Job failed: ${jobId}`, {
|
||||
error: error.message,
|
||||
attemptsMade,
|
||||
stack: error.stack,
|
||||
});
|
||||
this.updateMetrics('failed');
|
||||
}
|
||||
updateMetrics(type, duration) {
|
||||
try {
|
||||
let metrics = { completed: 0, failed: 0, totalDuration: 0, avgDuration: 0 };
|
||||
if (fs.existsSync(this.metricsFile)) {
|
||||
const content = fs.readFileSync(this.metricsFile, 'utf8');
|
||||
metrics = JSON.parse(content);
|
||||
}
|
||||
if (type === 'completed') {
|
||||
metrics.completed++;
|
||||
if (duration) {
|
||||
metrics.totalDuration += duration;
|
||||
metrics.avgDuration = metrics.totalDuration / metrics.completed;
|
||||
}
|
||||
}
|
||||
else {
|
||||
metrics.failed++;
|
||||
}
|
||||
metrics.lastUpdated = new Date().toISOString();
|
||||
fs.writeFileSync(this.metricsFile, JSON.stringify(metrics, null, 2));
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Failed to update metrics:', error);
|
||||
}
|
||||
}
|
||||
getMetrics() {
|
||||
try {
|
||||
if (fs.existsSync(this.metricsFile)) {
|
||||
const content = fs.readFileSync(this.metricsFile, 'utf8');
|
||||
return JSON.parse(content);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
console.error('Failed to read metrics:', error);
|
||||
}
|
||||
return { completed: 0, failed: 0, avgDuration: 0 };
|
||||
}
|
||||
}
|
||||
exports.QueueLogger = QueueLogger;
|
||||
exports.queueLogger = new QueueLogger();
|
||||
95
dmtp/server/src/utils/queue-logger.ts
Normal file
95
dmtp/server/src/utils/queue-logger.ts
Normal file
@@ -0,0 +1,95 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
export class QueueLogger {
|
||||
private logFile: string;
|
||||
private metricsFile: string;
|
||||
|
||||
constructor(logDirectory: string = './logs') {
|
||||
if (!fs.existsSync(logDirectory)) {
|
||||
fs.mkdirSync(logDirectory, { recursive: true });
|
||||
}
|
||||
|
||||
this.logFile = path.join(logDirectory, 'queue.log');
|
||||
this.metricsFile = path.join(logDirectory, 'queue-metrics.json');
|
||||
}
|
||||
|
||||
log(level: 'info' | 'warn' | 'error', message: string, data?: any): void {
|
||||
const logEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
level,
|
||||
message,
|
||||
...(data && { data }),
|
||||
};
|
||||
|
||||
const logLine = JSON.stringify(logEntry) + '\n';
|
||||
fs.appendFileSync(this.logFile, logLine, 'utf8');
|
||||
|
||||
// Console log with colors
|
||||
const emoji = {
|
||||
info: '📘',
|
||||
warn: '⚠️',
|
||||
error: '❌',
|
||||
}[level];
|
||||
|
||||
console.log(`${emoji} [${level.toUpperCase()}] ${message}`, data || '');
|
||||
}
|
||||
|
||||
logJobStart(jobId: string, jobData: any): void {
|
||||
this.log('info', `Job started: ${jobId}`, { jobData });
|
||||
}
|
||||
|
||||
logJobComplete(jobId: string, duration: number, result?: any): void {
|
||||
this.log('info', `Job completed: ${jobId}`, { duration: `${duration}ms`, result });
|
||||
this.updateMetrics('completed', duration);
|
||||
}
|
||||
|
||||
logJobFailed(jobId: string, error: any, attemptsMade: number): void {
|
||||
this.log('error', `Job failed: ${jobId}`, {
|
||||
error: error.message,
|
||||
attemptsMade,
|
||||
stack: error.stack,
|
||||
});
|
||||
this.updateMetrics('failed');
|
||||
}
|
||||
|
||||
private updateMetrics(type: 'completed' | 'failed', duration?: number): void {
|
||||
try {
|
||||
let metrics: any = { completed: 0, failed: 0, totalDuration: 0, avgDuration: 0 };
|
||||
|
||||
if (fs.existsSync(this.metricsFile)) {
|
||||
const content = fs.readFileSync(this.metricsFile, 'utf8');
|
||||
metrics = JSON.parse(content);
|
||||
}
|
||||
|
||||
if (type === 'completed') {
|
||||
metrics.completed++;
|
||||
if (duration) {
|
||||
metrics.totalDuration += duration;
|
||||
metrics.avgDuration = metrics.totalDuration / metrics.completed;
|
||||
}
|
||||
} else {
|
||||
metrics.failed++;
|
||||
}
|
||||
|
||||
metrics.lastUpdated = new Date().toISOString();
|
||||
fs.writeFileSync(this.metricsFile, JSON.stringify(metrics, null, 2));
|
||||
} catch (error) {
|
||||
console.error('Failed to update metrics:', error);
|
||||
}
|
||||
}
|
||||
|
||||
getMetrics(): any {
|
||||
try {
|
||||
if (fs.existsSync(this.metricsFile)) {
|
||||
const content = fs.readFileSync(this.metricsFile, 'utf8');
|
||||
return JSON.parse(content);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to read metrics:', error);
|
||||
}
|
||||
return { completed: 0, failed: 0, avgDuration: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
export const queueLogger = new QueueLogger();
|
||||
78
dmtp/server/src/utils/rate-limiter.js
Normal file
78
dmtp/server/src/utils/rate-limiter.js
Normal file
@@ -0,0 +1,78 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RateLimiter = void 0;
|
||||
class RateLimiter {
|
||||
constructor(config) {
|
||||
this.requests = new Map();
|
||||
this.config = config;
|
||||
// Cleanup expired entries every minute
|
||||
setInterval(() => this.cleanup(), 60000);
|
||||
}
|
||||
async checkLimit(key = 'global') {
|
||||
const now = Date.now();
|
||||
const entry = this.requests.get(key);
|
||||
if (!entry || now > entry.resetTime) {
|
||||
// Create new entry
|
||||
this.requests.set(key, {
|
||||
count: 1,
|
||||
resetTime: now + this.config.windowMs,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
if (entry.count >= this.config.maxRequests) {
|
||||
const waitTime = entry.resetTime - now;
|
||||
throw new Error(`Rate limit exceeded. Try again in ${Math.ceil(waitTime / 1000)} seconds.`);
|
||||
}
|
||||
entry.count++;
|
||||
return true;
|
||||
}
|
||||
async waitForSlot(key = 'global') {
|
||||
try {
|
||||
await this.checkLimit(key);
|
||||
}
|
||||
catch (error) {
|
||||
const entry = this.requests.get(key);
|
||||
if (entry) {
|
||||
const waitTime = entry.resetTime - Date.now();
|
||||
await new Promise((resolve) => setTimeout(resolve, waitTime));
|
||||
await this.waitForSlot(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
cleanup() {
|
||||
const now = Date.now();
|
||||
for (const [key, entry] of this.requests.entries()) {
|
||||
if (now > entry.resetTime) {
|
||||
this.requests.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
reset(key) {
|
||||
if (key) {
|
||||
this.requests.delete(key);
|
||||
}
|
||||
else {
|
||||
this.requests.clear();
|
||||
}
|
||||
}
|
||||
getStatus(key = 'global') {
|
||||
const entry = this.requests.get(key);
|
||||
if (!entry) {
|
||||
return {
|
||||
remaining: this.config.maxRequests,
|
||||
resetTime: Date.now() + this.config.windowMs,
|
||||
};
|
||||
}
|
||||
if (Date.now() > entry.resetTime) {
|
||||
return {
|
||||
remaining: this.config.maxRequests,
|
||||
resetTime: Date.now() + this.config.windowMs,
|
||||
};
|
||||
}
|
||||
return {
|
||||
remaining: Math.max(0, this.config.maxRequests - entry.count),
|
||||
resetTime: entry.resetTime,
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.RateLimiter = RateLimiter;
|
||||
94
dmtp/server/src/utils/rate-limiter.ts
Normal file
94
dmtp/server/src/utils/rate-limiter.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import { RateLimitConfig } from '../types/ai.types';
|
||||
|
||||
interface RateLimitEntry {
|
||||
count: number;
|
||||
resetTime: number;
|
||||
}
|
||||
|
||||
export class RateLimiter {
|
||||
private requests: Map<string, RateLimitEntry> = new Map();
|
||||
private config: RateLimitConfig;
|
||||
|
||||
constructor(config: RateLimitConfig) {
|
||||
this.config = config;
|
||||
|
||||
// Cleanup expired entries every minute
|
||||
setInterval(() => this.cleanup(), 60000);
|
||||
}
|
||||
|
||||
async checkLimit(key: string = 'global'): Promise<boolean> {
|
||||
const now = Date.now();
|
||||
const entry = this.requests.get(key);
|
||||
|
||||
if (!entry || now > entry.resetTime) {
|
||||
// Create new entry
|
||||
this.requests.set(key, {
|
||||
count: 1,
|
||||
resetTime: now + this.config.windowMs,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
if (entry.count >= this.config.maxRequests) {
|
||||
const waitTime = entry.resetTime - now;
|
||||
throw new Error(
|
||||
`Rate limit exceeded. Try again in ${Math.ceil(waitTime / 1000)} seconds.`
|
||||
);
|
||||
}
|
||||
|
||||
entry.count++;
|
||||
return true;
|
||||
}
|
||||
|
||||
async waitForSlot(key: string = 'global'): Promise<void> {
|
||||
try {
|
||||
await this.checkLimit(key);
|
||||
} catch (error) {
|
||||
const entry = this.requests.get(key);
|
||||
if (entry) {
|
||||
const waitTime = entry.resetTime - Date.now();
|
||||
await new Promise((resolve) => setTimeout(resolve, waitTime));
|
||||
await this.waitForSlot(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
for (const [key, entry] of this.requests.entries()) {
|
||||
if (now > entry.resetTime) {
|
||||
this.requests.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
reset(key?: string): void {
|
||||
if (key) {
|
||||
this.requests.delete(key);
|
||||
} else {
|
||||
this.requests.clear();
|
||||
}
|
||||
}
|
||||
|
||||
getStatus(key: string = 'global'): { remaining: number; resetTime: number } | null {
|
||||
const entry = this.requests.get(key);
|
||||
if (!entry) {
|
||||
return {
|
||||
remaining: this.config.maxRequests,
|
||||
resetTime: Date.now() + this.config.windowMs,
|
||||
};
|
||||
}
|
||||
|
||||
if (Date.now() > entry.resetTime) {
|
||||
return {
|
||||
remaining: this.config.maxRequests,
|
||||
resetTime: Date.now() + this.config.windowMs,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
remaining: Math.max(0, this.config.maxRequests - entry.count),
|
||||
resetTime: entry.resetTime,
|
||||
};
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user