Update timezone settings from Asia/Jakarta to Asia/Dili across configuration files and codebase. Enhance logging for crawling events and improve error handling in the CrawlingService. Add a new workspace configuration file for easier project management.
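Asia/Dili is UTC+9 year-round with no daylight-saving transitions, which is what makes the fixed "+09:00" session offset used later in this commit safe. A minimal sanity check using the moment-timezone dependency already in package.json (illustrative only, not part of the commit):

import moment from "moment-timezone";

// Asia/Dili (Timor-Leste) is UTC+9 with no DST, so local time should
// always read exactly nine hours ahead of UTC.
moment.tz.setDefault("Asia/Dili");

const local = moment().format("YYYY-MM-DD HH:mm:ss Z"); // ends in +09:00
const utc = moment.utc().format("YYYY-MM-DD HH:mm:ss Z"); // ends in +00:00
console.log({ local, utc });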
@@ -1,37 +1,36 @@
 {
   "app": {
     "name": "SVC-HCM-CRAWLER",
     "description": "Hikvision Device Crawling Service",
     "version": "1.0.0",
     "env": "Development"
   },
   "crawler": {
     "interval": "*/1 * * * *",
     "maxResults": 10,
     "timeout": 10000
   },
   "database": {
     "hcm": {
       "engine": "mysql",
       "host": "127.0.0.1",
       "port": "3306",
       "username": "root",
       "password": "r00t@dm1n05",
       "database": "tt_hcm",
       "logging": false,
       "sync": false
     }
   },
   "bridge": {
     "host": "127.0.0.1",
     "port": "3000",
     "endpoint": "/api/dooraccess/logs",
     "timeout": 30000
   },
   "hikvision": {
     "defaultUsername": "admin",
     "defaultPassword": "Passwordhik_1",
     "defaultPort": 80
   }
-
 }
@@ -1,37 +1,36 @@
 {
   "app": {
     "name": "SVC-HCM-CRAWLER",
     "description": "Hikvision Device Crawling Service",
     "version": "1.0.0",
     "env": "Production"
   },
   "crawler": {
     "interval": "*/1 * * * *",
     "maxResults": 10,
     "timeout": 10000
   },
   "database": {
     "hcm": {
       "engine": "mysql",
       "host": "127.0.0.1",
       "port": "3306",
       "username": "apphcm",
       "password": "$ppHCMTT#2024",
       "database": "dbhcm",
       "logging": false,
       "sync": false
     }
   },
   "bridge": {
     "host": "127.0.0.1",
     "port": "3000",
     "endpoint": "/api/dooraccess/logs",
     "timeout": 30000
   },
   "hikvision": {
     "defaultUsername": "admin",
-    "defaultPassword": "Passwordhik_1",
+    "defaultPassword": "tt#2025Timor",
     "defaultPort": 80
   }
-
 }
@@ -12,17 +12,16 @@ module.exports = {
       log_date_format: "YYYY-MM-DD HH:mm:ss Z",
       merge_logs: true,
       env: {
-        NODE_ENV: "default",
-        TZ: "Asia/Jakarta"
+        NODE_ENV: "development",
+        TZ: "Asia/Dili",
       },
       env_production: {
         NODE_ENV: "prod",
-        TZ: "Asia/Jakarta"
+        TZ: "Asia/Dili",
       },
       autorestart: true,
       max_restarts: 10,
-      min_uptime: "10s"
-    }
-  ]
+      min_uptime: "10s",
+    },
+  ],
+};
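For orientation, here is a minimal ecosystem.config.js of the shape this hunk implies. The apps-array wrapper and the name/script fields are not visible in the diff and are assumptions (name guessed from the pm2:stop/pm2:restart scripts, script from package.json's "main"):

// ecosystem.config.js — minimal sketch; name and script values are assumed
module.exports = {
  apps: [
    {
      name: "svc-hcm-crawler",     // assumed from the pm2:* npm scripts
      script: "dist/src/index.js", // assumed from package.json "main"
      log_date_format: "YYYY-MM-DD HH:mm:ss Z",
      merge_logs: true,
      env: {
        NODE_ENV: "development",
        TZ: "Asia/Dili",
      },
      env_production: {
        NODE_ENV: "prod",
        TZ: "Asia/Dili",
      },
      autorestart: true,
      max_restarts: 10,
      min_uptime: "10s",
    },
  ],
};

With the env/env_production split, plain "pm2 start ecosystem.config.js" uses the development block, while "pm2 start ecosystem.config.js --env production" (the new pm2:start:prod script below) switches NODE_ENV and TZ to the production block.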
@@ -5,7 +5,7 @@
   "exec": "ts-node src/index.ts",
   "env": {
     "NODE_ENV": "default",
-    "TZ": "Asia/Jakarta"
+    "TZ": "Asia/Dili"
   }
 }
package.json
@@ -1,45 +1,46 @@
 {
   "name": "svc-hcm-crawler",
   "version": "1.0.0",
   "description": "Hikvision Device Crawling Service for HCM Bridge",
   "main": "dist/src/index.js",
   "scripts": {
-    "start": "TZ='Asia/Jakarta' node dist/src/index.js",
+    "start": "TZ='Asia/Dili' node dist/src/index.js",
     "dev": "nodemon",
     "build": "tsc",
     "pm2:start": "pm2 start ecosystem.config.js",
+    "pm2:start:prod": "pm2 start ecosystem.config.js --env production",
     "pm2:stop": "pm2 stop svc-hcm-crawler",
     "pm2:restart": "pm2 restart svc-hcm-crawler",
+    "pm2:restart:prod": "pm2 restart ecosystem.config.js --env production",
     "pm2:delete": "pm2 delete svc-hcm-crawler",
     "pm2:logs": "pm2 logs svc-hcm-crawler"
   },
   "keywords": [
     "hikvision",
     "crawler",
     "hcm"
   ],
   "author": "STS",
   "license": "ISC",
   "dependencies": {
     "@types/xml2js": "^0.4.14",
     "axios": "^1.7.8",
     "config": "^3.3.12",
     "entity": "file:../entity",
     "moment-timezone": "~0.5.46",
     "mysql2": "^3.12.0",
     "node-cron": "^4.1.0",
     "reflect-metadata": "^0.2.2",
     "tslog": "^4.9.3",
     "typeorm": "^0.3.28",
     "typescript": "^5.9.3",
     "uuid": "^11.0.3",
     "xml2js": "^0.6.2"
   },
   "devDependencies": {
     "@types/node": "^20.0.0",
     "@types/node-cron": "^3.0.11",
     "nodemon": "^3.0.0",
     "ts-node": "^10.9.0"
   }
 }
-
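One portability note on the start script: the TZ='Asia/Dili' prefix sets the variable only in POSIX shells and fails under Windows cmd.exe. If Windows support matters, the usual fix is cross-env (a hypothetical variant, not part of this commit): "start": "cross-env TZ=Asia/Dili node dist/src/index.js".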
@@ -27,13 +27,12 @@ export class OrmHelper {
       entities: entities,
       subscribers: [],
       migrations: [],
-      extra: {
-        query: "SET TIMEZONE='Asia/Jakarta';",
-      },
     });

     try {
       await OrmHelper.DB.initialize();
+      // Set timezone for MySQL connection (Asia/Dili = UTC+9)
+      await OrmHelper.DB.query("SET time_zone = '+09:00'");
       log.info("Database initialized successfully");
     } catch (error: any) {
       log.error("Database initialization failed:", error);
@@ -41,4 +40,3 @@ export class OrmHelper {
     }
   }
 }
-
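Two notes on this replacement. First, the removed extra.query key is not a recognized mysql2 pool option, so the old Jakarta statement was likely never executed; running SET time_zone through the DataSource after initialize() actually reaches MySQL. Second, SET time_zone is session-scoped, and against a pooled DataSource the statement lands on only one pooled connection; TypeORM's mysql driver also accepts a timezone connection option that applies to every connection. A hedged sketch of that alternative, with values taken from the development config above:

import { DataSource } from "typeorm";

// Sketch only: the driver-level `timezone` option avoids the session-scope
// caveat of a one-off "SET time_zone" query against a connection pool.
const ds = new DataSource({
  type: "mysql",
  host: "127.0.0.1",
  port: 3306,
  username: "root",
  password: "r00t@dm1n05",
  database: "tt_hcm",
  timezone: "+09:00", // Asia/Dili is UTC+9 with no DST, so a fixed offset is safe
});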
src/index.ts
@@ -6,7 +6,7 @@ import { OrmHelper } from "./helpers/orm";
 import { CrawlingService } from "./services/crawling";

 // Set timezone
-moment.tz.setDefault("Asia/Jakarta");
+moment.tz.setDefault("Asia/Dili");

 const log: Logger<ILogObj> = new Logger({
   name: "[CrawlerIndex]",
@@ -38,35 +38,89 @@ async function startCrawlingService() {
     // Start cron job
     const cronInterval = config.get("crawler.interval");
     log.info(`Starting crawling service with interval: ${cronInterval}`);
+    log.info(`Timezone: Asia/Dili`);

-    const crawlingJob = cron.schedule(cronInterval, async () => {
-      if (isCrawlingRunning) {
-        log.warn("Crawling service is still running. Skipping this schedule.");
-        return;
-      }
-
-      log.info(`Running crawling service at: ${new Date().toLocaleString()}`);
-      isCrawlingRunning = true;
-
-      try {
-        await CrawlingService.runCrawling();
-        log.info(`Crawling service completed at: ${new Date().toLocaleString()}`);
-      } catch (error) {
-        log.error("Crawling service failed:", error);
-      } finally {
-        isCrawlingRunning = false;
-      }
-    });
+    // Schedule cron job with timezone support
+    // node-cron v3+ supports timezone option
+    const crawlingJob = cron.schedule(
+      cronInterval,
+      async () => {
+        const scheduleTime = moment()
+          .tz("Asia/Dili")
+          .format("YYYY-MM-DD HH:mm:ss");
+        const utcTime = new Date().toISOString();
+
+        if (isCrawlingRunning) {
+          log.warn(
+            `[Cron] ⚠️ Crawling service is still running. Skipping schedule at ${scheduleTime} (UTC: ${utcTime})`
+          );
+          return;
+        }
+
+        log.info(`[Cron] ⏰ ========== CRON TRIGGERED ==========`);
+        log.info(`[Cron] ⏰ Schedule time: ${scheduleTime} (UTC: ${utcTime})`);
+        isCrawlingRunning = true;
+
+        try {
+          await CrawlingService.runCrawling();
+          const completeTime = moment()
+            .tz("Asia/Dili")
+            .format("YYYY-MM-DD HH:mm:ss");
+          log.info(`[Cron] ✅ Crawling service completed at ${completeTime}`);
+        } catch (error) {
+          const errorTime = moment()
+            .tz("Asia/Dili")
+            .format("YYYY-MM-DD HH:mm:ss");
+          log.error(
+            `[Cron] ❌ Crawling service failed at ${errorTime}:`,
+            error
+          );
+        } finally {
+          isCrawlingRunning = false;
+          log.info(`[Cron] ==========================================`);
+        }
+      },
+      {
+        timezone: "Asia/Dili",
+      }
+    );

-    log.info(
-      `Crawling service started. App: ${config.get("app.name")} v${config.get("app.version")}`
-    );
-    log.info(`Environment: ${config.get("app.env")}`);
-    log.info(`Cron schedule: ${cronInterval}`);
-
-    // Run immediately on startup (optional)
-    // Uncomment the line below if you want to run crawling immediately on startup
-    // await CrawlingService.runCrawling();
+    // Verify cron job is scheduled
+    if (crawlingJob) {
+      const nextRun = moment()
+        .tz("Asia/Dili")
+        .add(1, "minute")
+        .startOf("minute")
+        .format("YYYY-MM-DD HH:mm:ss");
+      const currentTime = moment()
+        .tz("Asia/Dili")
+        .format("YYYY-MM-DD HH:mm:ss");
+
+      log.info("═══════════════════════════════════════════════════════════");
+      log.info(`[Cron] ✅ Cron job scheduled successfully`);
+      log.info(
+        `[Cron] App: ${config.get("app.name")} v${config.get("app.version")}`
+      );
+      log.info(`[Cron] Environment: ${config.get("app.env")}`);
+      log.info(`[Cron] Schedule pattern: ${cronInterval} (every minute)`);
+      log.info(`[Cron] Timezone: Asia/Dili`);
+      log.info(`[Cron] Current time: ${currentTime}`);
+      log.info(`[Cron] Next run scheduled at: ${nextRun}`);
+      log.info("═══════════════════════════════════════════════════════════");
+    } else {
+      log.error("[Cron] ❌ Failed to schedule cron job");
+      throw new Error("Failed to schedule cron job");
+    }
+
+    // Run immediately on startup to verify everything works
+    log.info("[Cron] Running initial crawl on startup...");
+    try {
+      await CrawlingService.runCrawling();
+      log.info("[Cron] ✅ Initial crawl completed successfully");
+    } catch (error) {
+      log.error("[Cron] ❌ Initial crawl failed:", error);
+      // Don't exit - let the cron job continue
+    }
   } catch (error) {
     log.error("Failed to start crawling service:", error);
     process.exit(1);
@@ -106,4 +160,3 @@ process.on("uncaughtException", (error) => {

 // Start the service
 startCrawlingService();
-
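The new scheduling call passes an options object as a third argument. A stripped-down sketch of that pattern, assuming the { timezone } option shape is unchanged in the node-cron ^4.1.0 pinned by package.json (the inline comment in the diff references v3+):

import cron from "node-cron";

// Fire every minute on Asia/Dili wall-clock time. With TZ=Asia/Dili already
// exported by the npm/PM2 scripts, the option is belt-and-braces.
const task = cron.schedule(
  "*/1 * * * *",
  () => console.log("tick", new Date().toISOString()),
  { timezone: "Asia/Dili" }
);
// task.stop() would halt the schedule.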
src/services/crawling.ts
@@ -945,16 +945,26 @@ export class CrawlingService {
     const url = `http://${deviceIp}:${port}/ISAPI/AccessControl/AcsEvent?format=json`;

     // Get device MAC address (cache it for subsequent requests)
+    log.info(`[Crawl Device] Fetching MAC address for device ${deviceIp}...`);
     console.log(
       `[CrawlingService] Getting MAC address for device ${deviceIp}...`
     );
+
+    const macStartTime = Date.now();
     const macAddress = await this.getDeviceInfo(deviceIp, username, password);
+    const macDuration = Date.now() - macStartTime;
+
     if (!macAddress) {
+      log.warn(
+        `[Crawl Device] ⚠️ Could not get MAC address for device ${deviceIp} (took ${macDuration}ms). Events will be saved without MAC address.`
+      );
       console.warn(
         `[CrawlingService] Warning: Could not get MAC address for device ${deviceIp}. Events will be saved without MAC address.`
       );
     } else {
+      log.info(
+        `[Crawl Device] ✓ MAC address retrieved: ${macAddress} for device ${deviceIp} (took ${macDuration}ms)`
+      );
       console.log(
         `[CrawlingService] Successfully retrieved MAC address ${macAddress} for device ${deviceIp}`
       );
@@ -973,37 +983,75 @@ export class CrawlingService {
     };

     log.info(
-      `Crawling device ${deviceIp} from position ${state.lastPosition}`
+      `[Crawl Device] Starting crawl for device ${deviceIp} from position ${state.lastPosition} (SearchID: ${state.searchID})`
     );

     // Make API call with digest auth
+    log.info(
+      `[Crawl Device] Making API request to ${url} with maxResults: ${requestPayload.AcsEventCond.maxResults}`
+    );
     console.log(`[CrawlingService] Making request to ${url}`);
+
+    const requestStartTime = Date.now();
     const response: HikvisionResponse = await this.makeDigestRequest(
       url,
       username,
       password,
       requestPayload
     );
+    const requestDuration = Date.now() - requestStartTime;
+
+    log.info(
+      `[Crawl Device] API request completed in ${requestDuration}ms for device ${deviceIp}`
+    );

     if (!response || !response.AcsEvent || !response.AcsEvent.InfoList) {
-      log.warn(`No events found for device ${deviceIp}`);
+      log.warn(
+        `[Crawl Device] ⚠️ No events found for device ${deviceIp} - Response status: ${
+          response?.AcsEvent?.responseStatusStrg || "N/A"
+        }`
+      );
       return;
     }

     const events = response.AcsEvent.InfoList;
-    log.info(`Found ${events.length} events from device ${deviceIp}`);
+    log.info(
+      `[Crawl Device] Found ${events.length} event(s) from device ${deviceIp} - Total matches: ${response.AcsEvent.totalMatches}, Status: ${response.AcsEvent.responseStatusStrg}`
+    );

     // Process each event
+    log.info(
+      `[Crawl Device] Processing ${
+        events.length
+      } event(s) from device ${deviceIp} (MAC: ${macAddress || "NONE"})`
+    );
     console.log(
       `[CrawlingService] Processing ${
         events.length
       } events for device ${deviceIp} with MAC: ${macAddress || "NONE"}`
     );
-    for (const event of events) {
+
+    let successCount = 0;
+    let errorCount = 0;
+
+    for (let i = 0; i < events.length; i++) {
+      const event = events[i];
       try {
-        console.log(
-          `[CrawlingService] Mapping event for employee ${event.employeeNoString} from device ${deviceIp}`
-        );
+        log.info(
+          `[Crawl Device] Processing event ${i + 1}/${
+            events.length
+          } - Employee: ${
+            event.employeeNoString
+          }, Device: ${deviceIp}, Time: ${event.time}, Status: ${
+            event.attendanceStatus
+          }`
+        );
+        console.log(
+          `[CrawlingService] Mapping event ${i + 1}/${
+            events.length
+          } for employee ${event.employeeNoString} from device ${deviceIp}`
+        );

         // Map to webhook format (now async to get employee accessdoorid)
         const webhookEvent = await this.mapHikvisionToWebhook(
           event,
@@ -1011,93 +1059,107 @@ export class CrawlingService {
           macAddress
         );

+        log.info(
+          `[Crawl Device] Mapped event - EmployeeID: ${
+            webhookEvent.AccessControllerEvent?.employeeNoString
+          }, DeviceName: ${
+            webhookEvent.AccessControllerEvent?.deviceName
+          }, MAC: ${webhookEvent.macAddress || "EMPTY"}, DateTime: ${
+            webhookEvent.dateTime
+          }`
+        );
         console.log(
-          `[CrawlingService] Sending event to internal endpoint. MAC in payload: ${
+          `[CrawlingService] Sending event ${i + 1}/${
+            events.length
+          } to internal endpoint. MAC in payload: ${
             webhookEvent.macAddress || "EMPTY"
           }`
         );

         // Call internal endpoint
         await this.sendToInternalEndpoint(webhookEvent);
-      } catch (error) {
-        console.error(
-          `[CrawlingService] Error processing event from device ${deviceIp}:`,
-          error
-        );
-        log.error(`Error processing event from device ${deviceIp}:`, error);
+        successCount++;
+      } catch (error: any) {
+        errorCount++;
+        const errorMsg = error?.message || String(error);
+        log.error(
+          `[Crawl Device] ✗ Error processing event ${i + 1}/${
+            events.length
+          } from device ${deviceIp} - Employee: ${
+            event.employeeNoString
+          }, Time: ${event.time}, Error: ${errorMsg}`
+        );
+        console.error(
+          `[CrawlingService] Error processing event ${i + 1}/${
+            events.length
+          } from device ${deviceIp}:`,
+          errorMsg
+        );
       }
     }

+    log.info(
+      `[Crawl Device] Completed processing ${events.length} event(s) from device ${deviceIp} - Success: ${successCount}, Errors: ${errorCount}`
+    );
+
     // Update position for next crawl
     state.lastPosition += events.length;

+    log.info(
+      `[Crawl Device] Updated crawl position for device ${deviceIp} - New position: ${state.lastPosition}, Total matches: ${response.AcsEvent.totalMatches}, Status: ${response.AcsEvent.responseStatusStrg}`
+    );
+
     // If there are more matches, we'll continue in next crawl
     if (response.AcsEvent.responseStatusStrg === "MORE") {
       log.info(
-        `More events available for device ${deviceIp}. Total: ${response.AcsEvent.totalMatches}`
+        `[Crawl Device] ⏭️ More events available for device ${deviceIp} - Total: ${response.AcsEvent.totalMatches}, Current position: ${state.lastPosition}`
       );
     } else {
       // Reset position if no more events
+      log.info(
+        `[Crawl Device] ✅ All events processed for device ${deviceIp} - Resetting position to 0`
+      );
       state.lastPosition = 0;
       state.searchID = uuidv4();
     }
-  } catch (error) {
-    log.error(`Error crawling device ${deviceIp}:`, error);
+  } catch (error: any) {
+    const errorMsg = error?.message || String(error);
+    log.error(
+      `[Crawl Device] ❌ Error crawling device ${deviceIp}: ${errorMsg}`
+    );
+    if (error?.response) {
+      log.error(
+        `[Crawl Device] Response details - Status: ${
+          error.response.status
+        }, Data: ${JSON.stringify(error.response.data)}`
+      );
+    }
+    // Reset state on error
+    if (crawlingState[deviceIp]) {
+      log.info(
+        `[Crawl Device] Resetting crawl state for device ${deviceIp} due to error`
+      );
+      crawlingState[deviceIp].lastPosition = 0;
+      crawlingState[deviceIp].searchID = uuidv4();
+    }
   }
 }

+  /**
+   * Check if event already exists in database (duplicate check)
+   */
+  static async isDuplicateEvent(eventData: any): Promise<boolean> {
+    try {
+      const accessdoorid = parseInt(
+        eventData.AccessControllerEvent?.employeeNoString || "0"
+      );
+      const datelogs = moment.parseZone(eventData.dateTime).toDate();
+      const devicename = eventData.AccessControllerEvent?.deviceName || "";
+      const macaddress = eventData.macAddress || "";
+
+      // Check if a record with the same accessdoorid, datelogs (within 1 second), devicename, and macaddress exists
+      const existing = await OrmHelper.DB.query(
+        `
+        SELECT _idx
+        FROM tbl_attendancedoorlogs
+        WHERE accessdoorid = ?
+          AND devicename = ?
+          AND macaddress = ?
+          AND ABS(TIMESTAMPDIFF(SECOND, datelogs, ?)) <= 1
+          AND isdeleted = 0
+        LIMIT 1
+        `,
+        [accessdoorid, devicename, macaddress, datelogs]
+      );
+
+      return existing && existing.length > 0;
+    } catch (error) {
+      log.error("Error checking duplicate event:", error);
+      // If there's an error checking, assume it's not a duplicate to avoid blocking valid events
+      return false;
+    }
+  }
+
   /**
    * Send event to internal endpoint
    */
   static async sendToInternalEndpoint(eventData: any): Promise<void> {
+    const employeeId =
+      eventData.AccessControllerEvent?.employeeNoString || "N/A";
+    const deviceName = eventData.AccessControllerEvent?.deviceName || "N/A";
+    const dateTime = eventData.dateTime || "N/A";
+    const macAddress = eventData.macAddress || "N/A";
+    const ipAddress = eventData.ipAddress || "N/A";
+
     try {
+      // Check for duplicates before sending
+      const isDuplicate = await this.isDuplicateEvent(eventData);
+      if (isDuplicate) {
+        log.info(
+          `Skipping duplicate event: accessdoorid=${eventData.AccessControllerEvent?.employeeNoString}, datelogs=${eventData.dateTime}, devicename=${eventData.AccessControllerEvent?.deviceName}`
+        );
+        return;
+      }
+      log.info(
+        `[Send Event] Preparing to send event - EmployeeID: ${employeeId}, Device: ${deviceName}, DateTime: ${dateTime}, IP: ${ipAddress}, MAC: ${macAddress}`
+      );
+
       const bridgeHost = config.get("bridge.host");
       const bridgePort = config.get("bridge.port");
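A note on the new duplicate check: moment.parseZone() preserves the offset embedded in the device's dateTime string, and .toDate() collapses it to an absolute instant, so the SQL comparison via TIMESTAMPDIFF(SECOND, ...) is offset-independent. A small illustration with hypothetical timestamps:

import moment from "moment-timezone";

// The same instant written with two different offsets parses to equal Dates,
// so the ±1 second window in isDuplicateEvent treats them as duplicates.
const a = moment.parseZone("2025-01-15T08:00:00+09:00").toDate();
const b = moment.parseZone("2025-01-14T23:00:00Z").toDate();
console.log(Math.abs(a.getTime() - b.getTime()) <= 1000); // true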
@@ -1106,7 +1168,12 @@ export class CrawlingService {

       const url = `http://${bridgeHost}:${bridgePort}${bridgeEndpoint}`;

+      log.info(
+        `[Send Event] Sending to bridge - URL: ${url}, EmployeeID: ${employeeId}, Device: ${deviceName}, DateTime: ${dateTime}`
+      );
+
       // Send as form data with event_log field (matching the webhook format)
+      const startTime = Date.now();
       await axios.post(
         url,
         { event_log: JSON.stringify(eventData) },
@@ -1117,10 +1184,23 @@ export class CrawlingService {
           },
         }
       );
+      const duration = Date.now() - startTime;

-      log.info(`Successfully sent event to bridge endpoint: ${url}`);
-    } catch (error) {
-      log.error("Error sending to internal endpoint:", error);
+      log.info(
+        `[Send Event] ✅ SUCCESS - Sent to bridge in ${duration}ms - EmployeeID: ${employeeId}, Device: ${deviceName}, DateTime: ${dateTime}, IP: ${ipAddress}, MAC: ${macAddress}`
+      );
+    } catch (error: any) {
+      const errorMsg = error?.message || String(error);
+      const statusCode = error?.response?.status || "N/A";
+      log.error(
+        `[Send Event] ❌ FAILED - EmployeeID: ${employeeId}, Device: ${deviceName}, DateTime: ${dateTime}, IP: ${ipAddress}, MAC: ${macAddress}, Error: ${errorMsg}, Status: ${statusCode}`
+      );
+      if (error?.response?.data) {
+        log.error(
+          `[Send Event] Response data:`,
+          JSON.stringify(error.response.data)
+        );
+      }
       throw error;
     }
   }
@@ -1129,67 +1209,138 @@ export class CrawlingService {
    * Run crawling for all devices that need it
    */
   static async runCrawling(): Promise<void> {
+    const startTime = Date.now();
+    const timestamp = moment().format("YYYY-MM-DD HH:mm:ss");
+
     try {
+      log.info("═══════════════════════════════════════════════════════════");
+      log.info(`[Crawling Service] 🚀 Starting crawling cycle at ${timestamp}`);
+      log.info("═══════════════════════════════════════════════════════════");
       console.log("[CrawlingService] Starting crawling service...");
-      log.info("Starting crawling service...");

       // Check database connection
       if (!OrmHelper.DB || !OrmHelper.DB.isInitialized) {
         const errorMsg = "Database not initialized";
         console.error(`[CrawlingService] ${errorMsg}`);
-        log.error(errorMsg);
+        log.error(`[Crawling Service] ❌ ${errorMsg}`);
         throw new Error(errorMsg);
       }

+      log.info(`[Crawling Service] ✓ Database connection verified`);
+
       const devices = await this.getCrawlingDevices();
       console.log(`[CrawlingService] Found ${devices.length} devices to crawl`);
-      log.info(`Found ${devices.length} devices to crawl`);
+      log.info(`[Crawling Service] Found ${devices.length} device(s) to crawl`);

       if (devices.length === 0) {
         console.log(
           "[CrawlingService] No devices to crawl (check if devices have brand='HIKVISION' and flag=1)"
         );
-        log.info("No devices to crawl");
+        log.info(
+          `[Crawling Service] ⚠️ No devices found - Check if devices have brand='HIKVISION' and (flag=1 OR iscrawling=1)`
+        );
         return;
       }

       // Log device details
-      devices.forEach((device) => {
+      log.info(`[Crawling Service] Device list:`);
+      devices.forEach((device, index) => {
+        log.info(
+          `  ${index + 1}. IP: ${device.deviceip}, DeviceID: ${
+            device.deviceid
+          }, Location: ${device.location || "N/A"}, Flag: ${
+            device.flag || "N/A"
+          }, IsCrawling: ${device.iscrawling || "N/A"}`
+        );
         console.log(
-          `[CrawlingService] Device found: ${device.deviceip} (flag: ${device.flag}, iscrawling: ${device.iscrawling})`
+          `[CrawlingService] Device ${index + 1}: ${device.deviceip} (flag: ${
+            device.flag
+          }, iscrawling: ${device.iscrawling})`
         );
       });

+      let totalDevicesProcessed = 0;
+      let totalDevicesSuccess = 0;
+      let totalDevicesError = 0;
+
       // Crawl each device
-      for (const device of devices) {
+      for (let i = 0; i < devices.length; i++) {
+        const device = devices[i];
         try {
-          console.log(`[CrawlingService] Crawling device: ${device.deviceip}`);
+          log.info(
+            `[Crawling Service] ───────────────────────────────────────────────────`
+          );
+          log.info(
+            `[Crawling Service] Processing device ${i + 1}/${devices.length}: ${
+              device.deviceip
+            }`
+          );
+          console.log(
+            `[CrawlingService] Crawling device ${i + 1}/${devices.length}: ${
+              device.deviceip
+            }`
+          );
+
+          const deviceStartTime = Date.now();
           await this.crawlDevice(device);
+          const deviceDuration = Date.now() - deviceStartTime;
+
+          totalDevicesProcessed++;
+          totalDevicesSuccess++;
+
+          log.info(
+            `[Crawling Service] ✓ Device ${device.deviceip} completed in ${deviceDuration}ms`
+          );

           // Add small delay between devices to avoid overwhelming
-          await new Promise((resolve) => setTimeout(resolve, 1000));
-        } catch (error) {
-          console.error(
-            `[CrawlingService] Error crawling device ${device.deviceip}:`,
-            error
-          );
-          log.error(`Error crawling device ${device.deviceip}:`, error);
+          if (i < devices.length - 1) {
+            await new Promise((resolve) => setTimeout(resolve, 1000));
+          }
+        } catch (error: any) {
+          totalDevicesProcessed++;
+          totalDevicesError++;
+          const errorMsg = error?.message || String(error);
+          log.error(
+            `[Crawling Service] ✗ Device ${device.deviceip} failed: ${errorMsg}`
+          );
+          console.error(
+            `[CrawlingService] Error crawling device ${device.deviceip}:`,
+            errorMsg
+          );
+          if (error?.response) {
+            log.error(
+              `[Crawling Service] Response status: ${
+                error.response.status
+              }, Response data: ${JSON.stringify(error.response.data)}`
+            );
+            console.error(
+              `[CrawlingService] Response status: ${error.response.status}`,
+              `Response data:`,
+              JSON.stringify(error.response.data)
+            );
+          }
         }
       }

+      const totalDuration = Date.now() - startTime;
+      log.info("═══════════════════════════════════════════════════════════");
+      log.info(
+        `[Crawling Service] ✅ Crawling cycle completed in ${totalDuration}ms`
+      );
+      log.info(
+        `[Crawling Service] Summary - Total: ${totalDevicesProcessed}, Success: ${totalDevicesSuccess}, Errors: ${totalDevicesError}`
+      );
+      log.info("═══════════════════════════════════════════════════════════");
       console.log("[CrawlingService] Crawling service completed");
-      log.info("Crawling service completed");
-    } catch (error) {
-      console.error("[CrawlingService] Error in crawling service:", error);
-      log.error("Error in crawling service:", error);
+    } catch (error: any) {
+      const totalDuration = Date.now() - startTime;
+      const errorMsg = error?.message || String(error);
+      log.error("═══════════════════════════════════════════════════════════");
+      log.error(
+        `[Crawling Service] ❌ Fatal error after ${totalDuration}ms: ${errorMsg}`
+      );
+      log.error("═══════════════════════════════════════════════════════════");
+      console.error("[CrawlingService] Error in crawling service:", errorMsg);
       throw error; // Re-throw so the caller can handle it
     }
   }
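The cron handler in src/index.ts and runCrawling() together rely on a simple module-level overlap guard. Distilled into one place (a sketch of the pattern already present in the diff, not new behavior):

let isCrawlingRunning = false;

async function runGuarded(run: () => Promise<void>): Promise<void> {
  if (isCrawlingRunning) return; // a previous cycle is still awaiting device I/O
  isCrawlingRunning = true;
  try {
    await run();
  } finally {
    isCrawlingRunning = false; // always release, even after an error
  }
}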
src/services/tt-hcm.code-workspace (new file)
@@ -0,0 +1,17 @@
+{
+  "folders": [
+    {
+      "path": "../../../tt_hcm"
+    },
+    {
+      "path": "../../../svc-hcm"
+    },
+    {
+      "path": "../../../entity"
+    },
+    {
+      "path": "../.."
+    }
+  ],
+  "settings": {}
+}
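The folder paths in a .code-workspace file resolve relative to the workspace file itself, so from src/services/ the ../../../ entries point at sibling checkouts of tt_hcm, svc-hcm, and entity (the entity path matching the file:../entity dependency in package.json), while ../.. is this repository's root.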
|
||||
Reference in New Issue
Block a user