Add database adapter system, production deployment configs, and new dashboard components

- Add PostgreSQL and SQLite database adapters with factory pattern
- Add migration script for SQLite to PostgreSQL
- Add production Dockerfiles and docker-compose configs
- Add deployment documentation and scripts
- Add BIA sync dashboard and matching service
- Add data completeness configuration and components
- Add new dashboard components (BusinessImportanceComparison, ComplexityDynamics, etc.)
- Update various services and routes
- Remove deprecated management-parameters.json and taxonomy files
This commit is contained in:
2026-01-14 00:38:40 +01:00
parent ca21b9538d
commit a7f8301196
73 changed files with 12878 additions and 2003 deletions

42
backend/Dockerfile.prod Normal file
View File

@@ -0,0 +1,42 @@
# ---- Build stage: compile TypeScript to JavaScript ----
FROM node:20-alpine AS builder
WORKDIR /app
# Install ALL dependencies here: the TypeScript compiler (and other build
# tooling) lives in devDependencies, so `--only=production` would make
# `npm run build` fail. Dev deps never reach the final image anyway.
COPY package*.json ./
RUN npm ci
# Copy source
COPY . .
# Build TypeScript
RUN npm run build
# ---- Production stage: minimal runtime image ----
FROM node:20-alpine
WORKDIR /app
# Install only production dependencies (`--omit=dev` is the current
# replacement for the deprecated `--only=production` flag)
COPY package*.json ./
RUN npm ci --omit=dev && npm cache clean --force
# Copy built artifacts from the builder stage
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/src/generated ./src/generated
# Create data directory writable by the unprivileged runtime user
RUN mkdir -p /app/data && chown -R node:node /app/data
# Switch to non-root user
USER node
# Expose API port
EXPOSE 3001
# Health check against the app's /health endpoint
HEALTHCHECK --interval=30s --timeout=3s --start-period=40s --retries=3 \
  CMD node -e "require('http').get('http://localhost:3001/health', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"
# Start production server
CMD ["node", "dist/index.js"]

View File

@@ -0,0 +1,132 @@
{
"metadata": {
"version": "2.0.0",
"description": "Configuration for Data Completeness Score fields",
"lastUpdated": "2026-01-12T22:11:20.047Z"
},
"categories": [
{
"id": "general",
"name": "General",
"description": "General application information fields",
"fields": [
{
"id": "status",
"name": "Status",
"fieldPath": "status",
"enabled": true
},
{
"id": "applicationFunctions",
"name": "ApplicationFunction",
"fieldPath": "applicationFunctions",
"enabled": true
},
{
"id": "businessImpactAnalyse",
"name": "Business Impact Analyse",
"fieldPath": "businessImpactAnalyse",
"enabled": true
},
{
"id": "hostingType",
"name": "Application Component Hosting Type",
"fieldPath": "hostingType",
"enabled": true
}
]
},
{
"id": "1768255689773-uskqbfesn",
"name": "Ownership",
"description": "",
"fields": [
{
"id": "organisation",
"name": "Organisation",
"fieldPath": "organisation",
"enabled": true
},
{
"id": "businessOwner",
"name": "Business Owner",
"fieldPath": "businessOwner",
"enabled": true
},
{
"id": "systemOwner",
"name": "System Owner",
"fieldPath": "systemOwner",
"enabled": true
},
{
"id": "functionalApplicationManagement",
"name": "Functional Application Management",
"fieldPath": "functionalApplicationManagement",
"enabled": true
},
{
"id": "technicalApplicationManagement",
"name": "Technical Application Management",
"fieldPath": "technicalApplicationManagement",
"enabled": true
},
{
"id": "supplierProduct",
"name": "Supplier Product",
"fieldPath": "supplierProduct",
"enabled": true
}
]
},
{
"id": "applicationManagement",
"name": "Application Management",
"description": "Application management classification fields",
"fields": [
{
"id": "governanceModel",
"name": "ICT Governance Model",
"fieldPath": "governanceModel",
"enabled": true
},
{
"id": "applicationType",
"name": "Application Management - Application Type",
"fieldPath": "applicationType",
"enabled": true
},
{
"id": "applicationManagementHosting",
"name": "Application Management - Hosting",
"fieldPath": "applicationManagementHosting",
"enabled": true
},
{
"id": "applicationManagementTAM",
"name": "Application Management - TAM",
"fieldPath": "applicationManagementTAM",
"enabled": true
},
{
"id": "dynamicsFactor",
"name": "Application Management - Dynamics Factor",
"fieldPath": "dynamicsFactor",
"enabled": true
},
{
"id": "complexityFactor",
"name": "Application Management - Complexity Factor",
"fieldPath": "complexityFactor",
"enabled": true
},
{
"id": "numberOfUsers",
"name": "Application Management - Number of Users",
"fieldPath": "numberOfUsers",
"enabled": true
}
]
}
]
}

BIN
backend/data/~$BIA.xlsx Normal file

Binary file not shown.

View File

@@ -2,12 +2,14 @@
"name": "zira-backend",
"version": "1.0.0",
"description": "ZiRA Classificatie Tool Backend",
"type": "module",
"main": "dist/index.js",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"generate-schema": "tsx scripts/generate-schema.ts"
"generate-schema": "tsx scripts/generate-schema.ts",
"migrate:sqlite-to-postgres": "tsx scripts/migrate-sqlite-to-postgres.ts"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",
@@ -19,6 +21,7 @@
"express-rate-limit": "^7.4.1",
"helmet": "^8.0.0",
"openai": "^6.15.0",
"pg": "^8.13.1",
"winston": "^3.17.0",
"xlsx": "^0.18.5"
},
@@ -28,6 +31,7 @@
"@types/cors": "^2.8.17",
"@types/express": "^5.0.0",
"@types/node": "^22.9.0",
"@types/pg": "^8.11.10",
"@types/xlsx": "^0.0.35",
"tsx": "^4.19.2",
"typescript": "^5.6.3"

View File

@@ -0,0 +1,184 @@
/**
* Migration script: SQLite to PostgreSQL
*
* Migrates data from SQLite databases to PostgreSQL.
*
* Usage:
* DATABASE_URL=postgresql://user:pass@host:port/db tsx scripts/migrate-sqlite-to-postgres.ts
*/
import Database from 'better-sqlite3';
import { Pool } from 'pg';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import * as fs from 'fs';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const SQLITE_CACHE_DB = join(__dirname, '../../data/cmdb-cache.db');
const SQLITE_CLASSIFICATIONS_DB = join(__dirname, '../../data/classifications.db');
/**
 * Orchestrates the migration: validates configuration, connects to
 * PostgreSQL, then migrates each SQLite database that exists on disk.
 *
 * Exits the process with code 1 when DATABASE_URL is missing or when the
 * migration fails. Missing SQLite files are skipped with a warning rather
 * than treated as errors.
 */
async function migrate() {
  const databaseUrl = process.env.DATABASE_URL;
  if (!databaseUrl) {
    console.error('Error: DATABASE_URL environment variable is required');
    console.error('Example: DATABASE_URL=postgresql://user:pass@localhost:5432/cmdb');
    process.exit(1);
  }
  console.log('Starting migration from SQLite to PostgreSQL...');
  // Mask the password portion of the connection string before logging it.
  console.log(`PostgreSQL: ${databaseUrl.replace(/:[^:@]+@/, ':****@')}`);
  const pg = new Pool({ connectionString: databaseUrl });
  let failed = false;
  try {
    // Test connection before doing any work
    await pg.query('SELECT 1');
    console.log('✓ PostgreSQL connection successful');
    // Migrate cache database (skipped when the SQLite file is absent)
    if (fs.existsSync(SQLITE_CACHE_DB)) {
      console.log('\nMigrating cache database...');
      await migrateCacheDatabase(pg);
    } else {
      console.log('\n⚠ Cache database not found, skipping...');
    }
    // Migrate classifications database
    if (fs.existsSync(SQLITE_CLASSIFICATIONS_DB)) {
      console.log('\nMigrating classifications database...');
      await migrateClassificationsDatabase(pg);
    } else {
      console.log('\n⚠ Classifications database not found, skipping...');
    }
    console.log('\n✓ Migration completed successfully!');
  } catch (error) {
    console.error('\n✗ Migration failed:', error);
    // Defer the exit: calling process.exit() here would terminate the
    // process immediately and skip the `finally` block, leaving the pool's
    // connections unclosed.
    failed = true;
  } finally {
    await pg.end();
  }
  if (failed) {
    process.exit(1);
  }
}
/**
 * Copies the three cache tables (cached_objects, object_relations,
 * sync_metadata) from the SQLite cache database into PostgreSQL.
 *
 * Rows are upserted (ON CONFLICT ... DO UPDATE) so the migration can be
 * re-run safely without duplicating data. The SQLite database is opened
 * read-only and always closed, even on failure.
 *
 * NOTE(review): rows are inserted one at a time in a loop; for very large
 * caches a batched insert would be significantly faster — confirm expected
 * data volume before optimizing.
 */
async function migrateCacheDatabase(pg: Pool) {
  const sqlite = new Database(SQLITE_CACHE_DB, { readonly: true });
  try {
    // Migrate cached_objects
    const objects = sqlite.prepare('SELECT * FROM cached_objects').all() as any[];
    console.log(` Migrating ${objects.length} cached objects...`);
    for (const obj of objects) {
      // Upsert keyed on id; jira_created_at is intentionally not updated on
      // conflict (creation time never changes).
      await pg.query(
        `INSERT INTO cached_objects (id, object_key, object_type, label, data, jira_updated_at, jira_created_at, cached_at)
        VALUES ($1, $2, $3, $4, $5::jsonb, $6, $7, $8)
        ON CONFLICT (id) DO UPDATE SET
        object_key = EXCLUDED.object_key,
        label = EXCLUDED.label,
        data = EXCLUDED.data,
        jira_updated_at = EXCLUDED.jira_updated_at,
        cached_at = EXCLUDED.cached_at`,
        [
          obj.id,
          obj.object_key,
          obj.object_type,
          obj.label,
          obj.data, // Already JSON string, PostgreSQL will parse it
          obj.jira_updated_at,
          obj.jira_created_at,
          obj.cached_at,
        ]
      );
    }
    // Migrate object_relations
    const relations = sqlite.prepare('SELECT * FROM object_relations').all() as any[];
    console.log(` Migrating ${relations.length} relations...`);
    for (const rel of relations) {
      // Upsert keyed on the (source_id, target_id, attribute_name) unique
      // constraint defined in the PostgreSQL schema.
      await pg.query(
        `INSERT INTO object_relations (source_id, target_id, attribute_name, source_type, target_type)
        VALUES ($1, $2, $3, $4, $5)
        ON CONFLICT (source_id, target_id, attribute_name) DO UPDATE SET
        source_type = EXCLUDED.source_type,
        target_type = EXCLUDED.target_type`,
        [
          rel.source_id,
          rel.target_id,
          rel.attribute_name,
          rel.source_type,
          rel.target_type,
        ]
      );
    }
    // Migrate sync_metadata
    const metadata = sqlite.prepare('SELECT * FROM sync_metadata').all() as any[];
    console.log(` Migrating ${metadata.length} metadata entries...`);
    for (const meta of metadata) {
      await pg.query(
        `INSERT INTO sync_metadata (key, value, updated_at)
        VALUES ($1, $2, $3)
        ON CONFLICT (key) DO UPDATE SET
        value = EXCLUDED.value,
        updated_at = EXCLUDED.updated_at`,
        [meta.key, meta.value, meta.updated_at]
      );
    }
    console.log(' ✓ Cache database migrated');
  } finally {
    sqlite.close();
  }
}
/**
 * Copies classification data (classification_history, session_state) from
 * the SQLite classifications database into PostgreSQL.
 *
 * session_state rows are upserted on their key; the SQLite database is
 * opened read-only and always closed, even on failure.
 *
 * NOTE(review): the history insert uses a bare `ON CONFLICT DO NOTHING`,
 * which only deduplicates if classification_history has a unique
 * constraint — the constraint is not visible here, so re-running the
 * migration may duplicate history rows. Verify against the target schema.
 */
async function migrateClassificationsDatabase(pg: Pool) {
  const sqlite = new Database(SQLITE_CLASSIFICATIONS_DB, { readonly: true });
  try {
    // Migrate classification_history
    const history = sqlite.prepare('SELECT * FROM classification_history').all() as any[];
    console.log(` Migrating ${history.length} classification history entries...`);
    for (const entry of history) {
      await pg.query(
        `INSERT INTO classification_history (application_id, application_name, changes, source, timestamp, user_id)
        VALUES ($1, $2, $3, $4, $5, $6)
        ON CONFLICT DO NOTHING`,
        [
          entry.application_id,
          entry.application_name,
          entry.changes,
          entry.source,
          entry.timestamp,
          entry.user_id,
        ]
      );
    }
    // Migrate session_state
    const sessions = sqlite.prepare('SELECT * FROM session_state').all() as any[];
    console.log(` Migrating ${sessions.length} session state entries...`);
    for (const session of sessions) {
      // Upsert keyed on the session key so re-runs refresh existing rows.
      await pg.query(
        `INSERT INTO session_state (key, value, updated_at)
        VALUES ($1, $2, $3)
        ON CONFLICT (key) DO UPDATE SET
        value = EXCLUDED.value,
        updated_at = EXCLUDED.updated_at`,
        [session.key, session.value, session.updated_at]
      );
    }
    console.log(' ✓ Classifications database migrated');
  } finally {
    sqlite.close();
  }
}
// Run migration. Exit non-zero if an unexpected error escapes migrate()
// so that CI/deploy scripts can detect the failure — a bare
// `.catch(console.error)` would log the error but leave the exit code at 0.
migrate().catch((error) => {
  console.error(error);
  process.exit(1);
});

View File

@@ -1,284 +0,0 @@
{
"version": "2024.1",
"source": "Zuyderland ICMT - Application Management Framework",
"lastUpdated": "2024-12-19",
"referenceData": {
"applicationStatuses": [
{
"key": "status",
"name": "Status",
"description": "Algemene status",
"order": 0,
"color": "#6b7280",
"includeInFilter": true
},
{
"key": "prod",
"name": "In Production",
"description": "Productie - actief in gebruik",
"order": 1,
"color": "#22c55e",
"includeInFilter": true
},
{
"key": "impl",
"name": "Implementation",
"description": "In implementatie",
"order": 2,
"color": "#3b82f6",
"includeInFilter": true
},
{
"key": "poc",
"name": "Proof of Concept",
"description": "Proefproject",
"order": 3,
"color": "#8b5cf6",
"includeInFilter": true
},
{
"key": "eos",
"name": "End of support",
"description": "Geen ondersteuning meer van leverancier",
"order": 4,
"color": "#f97316",
"includeInFilter": true
},
{
"key": "eol",
"name": "End of life",
"description": "Einde levensduur, wordt uitgefaseerd",
"order": 5,
"color": "#ef4444",
"includeInFilter": true
},
{
"key": "deprecated",
"name": "Deprecated",
"description": "Verouderd, wordt uitgefaseerd",
"order": 6,
"color": "#f97316",
"includeInFilter": true
},
{
"key": "shadow",
"name": "Shadow IT",
"description": "Niet-geautoriseerde IT",
"order": 7,
"color": "#eab308",
"includeInFilter": true
},
{
"key": "closed",
"name": "Closed",
"description": "Afgesloten",
"order": 8,
"color": "#6b7280",
"includeInFilter": true
},
{
"key": "undefined",
"name": "Undefined",
"description": "Niet gedefinieerd",
"order": 9,
"color": "#9ca3af",
"includeInFilter": true
}
],
"dynamicsFactors": [
{
"key": "1",
"name": "Stabiel",
"description": "Weinig wijzigingen, uitgekristalliseerd systeem, < 2 releases/jaar",
"order": 1,
"color": "#22c55e"
},
{
"key": "2",
"name": "Gemiddeld",
"description": "Regelmatige wijzigingen, 2-4 releases/jaar, incidentele projecten",
"order": 2,
"color": "#eab308"
},
{
"key": "3",
"name": "Hoog",
"description": "Veel wijzigingen, > 4 releases/jaar, continue doorontwikkeling",
"order": 3,
"color": "#f97316"
},
{
"key": "4",
"name": "Zeer hoog",
"description": "Continu in beweging, grote transformatieprojecten, veel nieuwe functionaliteit",
"order": 4,
"color": "#ef4444"
}
],
"complexityFactors": [
{
"key": "1",
"name": "Laag",
"description": "Standalone applicatie, geen/weinig integraties, standaard configuratie",
"order": 1,
"color": "#22c55e"
},
{
"key": "2",
"name": "Gemiddeld",
"description": "Enkele integraties, beperkt maatwerk, standaard governance",
"order": 2,
"color": "#eab308"
},
{
"key": "3",
"name": "Hoog",
"description": "Veel integraties, significant maatwerk, meerdere stakeholdergroepen",
"order": 3,
"color": "#f97316"
},
{
"key": "4",
"name": "Zeer hoog",
"description": "Platform met meerdere workloads, uitgebreide governance, veel maatwerk",
"order": 4,
"color": "#ef4444"
}
],
"numberOfUsers": [
{
"key": "1",
"name": "< 100",
"minUsers": 0,
"maxUsers": 99,
"order": 1
},
{
"key": "2",
"name": "100 - 500",
"minUsers": 100,
"maxUsers": 500,
"order": 2
},
{
"key": "3",
"name": "500 - 2.000",
"minUsers": 500,
"maxUsers": 2000,
"order": 3
},
{
"key": "4",
"name": "2.000 - 5.000",
"minUsers": 2000,
"maxUsers": 5000,
"order": 4
},
{
"key": "5",
"name": "5.000 - 10.000",
"minUsers": 5000,
"maxUsers": 10000,
"order": 5
},
{
"key": "6",
"name": "10.000 - 15.000",
"minUsers": 10000,
"maxUsers": 15000,
"order": 6
},
{
"key": "7",
"name": "> 15.000",
"minUsers": 15000,
"maxUsers": null,
"order": 7
}
],
"governanceModels": [
{
"key": "A",
"name": "Centraal Beheer",
"shortDescription": "ICMT voert volledig beheer uit",
"description": "Volledige dienstverlening door ICMT. Dit is het standaardmodel voor kernapplicaties.",
"applicability": "Kernapplicaties met BIA-classificatie D, E of F (belangrijk tot zeer kritiek). Voorbeelden: EPD (HiX), ERP (AFAS), Microsoft 365, kritieke zorgapplicaties.",
"icmtInvolvement": "Volledig",
"businessInvolvement": "Minimaal",
"supplierInvolvement": "Via ICMT",
"order": 1,
"color": "#3b82f6"
},
{
"key": "B",
"name": "Federatief Beheer",
"shortDescription": "ICMT + business delen beheer",
"description": "ICMT en business delen de verantwoordelijkheid. Geschikt voor applicaties met een sterke key user organisatie.",
"applicability": "Kernapplicaties met BIA-classificatie D, E of F (belangrijk tot zeer kritiek). Voorbeelden: EPD (HiX), ERP (AFAS), Microsoft 365, kritieke zorgapplicaties.",
"icmtInvolvement": "Gedeeld",
"businessInvolvement": "Gedeeld",
"supplierInvolvement": "Via ICMT/Business",
"order": 2,
"color": "#8b5cf6"
},
{
"key": "C",
"name": "Uitbesteed met ICMT-Regie",
"shortDescription": "Leverancier beheert, ICMT regisseert",
"description": "Leverancier voert beheer uit, ICMT houdt regie. Dit is het standaardmodel voor SaaS waar ICMT contractpartij is.",
"applicability": "SaaS-applicaties waar ICMT het contract beheert. Voorbeelden: AFAS, diverse zorg-SaaS oplossingen. De mate van FAB-dienstverlening hangt af van de BIA-classificatie.",
"icmtInvolvement": "Regie",
"businessInvolvement": "Gebruiker",
"supplierInvolvement": "Volledig beheer",
"contractHolder": "ICMT",
"order": 3,
"color": "#06b6d4"
},
{
"key": "D",
"name": "Uitbesteed met Business-Regie",
"shortDescription": "Leverancier beheert, business regisseert",
"description": "Business onderhoudt de leveranciersrelatie. ICMT heeft beperkte betrokkenheid.",
"applicability": "SaaS-applicaties waar de business zelf het contract en de leveranciersrelatie beheert. Voorbeelden: niche SaaS tools, afdelingsspecifieke oplossingen, tools waar de business expertise heeft die ICMT niet heeft.",
"icmtInvolvement": "Beperkt",
"businessInvolvement": "Regie",
"supplierInvolvement": "Volledig beheer",
"contractHolder": "Business",
"order": 4,
"color": "#14b8a6"
},
{
"key": "E",
"name": "Volledig Decentraal Beheer",
"shortDescription": "Business voert volledig beheer uit",
"description": "Business voert zelf beheer uit. ICMT heeft minimale betrokkenheid.",
"applicability": "Afdelingsspecifieke tools met beperkte impact, Shadow IT die in kaart is gebracht. Voorbeelden: standalone afdelingstools, pilotapplicaties, persoonlijke productiviteitstools.",
"icmtInvolvement": "Minimaal",
"businessInvolvement": "Volledig",
"supplierInvolvement": "Direct met business",
"order": 5,
"color": "#6b7280"
}
]
},
"visualizations": {
"capacityMatrix": {
"description": "Matrix voor capaciteitsplanning gebaseerd op Dynamiek x Complexiteit",
"formula": "Beheerlast = Dynamiek * Complexiteit * log(Gebruikers)",
"weightings": {
"dynamics": 1.0,
"complexity": 1.2,
"users": 0.3
}
},
"governanceDecisionTree": {
"description": "Beslisboom voor keuze regiemodel",
"factors": [
"BIA-classificatie",
"Hosting type (SaaS/On-prem)",
"Contracthouder",
"Key user maturity"
]
}
}
}

View File

@@ -0,0 +1,56 @@
-- AUTO-GENERATED FILE - DO NOT EDIT MANUALLY
-- Generated from Jira Assets Schema via REST API
-- PostgreSQL version
-- Generated at: 2026-01-09T02:12:50.973Z
--
-- Re-generate with: npm run generate-schema
-- =============================================================================
-- Core Tables
-- =============================================================================
-- Cached CMDB objects (all types stored in single table with JSON data)
-- NOTE(review): timestamp columns are TEXT rather than TIMESTAMPTZ —
-- presumably ISO-8601 strings coming from Jira; confirm against the sync
-- engine before changing.
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSONB NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
-- Object relations (references between objects)
-- A (source, target, attribute) triple is unique; source_id/target_id refer
-- to cached_objects.id but are intentionally not foreign keys.
CREATE TABLE IF NOT EXISTS object_relations (
id SERIAL PRIMARY KEY,
source_id TEXT NOT NULL,
target_id TEXT NOT NULL,
attribute_name TEXT NOT NULL,
source_type TEXT NOT NULL,
target_type TEXT NOT NULL,
UNIQUE(source_id, target_id, attribute_name)
);
-- Sync metadata (tracks sync state)
CREATE TABLE IF NOT EXISTS sync_metadata (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
);
-- =============================================================================
-- Indices for Performance
-- =============================================================================
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
-- GIN index enables efficient containment/path queries on the JSONB payload
CREATE INDEX IF NOT EXISTS idx_objects_data_gin ON cached_objects USING GIN (data);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);
CREATE INDEX IF NOT EXISTS idx_relations_target_type ON object_relations(target_type);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_name);

View File

@@ -73,7 +73,7 @@ app.use((req, res, next) => {
// Health check
app.get('/health', async (req, res) => {
const jiraConnected = await dataService.testConnection();
const cacheStatus = dataService.getCacheStatus();
const cacheStatus = await dataService.getCacheStatus();
res.json({
status: 'ok',

View File

@@ -4,6 +4,8 @@ import { databaseService } from '../services/database.js';
import { cmdbService } from '../services/cmdbService.js';
import { logger } from '../services/logger.js';
import { calculateRequiredEffortApplicationManagementWithBreakdown } from '../services/effortCalculation.js';
import { findBIAMatch, loadBIAData, clearBIACache, calculateSimilarity } from '../services/biaMatchingService.js';
import { calculateApplicationCompleteness } from '../services/dataCompletenessConfig.js';
import type { SearchFilters, ReferenceValue, ClassificationResult, ApplicationDetails, ApplicationStatus } from '../types/index.js';
import type { Server, Flows, Certificate, Domain, AzureSubscription, CMDBObjectTypeName } from '../generated/jira-types.js';
@@ -51,6 +53,270 @@ router.get('/team-dashboard', async (req: Request, res: Response) => {
}
});
// Get team portfolio health metrics
router.get('/team-portfolio-health', async (req: Request, res: Response) => {
  try {
    // Excluded statuses arrive as a comma-separated query parameter; when it
    // is absent or blank, fall back to hiding 'Closed' and 'Deprecated'.
    const rawParam = req.query.excludedStatuses as string | undefined;
    let excludedStatuses: ApplicationStatus[];
    if (!rawParam || rawParam.trim().length === 0) {
      excludedStatuses = ['Closed', 'Deprecated'];
    } else {
      excludedStatuses = rawParam
        .split(',')
        .map(entry => entry.trim())
        .filter(entry => entry.length > 0) as ApplicationStatus[];
    }
    const data = await dataService.getTeamPortfolioHealth(excludedStatuses);
    res.json(data);
  } catch (error) {
    logger.error('Failed to get team portfolio health', error);
    res.status(500).json({ error: 'Failed to get team portfolio health' });
  }
});
// Get Business Importance vs BIA comparison
// NOTE: This must come BEFORE the /:id route to avoid route conflicts
router.get('/business-importance-comparison', async (_req: Request, res: Response) => {
  try {
    logger.info('Business Importance comparison endpoint called');
    const comparison = await dataService.getBusinessImportanceComparison();
    logger.info(`Business Importance comparison: returning ${comparison.applications.length} applications`);
    res.json(comparison);
  } catch (error) {
    logger.error('Failed to get business importance comparison', error);
    res.status(500).json({ error: 'Failed to get business importance comparison' });
  }
});
// Test BIA data loading (for debugging)
router.get('/bia-test', async (req: Request, res: Response) => {
  try {
    // Optional cache bust: ?clear=true forces a reload from the Excel file.
    const shouldClear = req.query.clear === 'true';
    if (shouldClear) {
      clearBIACache();
    }
    const records = loadBIAData();
    const preview = records.slice(0, 5).map(r => `${r.applicationName} -> ${r.biaValue}`);
    res.json({
      recordCount: records.length,
      records: records.slice(0, 20), // First 20 records
      sample: preview,
    });
  } catch (error) {
    logger.error('Failed to test BIA data loading', error);
    res.status(500).json({ error: 'Failed to test BIA data loading', details: error instanceof Error ? error.message : String(error) });
  }
});
// Comprehensive debug endpoint for BIA matching.
// Reloads the Excel data, runs the matcher over a sample of applications
// plus a few hard-coded problem cases, and returns per-application detail
// about every candidate match so mismatches can be diagnosed by hand.
router.get('/bia-debug', async (req: Request, res: Response) => {
  try {
    // Always start from a fresh Excel load so results reflect the file on disk.
    clearBIACache();
    const biaData = loadBIAData();
    // Get a few sample applications
    const searchResult = await dataService.searchApplications({}, 1, 50);
    const sampleApps = searchResult.applications.slice(0, 10);
    // Test specific examples mentioned by user
    const testNames = ['Aanmeldzuilen', 'PregnaOne', 'BeagleBoxx'];
    const testApps = searchResult.applications.filter(app =>
      testNames.some(name =>
        app.name.toLowerCase().includes(name.toLowerCase()) ||
        (app.searchReference && app.searchReference.toLowerCase().includes(name.toLowerCase()))
      )
    );
    const debugResults = [];
    // Test each sample app (testApps may overlap sampleApps; duplicates are
    // acceptable for a debug endpoint)
    for (const app of [...sampleApps, ...testApps]) {
      const matchResult = findBIAMatch(app.name, app.searchReference);
      // Find all potential matches in Excel data for detailed analysis
      const normalizedAppName = app.name.toLowerCase().trim();
      const normalizedSearchRef = app.searchReference ? app.searchReference.toLowerCase().trim() : null;
      // For every Excel record, record which heuristics fire (exact, prefix,
      // substring, similarity) so the matcher's decision can be reconstructed.
      const potentialMatches = biaData.map(record => {
        const normalizedRecordName = record.applicationName.toLowerCase().trim();
        const exactNameMatch = normalizedAppName === normalizedRecordName;
        const exactSearchRefMatch = normalizedSearchRef === normalizedRecordName;
        const startsWithApp = normalizedRecordName.startsWith(normalizedAppName) || normalizedAppName.startsWith(normalizedRecordName);
        const containsApp = normalizedRecordName.includes(normalizedAppName) || normalizedAppName.includes(normalizedRecordName);
        // Calculate similarity using the exported function
        const similarity = calculateSimilarity(normalizedAppName, normalizedRecordName);
        return {
          excelName: record.applicationName,
          excelBIA: record.biaValue,
          exactNameMatch,
          exactSearchRefMatch,
          startsWithApp,
          containsApp,
          similarity: similarity,
        };
      }).filter(m => m.exactNameMatch || m.exactSearchRefMatch || m.startsWithApp || m.containsApp || m.similarity > 0.5);
      debugResults.push({
        appName: app.name,
        searchReference: app.searchReference,
        matchResult: matchResult,
        potentialMatches: potentialMatches.slice(0, 5), // Top 5 potential matches
        allExcelRecords: biaData.length,
      });
    }
    res.json({
      excelDataLoaded: biaData.length,
      sampleExcelRecords: biaData.slice(0, 20),
      debugResults,
      // For each hard-coded test name, list the Excel records it loosely matches
      testNames: testNames.map(name => {
        const matchingExcel = biaData.filter(r =>
          r.applicationName.toLowerCase().includes(name.toLowerCase()) ||
          name.toLowerCase().includes(r.applicationName.toLowerCase())
        );
        return {
          name,
          matchingExcelRecords: matchingExcel,
        };
      }),
    });
  } catch (error) {
    logger.error('Failed to debug BIA matching', error);
    res.status(500).json({
      error: 'Failed to debug BIA matching',
      details: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    });
  }
});
// Get BIA comparison data
router.get('/bia-comparison', async (req: Request, res: Response) => {
try {
// Clear cache and reload BIA data to ensure fresh data
const { loadBIAData, clearBIACache } = await import('../services/biaMatchingService.js');
clearBIACache();
// Load fresh data
const testBIAData = loadBIAData();
logger.info(`BIA comparison: Loaded ${testBIAData.length} records from Excel file`);
if (testBIAData.length === 0) {
logger.error('BIA comparison: No Excel data loaded - check if BIA.xlsx exists and is readable');
logger.error('This could indicate:');
logger.error(' 1. Excel file not found at backend/data/BIA.xlsx');
logger.error(' 2. Column C (index 2) or K (index 10) not found');
logger.error(' 3. No valid BIA values (A-F) in column K');
} else {
logger.info(`BIA comparison: Sample Excel records: ${testBIAData.slice(0, 5).map(r => `"${r.applicationName}"->${r.biaValue}`).join(', ')}`);
}
// Get all applications (no filters)
const searchResult = await dataService.searchApplications({}, 1, 10000);
logger.info(`BIA comparison: Found ${searchResult.applications.length} applications to compare`);
// Get all Business Impact Analyses for mapping
const biaReferences = await dataService.getBusinessImpactAnalyses();
const biaMap = new Map<string, ReferenceValue>();
biaReferences.forEach(bia => {
// Map by the letter (A-F) - typically the name starts with the letter
const letter = bia.name.charAt(0).toUpperCase();
if (/^[A-F]$/.test(letter)) {
biaMap.set(letter, bia);
}
// Also try to match by key or name containing the letter
if (bia.name.toUpperCase().includes(' - ')) {
const parts = bia.name.split(' - ');
if (parts.length > 0 && /^[A-F]$/.test(parts[0].trim().toUpperCase())) {
biaMap.set(parts[0].trim().toUpperCase(), bia);
}
}
});
const applications = searchResult.applications;
const comparisonItems = [];
let matched = 0;
let mismatched = 0;
let notFound = 0;
let noExcelBIA = 0;
for (const app of applications) {
// Find BIA match in Excel
const matchResult = findBIAMatch(app.name, app.searchReference);
// Log first few matches for debugging
if (comparisonItems.length < 5) {
logger.debug(`BIA match for "${app.name}": ${matchResult.biaValue || 'NOT FOUND'} (type: ${matchResult.matchType || 'none'})`);
}
// Extract BIA letter from current BIA name (handle formats like "A", "A - Test/Archief", etc.)
let currentBIALetter: string | null = null;
if (app.businessImpactAnalyse?.name) {
const match = app.businessImpactAnalyse.name.match(/^([A-F])/);
if (match) {
currentBIALetter = match[1].toUpperCase();
} else {
// Fallback to first character
currentBIALetter = app.businessImpactAnalyse.name.charAt(0).toUpperCase();
if (!/^[A-F]$/.test(currentBIALetter)) {
currentBIALetter = null;
}
}
}
const excelBIALetter = matchResult.biaValue;
let matchStatus: 'match' | 'mismatch' | 'not_found' | 'no_excel_bia';
if (!excelBIALetter) {
matchStatus = 'no_excel_bia';
noExcelBIA++;
} else if (!currentBIALetter) {
matchStatus = 'not_found';
notFound++;
} else if (currentBIALetter === excelBIALetter) {
matchStatus = 'match';
matched++;
} else {
matchStatus = 'mismatch';
mismatched++;
}
comparisonItems.push({
id: app.id,
key: app.key,
name: app.name,
searchReference: app.searchReference,
currentBIA: app.businessImpactAnalyse,
excelBIA: excelBIALetter,
excelApplicationName: matchResult.excelApplicationName,
matchStatus,
matchType: matchResult.matchType,
matchConfidence: matchResult.matchConfidence,
allMatches: matchResult.allMatches,
});
}
res.json({
applications: comparisonItems,
summary: {
total: applications.length,
matched,
mismatched,
notFound,
noExcelBIA,
},
});
} catch (error) {
logger.error('Failed to get BIA comparison', error);
res.status(500).json({ error: 'Failed to get BIA comparison' });
}
});
// Get application by ID
// Query params:
// - mode=edit: Force refresh from Jira for editing (includes _jiraUpdatedAt for conflict detection)
@@ -60,7 +326,7 @@ router.get('/:id', async (req: Request, res: Response) => {
const mode = req.query.mode as string | undefined;
// Don't treat special routes as application IDs
if (id === 'team-dashboard' || id === 'calculate-effort' || id === 'search') {
if (id === 'team-dashboard' || id === 'team-portfolio-health' || id === 'business-importance-comparison' || id === 'bia-comparison' || id === 'bia-test' || id === 'calculate-effort' || id === 'search') {
res.status(404).json({ error: 'Route not found' });
return;
}
@@ -75,7 +341,14 @@ router.get('/:id', async (req: Request, res: Response) => {
return;
}
res.json(application);
// Calculate data completeness percentage
const completenessPercentage = calculateApplicationCompleteness(application);
const applicationWithCompleteness = {
...application,
dataCompletenessPercentage: Math.round(completenessPercentage * 10) / 10, // Round to 1 decimal
};
res.json(applicationWithCompleteness);
} catch (error) {
logger.error('Failed to get application', error);
res.status(500).json({ error: 'Failed to get application' });
@@ -183,7 +456,7 @@ router.put('/:id', async (req: Request, res: Response) => {
source: actualUpdates.source || 'MANUAL',
timestamp: new Date(),
};
databaseService.saveClassificationResult(classificationResult);
await databaseService.saveClassificationResult(classificationResult);
// Return updated application
const updatedApp = result.data || await dataService.getApplicationById(id);
@@ -279,7 +552,7 @@ router.post('/calculate-effort', async (req: Request, res: Response) => {
router.get('/:id/history', async (req: Request, res: Response) => {
try {
const { id } = req.params;
const history = databaseService.getClassificationsByApplicationId(id);
const history = await databaseService.getClassificationsByApplicationId(id);
res.json(history);
} catch (error) {
logger.error('Failed to get classification history', error);
@@ -352,7 +625,7 @@ router.get('/:id/related/:objectType', async (req: Request, res: Response) => {
const objects = relatedObjects.map(obj => {
// Extract attributes from the object
const attributes: Record<string, string | null> = {};
const objData = obj as Record<string, unknown>;
const objData = obj as unknown as Record<string, unknown>;
// If specific attributes are requested, extract those
if (requestedAttrs.length > 0) {

View File

@@ -14,15 +14,38 @@ import type { CMDBObjectTypeName } from '../generated/jira-types.js';
const router = Router();
// Get cache status
router.get('/status', (req: Request, res: Response) => {
router.get('/status', async (req: Request, res: Response) => {
try {
const cacheStats = cacheStore.getStats();
const syncStatus = syncEngine.getStatus();
const cacheStats = await cacheStore.getStats();
const syncStatus = await syncEngine.getStatus();
// Compare cache count with Jira count for ApplicationComponent
let jiraComparison: { jiraCount?: number; cacheCount: number; difference?: number } | undefined;
if (cacheStats.objectsByType['ApplicationComponent'] !== undefined) {
try {
const { jiraAssetsClient } = await import('../services/jiraAssetsClient.js');
const { OBJECT_TYPES } = await import('../generated/jira-schema.js');
const typeDef = OBJECT_TYPES['ApplicationComponent'];
if (typeDef) {
const searchResult = await jiraAssetsClient.searchObjects(`objectType = "${typeDef.name}"`, 1, 1);
const jiraCount = searchResult.totalCount;
const cacheCount = cacheStats.objectsByType['ApplicationComponent'] || 0;
jiraComparison = {
jiraCount,
cacheCount,
difference: jiraCount - cacheCount,
};
}
} catch (err) {
logger.debug('Could not fetch Jira count for comparison', err);
}
}
res.json({
cache: cacheStats,
sync: syncStatus,
supportedTypes: Object.keys(OBJECT_TYPES),
jiraComparison,
});
} catch (error) {
logger.error('Failed to get cache status', error);
@@ -87,7 +110,7 @@ router.post('/sync/:objectType', async (req: Request, res: Response) => {
});
// Clear cache for a specific type
router.delete('/clear/:objectType', (req: Request, res: Response) => {
router.delete('/clear/:objectType', async (req: Request, res: Response) => {
try {
const { objectType } = req.params;
@@ -101,7 +124,7 @@ router.delete('/clear/:objectType', (req: Request, res: Response) => {
logger.info(`Clearing cache for ${objectType}`);
const deleted = cacheStore.clearObjectType(objectType as CMDBObjectTypeName);
const deleted = await cacheStore.clearObjectType(objectType as CMDBObjectTypeName);
res.json({
status: 'cleared',
@@ -115,11 +138,11 @@ router.delete('/clear/:objectType', (req: Request, res: Response) => {
});
// Clear entire cache
router.delete('/clear', (req: Request, res: Response) => {
router.delete('/clear', async (req: Request, res: Response) => {
try {
logger.info('Clearing entire cache');
cacheStore.clearAll();
await cacheStore.clearAll();
res.json({
status: 'cleared',

View File

@@ -68,10 +68,10 @@ router.get('/function/:code', (req: Request, res: Response) => {
});
// Get classification history
router.get('/history', (req: Request, res: Response) => {
router.get('/history', async (req: Request, res: Response) => {
try {
const limit = parseInt(req.query.limit as string) || 50;
const history = databaseService.getClassificationHistory(limit);
const history = await databaseService.getClassificationHistory(limit);
res.json(history);
} catch (error) {
logger.error('Failed to get classification history', error);
@@ -80,9 +80,9 @@ router.get('/history', (req: Request, res: Response) => {
});
// Get classification stats
router.get('/stats', (req: Request, res: Response) => {
router.get('/stats', async (req: Request, res: Response) => {
try {
const dbStats = databaseService.getStats();
const dbStats = await databaseService.getStats();
res.json(dbStats);
} catch (error) {
logger.error('Failed to get classification stats', error);

View File

@@ -5,6 +5,7 @@ import { fileURLToPath } from 'url';
import { logger } from '../services/logger.js';
import { clearEffortCalculationConfigCache, getEffortCalculationConfigV25 } from '../services/effortCalculation.js';
import type { EffortCalculationConfig, EffortCalculationConfigV25 } from '../config/effortCalculation.js';
import type { DataCompletenessConfig } from '../types/index.js';
// Get __dirname equivalent for ES modules
const __filename = fileURLToPath(import.meta.url);
@@ -15,6 +16,7 @@ const router = Router();
// Path to the configuration files
const CONFIG_FILE_PATH = join(__dirname, '../../data/effort-calculation-config.json');
const CONFIG_FILE_PATH_V25 = join(__dirname, '../../data/effort-calculation-config-v25.json');
const COMPLETENESS_CONFIG_FILE_PATH = join(__dirname, '../../data/data-completeness-config.json');
/**
* Get the current effort calculation configuration (legacy)
@@ -122,5 +124,143 @@ router.put('/effort-calculation-v25', async (req: Request, res: Response) => {
}
});
/**
 * Get the data completeness configuration.
 *
 * Reads the configuration JSON from disk; when the file is missing or
 * unreadable, responds with a built-in default instead of failing.
 */
router.get('/data-completeness', async (req: Request, res: Response) => {
  try {
    // Try to read from JSON file, fallback to default config
    try {
      const fileContent = await readFile(COMPLETENESS_CONFIG_FILE_PATH, 'utf-8');
      const config = JSON.parse(fileContent) as DataCompletenessConfig;
      res.json(config);
    } catch (fileError) {
      // File doesn't exist (or can't be read/parsed) — fall back to defaults.
      logger.debug('Data completeness config file not readable, returning default config', fileError);
      // NOTE: `categories` must be an ARRAY of { id, name, fields[] } objects.
      // This matches the on-disk schema (v2.0.0) and the validation performed
      // by the PUT handler; an object-keyed shape would never pass that
      // validation if the default were saved back unchanged.
      const defaultConfig: DataCompletenessConfig = {
        metadata: {
          version: '2.0.0',
          description: 'Configuration for Data Completeness Score fields',
          lastUpdated: new Date().toISOString(),
        },
        categories: [
          {
            id: 'general',
            name: 'General',
            description: 'General application information fields',
            fields: [
              { id: 'organisation', name: 'Organisation', fieldPath: 'organisation', enabled: true },
              { id: 'applicationFunctions', name: 'ApplicationFunction', fieldPath: 'applicationFunctions', enabled: true },
              { id: 'status', name: 'Status', fieldPath: 'status', enabled: true },
              { id: 'businessImpactAnalyse', name: 'Business Impact Analyse', fieldPath: 'businessImpactAnalyse', enabled: true },
              { id: 'hostingType', name: 'Application Component Hosting Type', fieldPath: 'hostingType', enabled: true },
              { id: 'supplierProduct', name: 'Supplier Product', fieldPath: 'supplierProduct', enabled: true },
              { id: 'businessOwner', name: 'Business Owner', fieldPath: 'businessOwner', enabled: true },
              { id: 'systemOwner', name: 'System Owner', fieldPath: 'systemOwner', enabled: true },
              { id: 'functionalApplicationManagement', name: 'Functional Application Management', fieldPath: 'functionalApplicationManagement', enabled: true },
              { id: 'technicalApplicationManagement', name: 'Technical Application Management', fieldPath: 'technicalApplicationManagement', enabled: true },
            ],
          },
          {
            id: 'applicationManagement',
            name: 'Application Management',
            description: 'Application management classification fields',
            fields: [
              { id: 'governanceModel', name: 'ICT Governance Model', fieldPath: 'governanceModel', enabled: true },
              { id: 'applicationType', name: 'Application Management - Application Type', fieldPath: 'applicationType', enabled: true },
              { id: 'applicationManagementHosting', name: 'Application Management - Hosting', fieldPath: 'applicationManagementHosting', enabled: true },
              { id: 'applicationManagementTAM', name: 'Application Management - TAM', fieldPath: 'applicationManagementTAM', enabled: true },
              { id: 'dynamicsFactor', name: 'Application Management - Dynamics Factor', fieldPath: 'dynamicsFactor', enabled: true },
              { id: 'complexityFactor', name: 'Application Management - Complexity Factor', fieldPath: 'complexityFactor', enabled: true },
              { id: 'numberOfUsers', name: 'Application Management - Number of Users', fieldPath: 'numberOfUsers', enabled: true },
            ],
          },
        ],
      };
      res.json(defaultConfig);
    }
  } catch (error) {
    logger.error('Failed to get data completeness configuration', error);
    res.status(500).json({ error: 'Failed to get configuration' });
  }
});
/**
 * Update the data completeness configuration.
 *
 * Validates the payload shape (categories array; each category has id/name
 * and a fields array; each field has id/name/fieldPath/enabled), stamps the
 * metadata with the save time, persists the config to disk and clears the
 * in-memory config cache so the next read sees the new file.
 */
router.put('/data-completeness', async (req: Request, res: Response) => {
  // Returns the first validation problem found, or null when the payload is valid.
  const findValidationError = (config: DataCompletenessConfig): string | null => {
    if (!config.categories || !Array.isArray(config.categories)) {
      return 'Invalid configuration: categories must be an array';
    }
    if (config.categories.length === 0) {
      return 'Invalid configuration: must have at least one category';
    }
    for (const category of config.categories) {
      if (!category.id || typeof category.id !== 'string') {
        return 'Invalid configuration: each category must have an id';
      }
      if (!category.name || typeof category.name !== 'string') {
        return 'Invalid configuration: each category must have a name';
      }
      if (!Array.isArray(category.fields)) {
        return 'Invalid configuration: category fields must be arrays';
      }
      for (const field of category.fields) {
        if (!field.id || typeof field.id !== 'string') {
          return 'Invalid configuration: each field must have an id';
        }
        if (!field.name || typeof field.name !== 'string') {
          return 'Invalid configuration: each field must have a name';
        }
        if (!field.fieldPath || typeof field.fieldPath !== 'string') {
          return 'Invalid configuration: each field must have a fieldPath';
        }
        if (typeof field.enabled !== 'boolean') {
          return 'Invalid configuration: each field must have an enabled boolean';
        }
      }
    }
    return null;
  };

  try {
    const config = req.body as DataCompletenessConfig;

    const validationError = findValidationError(config);
    if (validationError) {
      res.status(400).json({ error: validationError });
      return;
    }

    // Stamp the save time while keeping any other metadata the client sent.
    config.metadata = {
      ...config.metadata,
      lastUpdated: new Date().toISOString(),
    };

    // Persist pretty-printed JSON so the file stays hand-editable.
    await writeFile(COMPLETENESS_CONFIG_FILE_PATH, JSON.stringify(config, null, 2), 'utf-8');

    // Drop the cached config so the next request reloads the new file.
    const { clearDataCompletenessConfigCache } = await import('../services/dataCompletenessConfig.js');
    clearDataCompletenessConfigCache();

    logger.info('Data completeness configuration updated');
    res.json({ success: true, message: 'Configuration saved successfully' });
  } catch (error) {
    logger.error('Failed to update data completeness configuration', error);
    res.status(500).json({ error: 'Failed to save configuration' });
  }
});
export default router;

File diff suppressed because it is too large Load Diff

View File

@@ -51,7 +51,7 @@ router.get('/:type', async (req: Request, res: Response) => {
searchTerm: search,
});
const count = cmdbService.countObjects(type as CMDBObjectTypeName);
const count = await cmdbService.countObjects(type as CMDBObjectTypeName);
res.json({
objectType: type,

View File

@@ -1,6 +1,10 @@
import { Router } from 'express';
import { OBJECT_TYPES, SCHEMA_GENERATED_AT, SCHEMA_OBJECT_TYPE_COUNT, SCHEMA_TOTAL_ATTRIBUTES } from '../generated/jira-schema.js';
import type { ObjectTypeDefinition, AttributeDefinition } from '../generated/jira-schema.js';
import { dataService } from '../services/dataService.js';
import { logger } from '../services/logger.js';
import { jiraAssetsClient } from '../services/jiraAssetsClient.js';
import type { CMDBObjectTypeName } from '../generated/jira-types.js';
const router = Router();
@@ -27,13 +31,15 @@ interface SchemaResponse {
totalAttributes: number;
};
objectTypes: Record<string, ObjectTypeWithLinks>;
cacheCounts?: Record<string, number>; // Cache counts by type name (from objectsByType)
jiraCounts?: Record<string, number>; // Actual counts from Jira Assets API
}
/**
* GET /api/schema
* Returns the complete Jira Assets schema with object types, attributes, and links
*/
router.get('/', (req, res) => {
router.get('/', async (req, res) => {
try {
// Build links between object types
const objectTypesWithLinks: Record<string, ObjectTypeWithLinks> = {};
@@ -72,6 +78,41 @@ router.get('/', (req, res) => {
}
}
// Get cache counts (objectsByType) if available
let cacheCounts: Record<string, number> | undefined;
try {
const cacheStatus = await dataService.getCacheStatus();
cacheCounts = cacheStatus.objectsByType;
} catch (err) {
logger.debug('Could not fetch cache counts for schema response', err);
// Continue without cache counts - not critical
}
// Fetch actual counts from Jira Assets for all object types
// This ensures the counts match exactly what's in Jira Assets
const jiraCounts: Record<string, number> = {};
const typeNames = Object.keys(OBJECT_TYPES) as CMDBObjectTypeName[];
logger.info(`Schema: Fetching object counts from Jira Assets for ${typeNames.length} object types...`);
// Fetch counts in parallel for better performance
const countPromises = typeNames.map(async (typeName) => {
try {
const count = await jiraAssetsClient.getObjectCount(typeName);
jiraCounts[typeName] = count;
return { typeName, count };
} catch (error) {
logger.warn(`Schema: Failed to get count for ${typeName}`, error);
// Use 0 as fallback if API call fails
jiraCounts[typeName] = 0;
return { typeName, count: 0 };
}
});
await Promise.all(countPromises);
logger.info(`Schema: Fetched counts for ${Object.keys(jiraCounts).length} object types from Jira Assets`);
const response: SchemaResponse = {
metadata: {
generatedAt: SCHEMA_GENERATED_AT,
@@ -79,6 +120,8 @@ router.get('/', (req, res) => {
totalAttributes: SCHEMA_TOTAL_ATTRIBUTES,
},
objectTypes: objectTypesWithLinks,
cacheCounts,
jiraCounts,
};
res.json(response);

View File

@@ -0,0 +1,678 @@
/**
* BIA Matching Service
*
* Provides functionality to:
* - Load BIA data from Excel file
* - Match applications with Excel BIA values using smart algorithms
*/
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
import { dirname } from 'path';
import { fileURLToPath } from 'url';
import * as XLSX from 'xlsx';
import { logger } from './logger.js';
// Get __dirname equivalent for ES modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// BIA Excel data cache
/** One usable row of the BIA Excel sheet: an application name plus its single-letter (A-F) BIA score. */
export interface BIARecord {
  applicationName: string;
  biaValue: string;
}
/**
 * Outcome of matching a CMDB application against the Excel BIA records.
 *
 * biaValue / excelApplicationName / matchType are all null when no match
 * was found. matchConfidence (0-1) is set for non-exact matches.
 */
export interface BIAMatchResult {
  biaValue: string | null;
  excelApplicationName: string | null;
  matchType: 'exact' | 'search_reference' | 'fuzzy' | null;
  matchConfidence?: number;
  // All candidates that cleared the match threshold (best first), so callers
  // can surface ambiguous matches; only set when there was more than one.
  allMatches?: Array<{
    excelApplicationName: string;
    biaValue: string;
    matchType: 'exact' | 'search_reference' | 'partial_starts' | 'partial_contains' | 'fuzzy';
    confidence: number;
  }>;
}
// Module-level cache of parsed Excel rows (null = never loaded).
let biaDataCache: BIARecord[] | null = null;
// Epoch millis of the last load attempt (0 = never loaded).
let biaDataCacheTimestamp: number = 0;
const BIA_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
/**
 * Reset the in-memory BIA cache so the next loadBIAData() call re-reads
 * the Excel file (useful for debugging or forcing a reload).
 */
export function clearBIACache(): void {
  biaDataCacheTimestamp = 0;
  biaDataCache = null;
  logger.info('BIA data cache cleared');
}
/**
 * Load BIA (Business Impact Analysis) records from the BIA.xlsx Excel file.
 *
 * Results are cached in module state for BIA_CACHE_TTL. An empty cached
 * array (the result of a previous failed load) is never treated as valid,
 * so failures are retried on every call.
 *
 * Column layout is fixed (confirmed by the data owner, see inline comment):
 * column C (index 2) holds the application name ("BIA - Informatiemiddel"),
 * column K (index 10) holds the BIA score ("BIA - Bruto risicoscore"),
 * a letter A-F possibly followed by descriptive text.
 *
 * @returns All valid records (name + single-letter A-F value); an empty
 *          array when the file is missing or unreadable.
 */
export function loadBIAData(): BIARecord[] {
  const now = Date.now();
  // Return cached data if still valid AND has records
  // Don't use cache if it's empty (indicates previous load failure)
  if (biaDataCache && biaDataCache.length > 0 && (now - biaDataCacheTimestamp) < BIA_CACHE_TTL) {
    logger.debug(`Using cached BIA data (${biaDataCache.length} records, cached ${Math.round((now - biaDataCacheTimestamp) / 1000)}s ago)`);
    return biaDataCache;
  }
  // Clear cache if it's empty or expired
  if (biaDataCache && biaDataCache.length === 0) {
    logger.warn('Cache contains 0 records, clearing and reloading from Excel file');
    biaDataCache = null;
    biaDataCacheTimestamp = 0;
  }
  logger.info('Loading BIA data from Excel file (cache expired, empty, or not available)');
  // Try multiple possible paths for BIA.xlsx (dev vs. built layouts differ)
  const possiblePaths = [
    join(__dirname, '../../data/BIA.xlsx'), // From dist/services/ -> backend/data/
    join(process.cwd(), 'backend/data/BIA.xlsx'), // From project root
    join(process.cwd(), 'data/BIA.xlsx'), // From current working directory
    join(__dirname, '../../../backend/data/BIA.xlsx'), // Alternative path
  ];
  let biaFilePath: string | null = null;
  for (const path of possiblePaths) {
    if (existsSync(path)) {
      biaFilePath = path;
      logger.info(`Found BIA.xlsx at: ${path}`);
      break;
    } else {
      logger.debug(`BIA.xlsx not found at: ${path}`);
    }
  }
  if (!biaFilePath) {
    // File not found anywhere: cache the empty result. The empty-cache check
    // above guarantees the load is retried on the next call.
    logger.error(`BIA.xlsx file not found in any of the following locations:`);
    possiblePaths.forEach(p => logger.error(` - ${p}`));
    logger.error(`Current working directory: ${process.cwd()}`);
    logger.error(`__dirname: ${__dirname}`);
    biaDataCache = [];
    biaDataCacheTimestamp = now;
    return [];
  }
  logger.info(`Loading BIA data from: ${biaFilePath}`);
  try {
    // Read file using readFileSync and then parse with XLSX.read
    // This works better in ES modules than XLSX.readFile
    const fileBuffer = readFileSync(biaFilePath);
    const workbook = XLSX.read(fileBuffer, { type: 'buffer' });
    const sheetName = workbook.SheetNames[0];
    const worksheet = workbook.Sheets[sheetName];
    // header: 1 -> rows are returned as raw arrays instead of keyed objects
    const data = XLSX.utils.sheet_to_json(worksheet, { header: 1 }) as any[][];
    logger.info(`Loaded Excel file: ${data.length} rows, first row has ${data[0]?.length || 0} columns`);
    // Log the first few rows to help diagnose unexpected file layouts
    if (data.length > 0 && data[0]) {
      logger.info(`First row (header?): Column C (index 2) = "${data[0][2] || '(empty)'}", Column K (index 10) = "${data[0][10] || '(empty)'}"`);
      logger.info(`First row all columns (first 12): ${data[0].slice(0, 12).map((cell: any, idx: number) => `[${String.fromCharCode(65 + idx)}: ${String(cell || '').substring(0, 20)}]`).join(' | ')}`);
    }
    if (data.length > 1 && data[1]) {
      logger.info(`Second row (first data?): Column C = "${data[1][2] || '(empty)'}", Column K = "${data[1][10] || '(empty)'}"`);
    }
    if (data.length > 2 && data[2]) {
      logger.info(`Third row: Column C = "${data[2][2] || '(empty)'}", Column K = "${data[2][10] || '(empty)'}"`);
    }
    // User confirmed: Column C (index 2) = "BIA - Informatiemiddel", Column K (index 10) = "BIA - Bruto risicoscore"
    // ALWAYS use these column positions - don't try to detect
    const applicationNameColumnIndex = 2; // Column C
    const biaValueColumnIndex = 10; // Column K
    // Find header row by checking if column C contains "BIA - Informatiemiddel"
    // (only the first 5 rows are scanned; headers may be preceded by title rows)
    let headerRowIndex = -1;
    for (let i = 0; i < Math.min(5, data.length); i++) {
      const row = data[i];
      if (!row || row.length < 11) continue; // Need at least column K (index 10)
      const cellC = String(row[2] || '').trim().toLowerCase();
      if (cellC.includes('bia') && cellC.includes('informatiemiddel')) {
        headerRowIndex = i;
        logger.info(`Found header row at index ${i}: Column C = "${row[2]}", Column K = "${row[10] || '(empty)'}"`);
        break;
      }
    }
    // If header not found, assume row 0 is header
    if (headerRowIndex === -1) {
      headerRowIndex = 0;
      logger.warn(`Header row not found, assuming row 0 is header. Column C = "${data[0]?.[2] || '(empty)'}", Column K = "${data[0]?.[10] || '(empty)'}"`);
    }
    logger.info(`Using BIA columns: Application name at column C (index ${applicationNameColumnIndex}), BIA value at column K (index ${biaValueColumnIndex})`);
    logger.info(`Header row: ${headerRowIndex}, will start reading data from row ${headerRowIndex + 1}`);
    // Extract data starting from the row after the header
    const records: BIARecord[] = [];
    let skippedRows = 0; // rows with no application name at all
    let rowsWithoutBIA = 0; // rows with a name but no usable A-F value
    for (let i = headerRowIndex + 1; i < data.length; i++) {
      const row = data[i];
      if (!row || row.length <= applicationNameColumnIndex) {
        skippedRows++;
        continue;
      }
      const applicationName = String(row[applicationNameColumnIndex] || '').trim();
      if (!applicationName || applicationName.length === 0) {
        skippedRows++;
        continue; // Skip empty rows
      }
      // Get BIA value from column K (index 10)
      let biaValue = '';
      if (row.length > biaValueColumnIndex) {
        biaValue = String(row[biaValueColumnIndex] || '').trim().toUpperCase();
      } else {
        rowsWithoutBIA++;
        logger.debug(`Row ${i + 1} does not have enough columns for BIA value (need column K, index ${biaValueColumnIndex}, but row has only ${row.length} columns). App name: "${applicationName}"`);
        continue;
      }
      // Extract just the letter if the value contains more than just A-F (e.g., "A - Test/Archief")
      if (biaValue && !/^[A-F]$/.test(biaValue)) {
        const match = biaValue.match(/^([A-F])/);
        if (match) {
          biaValue = match[1];
        } else {
          // If no A-F found, skip this row
          rowsWithoutBIA++;
          logger.debug(`Row ${i + 1}: BIA value "${row[biaValueColumnIndex]}" does not contain A-F. App name: "${applicationName}"`);
          continue;
        }
      }
      // Only add record if we have both application name and valid BIA value (A-F)
      if (applicationName && biaValue && /^[A-F]$/.test(biaValue)) {
        records.push({
          applicationName: applicationName,
          biaValue: biaValue,
        });
      } else if (applicationName && !biaValue) {
        rowsWithoutBIA++;
        logger.debug(`Row ${i + 1}: Application "${applicationName}" has no BIA value in column K`);
      }
    }
    logger.info(`Processed ${data.length - headerRowIndex - 1} data rows: ${records.length} valid records, ${skippedRows} empty rows skipped, ${rowsWithoutBIA} rows without valid BIA value`);
    logger.info(`Loaded ${records.length} BIA records from Excel file`);
    if (records.length > 0) {
      logger.info(`Sample BIA records (first 10):`);
      records.slice(0, 10).forEach((r, idx) => {
        logger.info(` ${idx + 1}. "${r.applicationName}" -> BIA: ${r.biaValue}`);
      });
      if (records.length > 10) {
        logger.info(` ... and ${records.length - 10} more records`);
      }
    } else {
      // Zero records usually means the fixed-column assumption is wrong for
      // this file; dump diagnostics including a few sample rows.
      logger.error('No BIA records loaded from Excel file - check file format and column detection');
      logger.error(`Header row index: ${headerRowIndex}, Application name column: C (index ${applicationNameColumnIndex}), BIA value column: K (index ${biaValueColumnIndex})`);
      logger.error(`Total rows in Excel: ${data.length}, checking rows from ${headerRowIndex + 1} to ${data.length - 1}`);
      logger.error(`Skipped ${skippedRows} empty rows, ${rowsWithoutBIA} rows without valid BIA value`);
      // Log a few sample rows to help debug
      if (data.length > headerRowIndex + 1) {
        logger.error('Sample rows from Excel:');
        for (let sampleRow = headerRowIndex + 1; sampleRow < Math.min(headerRowIndex + 6, data.length); sampleRow++) {
          const row = data[sampleRow];
          if (row) {
            const appName = String(row[2] || '').trim();
            const biaVal = String(row[10] || '').trim();
            logger.error(` Row ${sampleRow + 1}: Column C = "${appName || '(empty)'}", Column K = "${biaVal || '(empty)'}"`);
          }
        }
      }
    }
    biaDataCache = records;
    biaDataCacheTimestamp = now;
    return records;
  } catch (error) {
    // Read/parse failure: log and cache empty (retried on the next call).
    logger.error('Failed to load BIA data from Excel', error);
    biaDataCache = [];
    biaDataCacheTimestamp = now;
    return [];
  }
}
/**
 * Compute the Levenshtein edit distance between two strings.
 *
 * Uses a rolling two-row dynamic-programming table (Wagner-Fischer with the
 * standard space optimization) instead of materializing the full
 * (len1+1) x (len2+1) matrix, reducing memory from O(len1*len2) to O(len2)
 * while producing identical results.
 *
 * @param str1 - First string.
 * @param str2 - Second string.
 * @returns Minimum number of single-character insertions, deletions and
 *          substitutions required to turn str1 into str2.
 */
function levenshteinDistance(str1: string, str2: string): number {
  const len1 = str1.length;
  const len2 = str2.length;

  // Distance to/from the empty string is the other string's length.
  if (len1 === 0) return len2;
  if (len2 === 0) return len1;

  // prev[j] = distance between str1[0..i-1] and str2[0..j]
  let prev: number[] = Array.from({ length: len2 + 1 }, (_, j) => j);
  let curr: number[] = new Array<number>(len2 + 1);

  for (let i = 1; i <= len1; i++) {
    curr[0] = i; // distance from str1[0..i] to the empty prefix of str2
    for (let j = 1; j <= len2; j++) {
      const cost = str1[i - 1] === str2[j - 1] ? 0 : 1;
      curr[j] = Math.min(
        prev[j] + 1, // deletion
        curr[j - 1] + 1, // insertion
        prev[j - 1] + cost // substitution
      );
    }
    // Reuse the old row as the next scratch row.
    [prev, curr] = [curr, prev];
  }
  return prev[len2];
}
/**
 * Similarity score between two strings in [0, 1], where 1 means identical.
 *
 * Computed as 1 minus the case-insensitive Levenshtein distance normalized
 * by the length of the longer string. Two empty strings score 1.
 */
export function calculateSimilarity(str1: string, str2: string): number {
  const longest = Math.max(str1.length, str2.length);
  if (longest === 0) {
    return 1;
  }
  const edits = levenshteinDistance(str1.toLowerCase(), str2.toLowerCase());
  return 1 - edits / longest;
}
/**
 * Split a string into lowercase word tokens.
 *
 * Characters that are neither word characters, whitespace nor hyphens are
 * treated as separators, then the string is split on runs of whitespace or
 * hyphens, and empty tokens are dropped.
 */
function tokenize(str: string): string[] {
  const cleaned = str.toLowerCase().replace(/[^\w\s-]/g, ' ');
  const parts = cleaned.split(/[\s-]+/);
  return parts.filter((token) => token.length > 0);
}
/**
 * Word-level Jaccard similarity between two strings:
 * |shared distinct tokens| / |all distinct tokens|, in [0, 1].
 *
 * Two token-less strings score 1; exactly one token-less string scores 0.
 */
function wordBasedSimilarity(str1: string, str2: string): number {
  const setA = new Set(tokenize(str1));
  const setB = new Set(tokenize(str2));

  if (setA.size === 0 && setB.size === 0) return 1;
  if (setA.size === 0 || setB.size === 0) return 0;

  // Count shared tokens; union size follows by inclusion-exclusion.
  let shared = 0;
  for (const token of setA) {
    if (setB.has(token)) shared++;
  }
  const totalDistinct = setA.size + setB.size - shared;
  return shared / totalDistinct;
}
/**
* Find BIA match for an application using smart matching algorithm
*
* Matching priority:
* 1. Exact match on application name (case-insensitive)
* 2. Exact match on search reference (if available)
* 3. Partial match (starts with / contains)
* 4. Word-based matching (token matching)
* 5. Fuzzy match using Levenshtein distance (threshold 0.6)
*
* If multiple matches are found, the best one is selected based on:
* - Match type priority (exact > partial > word-based > fuzzy)
* - Confidence/similarity score
* - Length similarity (prefer matches with similar length)
*/
export function findBIAMatch(
applicationName: string,
searchReference: string | null
): BIAMatchResult {
const biaData = loadBIAData();
if (biaData.length === 0) {
logger.warn(`No BIA data available for lookup of "${applicationName}" (biaData.length = 0)`);
return {
biaValue: null,
excelApplicationName: null,
matchType: null,
};
}
logger.info(`[BIA MATCH] Searching for "${applicationName}"${searchReference ? ` (searchRef: "${searchReference}")` : ''} in ${biaData.length} Excel records`);
const normalizedAppName = applicationName.toLowerCase().trim();
const normalizedSearchRef = searchReference ? searchReference.toLowerCase().trim() : null;
// Log first few Excel records for debugging
if (biaData.length > 0) {
logger.info(`[BIA MATCH] Sample Excel records: ${biaData.slice(0, 5).map(r => `"${r.applicationName}"`).join(', ')}`);
}
// Step 1: Try exact match on name (case-insensitive)
logger.info(`[BIA MATCH] Step 1: Trying exact name match for "${normalizedAppName}"`);
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
if (normalizedAppName === normalizedRecordName) {
logger.info(`[BIA MATCH] ✓ Found exact BIA match on name: "${applicationName}" = "${record.applicationName}" -> BIA: ${record.biaValue}`);
return {
biaValue: record.biaValue,
excelApplicationName: record.applicationName,
matchType: 'exact',
};
}
}
logger.info(`[BIA MATCH] Step 1: No exact name match found`);
// Step 2: Try exact match on search reference (if available)
if (normalizedSearchRef) {
logger.info(`[BIA MATCH] Step 2: Trying exact search reference match for "${normalizedSearchRef}"`);
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
if (normalizedSearchRef === normalizedRecordName) {
logger.info(`[BIA MATCH] ✓ Found exact BIA match on search reference: "${searchReference}" = "${record.applicationName}" -> BIA: ${record.biaValue}`);
return {
biaValue: record.biaValue,
excelApplicationName: record.applicationName,
matchType: 'search_reference',
};
}
}
logger.info(`[BIA MATCH] Step 2: No exact search reference match found`);
} else {
logger.info(`[BIA MATCH] Step 2: Skipped (no search reference available)`);
}
// Step 2.5: Try partial match (one name contains the other or starts with the other)
// This handles cases like "Aanmeldzuilen" matching "Aanmeldzuilen LogisP" or "Awareways" matching "Awareways E-Learning"
logger.info(`[BIA MATCH] Step 2.5: Trying partial match (starts with / contains) for "${normalizedAppName}"`);
let bestPartialMatch: { value: string; recordName: string; confidence: number; type: 'partial_starts' | 'partial_contains' } | null = null;
const allPartialMatches: Array<{ value: string; recordName: string; type: 'partial_starts' | 'partial_contains'; confidence: number }> = [];
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
// Check if one name starts with the other (strongest signal)
if (normalizedAppName.startsWith(normalizedRecordName) || normalizedRecordName.startsWith(normalizedAppName)) {
// Calculate confidence based on length ratio
const shorter = Math.min(normalizedAppName.length, normalizedRecordName.length);
const longer = Math.max(normalizedAppName.length, normalizedRecordName.length);
const baseConfidence = shorter / longer;
// For "starts with" matches, boost confidence if the shorter name is at least 5 characters
const confidence = shorter >= 5 ? Math.max(baseConfidence, 0.45) : baseConfidence;
allPartialMatches.push({
value: record.biaValue,
recordName: record.applicationName,
type: 'partial_starts',
confidence,
});
if (!bestPartialMatch || confidence > bestPartialMatch.confidence) {
bestPartialMatch = {
value: record.biaValue,
recordName: record.applicationName,
confidence,
type: 'partial_starts',
};
}
}
// Also check if one contains the other (weaker signal, but still valid)
else if (normalizedAppName.includes(normalizedRecordName) || normalizedRecordName.includes(normalizedAppName)) {
const shorter = Math.min(normalizedAppName.length, normalizedRecordName.length);
const longer = Math.max(normalizedAppName.length, normalizedRecordName.length);
const confidence = (shorter / longer) * 0.8; // Lower confidence for contains vs starts with
allPartialMatches.push({
value: record.biaValue,
recordName: record.applicationName,
type: 'partial_contains',
confidence,
});
if (!bestPartialMatch || confidence > bestPartialMatch.confidence) {
bestPartialMatch = {
value: record.biaValue,
recordName: record.applicationName,
confidence,
type: 'partial_contains',
};
}
}
}
if (allPartialMatches.length > 0) {
logger.info(`[BIA MATCH] Step 2.5: Found ${allPartialMatches.length} partial matches: ${allPartialMatches.slice(0, 5).map(m => `"${m.recordName}" (${m.type}, conf: ${(m.confidence * 100).toFixed(1)}%)`).join(', ')}`);
if (allPartialMatches.length > 1) {
logger.info(`[BIA MATCH] Multiple partial matches found! All matches: ${allPartialMatches.map(m => `"${m.recordName}" (${(m.confidence * 100).toFixed(1)}%)`).join(', ')}`);
}
} else {
logger.info(`[BIA MATCH] Step 2.5: No partial matches found`);
}
// Lower threshold for "starts with" matches (0.4 instead of 0.5) to handle cases like
// "Awareways" matching "Awareways E-Learning" where confidence is 9/21 = 0.43
if (bestPartialMatch && bestPartialMatch.confidence >= 0.4) {
logger.info(`[BIA MATCH] ✓ Found partial BIA match: "${applicationName}" -> "${bestPartialMatch.recordName}": ${bestPartialMatch.value} (confidence: ${(bestPartialMatch.confidence * 100).toFixed(1)}%)`);
// Sort all matches by confidence (descending) for transparency
const sortedMatches = allPartialMatches
.filter(m => m.confidence >= 0.4)
.sort((a, b) => b.confidence - a.confidence)
.map(m => ({
excelApplicationName: m.recordName,
biaValue: m.value,
matchType: m.type as 'partial_starts' | 'partial_contains',
confidence: m.confidence,
}));
return {
biaValue: bestPartialMatch.value,
excelApplicationName: bestPartialMatch.recordName,
matchType: 'fuzzy', // Use fuzzy type for partial matches
matchConfidence: bestPartialMatch.confidence,
allMatches: sortedMatches.length > 1 ? sortedMatches : undefined,
};
} else if (bestPartialMatch) {
logger.info(`[BIA MATCH] Step 2.5: Best partial match confidence (${(bestPartialMatch.confidence * 100).toFixed(1)}%) below threshold (40%)`);
}
// Step 2.6: Try word-based matching (token matching)
// This helps with cases where words match but order differs, or extra words are present
logger.info(`[BIA MATCH] Step 2.6: Trying word-based matching for "${normalizedAppName}"`);
let bestWordMatch: { value: string; recordName: string; confidence: number } | null = null;
const allWordMatches: Array<{ value: string; recordName: string; confidence: number }> = [];
const wordThreshold = 0.5; // 50% of words must match
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
const wordSimilarity = wordBasedSimilarity(normalizedAppName, normalizedRecordName);
if (wordSimilarity >= wordThreshold) {
// Combine word similarity with length similarity for better scoring
const lengthSimilarity = 1 - Math.abs(normalizedAppName.length - normalizedRecordName.length) / Math.max(normalizedAppName.length, normalizedRecordName.length);
const confidence = (wordSimilarity * 0.7) + (lengthSimilarity * 0.3);
allWordMatches.push({
value: record.biaValue,
recordName: record.applicationName,
confidence,
});
if (!bestWordMatch || confidence > bestWordMatch.confidence) {
bestWordMatch = {
value: record.biaValue,
recordName: record.applicationName,
confidence,
};
}
}
}
if (allWordMatches.length > 0) {
logger.info(`[BIA MATCH] Step 2.6: Found ${allWordMatches.length} word-based matches: ${allWordMatches.slice(0, 5).map(m => `"${m.recordName}" (${(m.confidence * 100).toFixed(1)}%)`).join(', ')}`);
if (allWordMatches.length > 1) {
logger.info(`[BIA MATCH] Multiple word-based matches found! All matches: ${allWordMatches.map(m => `"${m.recordName}" (${(m.confidence * 100).toFixed(1)}%)`).join(', ')}`);
}
}
// Step 3: Try fuzzy matching with threshold
// Note: Fuzzy matches are shown to users with confidence percentage so they can verify
// Basic safeguard: Require some word overlap OR high similarity to prevent completely unrelated matches
logger.info(`[BIA MATCH] Step 3: Trying fuzzy match (Levenshtein distance) for "${normalizedAppName}"`);
let bestMatch: { value: string; similarity: number; recordName: string } | null = null;
const threshold = 0.6; // 60% similarity threshold
const minWordSimilarity = 0.05; // Require at least 5% word overlap OR high similarity (80%+)
const highSimilarityThreshold = 0.8; // If similarity is this high, word overlap not required
const allFuzzyMatches: Array<{ value: string; recordName: string; similarity: number }> = [];
// Try fuzzy match on name
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
const similarity = calculateSimilarity(normalizedAppName, normalizedRecordName);
const wordSim = wordBasedSimilarity(normalizedAppName, normalizedRecordName);
// Accept if: (high similarity) OR (medium similarity + word overlap)
// This allows high-confidence matches even without word overlap, but requires word overlap for medium confidence
const isHighSimilarity = similarity >= highSimilarityThreshold;
const hasWordOverlap = wordSim >= minWordSimilarity;
const isAcceptable = similarity >= threshold && (isHighSimilarity || hasWordOverlap);
if (isAcceptable) {
allFuzzyMatches.push({
value: record.biaValue,
recordName: record.applicationName,
similarity,
});
if (!bestMatch || similarity > bestMatch.similarity) {
bestMatch = {
value: record.biaValue,
similarity: similarity,
recordName: record.applicationName,
};
}
} else if (similarity >= threshold) {
logger.debug(`[BIA MATCH] Rejected fuzzy match "${record.applicationName}" (similarity: ${(similarity * 100).toFixed(1)}%, word similarity: ${(wordSim * 100).toFixed(1)}% - insufficient word overlap and similarity below high threshold)`);
}
}
// Also try fuzzy match on search reference if available
if (normalizedSearchRef) {
logger.info(`[BIA MATCH] Step 3: Also trying fuzzy match on search reference "${normalizedSearchRef}"`);
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
const similarity = calculateSimilarity(normalizedSearchRef, normalizedRecordName);
const wordSim = wordBasedSimilarity(normalizedSearchRef, normalizedRecordName);
// Same logic: high similarity OR (medium similarity + word overlap)
const isHighSimilarity = similarity >= highSimilarityThreshold;
const hasWordOverlap = wordSim >= minWordSimilarity;
const isAcceptable = similarity >= threshold && (isHighSimilarity || hasWordOverlap);
if (isAcceptable) {
allFuzzyMatches.push({
value: record.biaValue,
recordName: record.applicationName,
similarity,
});
if (!bestMatch || similarity > bestMatch.similarity) {
bestMatch = {
value: record.biaValue,
similarity: similarity,
recordName: record.applicationName,
};
}
} else if (similarity >= threshold) {
logger.debug(`[BIA MATCH] Rejected fuzzy match "${record.applicationName}" via search ref (similarity: ${(similarity * 100).toFixed(1)}%, word similarity: ${(wordSim * 100).toFixed(1)}% - insufficient word overlap and similarity below high threshold)`);
}
}
}
if (allFuzzyMatches.length > 0) {
logger.info(`[BIA MATCH] Step 3: Found ${allFuzzyMatches.length} fuzzy matches above threshold: ${allFuzzyMatches.slice(0, 5).map(m => `"${m.recordName}" (${(m.similarity * 100).toFixed(1)}%)`).join(', ')}`);
if (allFuzzyMatches.length > 1) {
logger.info(`[BIA MATCH] Multiple fuzzy matches found! All matches: ${allFuzzyMatches.map(m => `"${m.recordName}" (${(m.similarity * 100).toFixed(1)}%)`).join(', ')}`);
}
} else {
logger.info(`[BIA MATCH] Step 3: No fuzzy matches found above threshold (${(threshold * 100).toFixed(0)}%)`);
}
// Choose the best match from all available options
// Priority: partial match > word-based match > fuzzy match
if (bestPartialMatch && bestPartialMatch.confidence >= 0.4) {
const sortedMatches = allPartialMatches
.filter(m => m.confidence >= 0.4)
.sort((a, b) => b.confidence - a.confidence)
.map(m => ({
excelApplicationName: m.recordName,
biaValue: m.value,
matchType: m.type as 'partial_starts' | 'partial_contains',
confidence: m.confidence,
}));
logger.info(`[BIA MATCH] ✓ Selected partial match: "${applicationName}" -> "${bestPartialMatch.recordName}": ${bestPartialMatch.value} (confidence: ${(bestPartialMatch.confidence * 100).toFixed(1)}%)`);
return {
biaValue: bestPartialMatch.value,
excelApplicationName: bestPartialMatch.recordName,
matchType: 'fuzzy',
matchConfidence: bestPartialMatch.confidence,
allMatches: sortedMatches.length > 1 ? sortedMatches : undefined,
};
} else if (bestWordMatch && bestWordMatch.confidence >= 0.5) {
const sortedMatches = allWordMatches
.filter(m => m.confidence >= 0.5)
.sort((a, b) => b.confidence - a.confidence)
.map(m => ({
excelApplicationName: m.recordName,
biaValue: m.value,
matchType: 'fuzzy' as const,
confidence: m.confidence,
}));
logger.info(`[BIA MATCH] ✓ Selected word-based match: "${applicationName}" -> "${bestWordMatch.recordName}": ${bestWordMatch.value} (confidence: ${(bestWordMatch.confidence * 100).toFixed(1)}%)`);
return {
biaValue: bestWordMatch.value,
excelApplicationName: bestWordMatch.recordName,
matchType: 'fuzzy',
matchConfidence: bestWordMatch.confidence,
allMatches: sortedMatches.length > 1 ? sortedMatches : undefined,
};
} else if (bestMatch) {
const sortedMatches = allFuzzyMatches
.sort((a, b) => b.similarity - a.similarity)
.map(m => ({
excelApplicationName: m.recordName,
biaValue: m.value,
matchType: 'fuzzy' as const,
confidence: m.similarity,
}));
logger.info(`[BIA MATCH] ✓ Selected fuzzy match: "${applicationName}" -> "${bestMatch.recordName}": ${bestMatch.value} (similarity: ${(bestMatch.similarity * 100).toFixed(1)}%)`);
return {
biaValue: bestMatch.value,
excelApplicationName: bestMatch.recordName,
matchType: 'fuzzy',
matchConfidence: bestMatch.similarity,
allMatches: sortedMatches.length > 1 ? sortedMatches : undefined,
};
}
logger.warn(`[BIA MATCH] ✗ No BIA match found for "${applicationName}"${searchReference ? ` (searchRef: "${searchReference}")` : ''} after checking ${biaData.length} Excel records`);
logger.warn(`[BIA MATCH] Normalized app name: "${normalizedAppName}"`);
if (normalizedSearchRef) {
logger.warn(`[BIA MATCH] Normalized search ref: "${normalizedSearchRef}"`);
}
logger.warn(`[BIA MATCH] Sample Excel names to compare: ${biaData.slice(0, 10).map(r => `"${r.applicationName.toLowerCase().trim()}"`).join(', ')}`);
return {
biaValue: null,
excelApplicationName: null,
matchType: null,
};
}

View File

@@ -1,17 +1,18 @@
/**
* CacheStore - SQLite cache operations for CMDB objects
* CacheStore - Database-agnostic cache operations for CMDB objects
*
* Provides fast local storage for CMDB data synced from Jira Assets.
* Uses the generated schema for type-safe operations.
* Uses database adapter pattern to support both SQLite and PostgreSQL.
*/
import Database from 'better-sqlite3';
import { join, dirname } from 'path';
import * as path from 'path';
import * as fs from 'fs';
import { logger } from './logger.js';
import type { CMDBObject, CMDBObjectTypeName, ObjectReference } from '../generated/jira-types.js';
import { getReferenceAttributes } from '../generated/jira-schema.js';
import { createDatabaseAdapter } from './database/factory.js';
import type { DatabaseAdapter } from './database/interface.js';
// Get current directory for ESM
const currentFileUrl = new URL(import.meta.url);
@@ -37,75 +38,150 @@ export interface QueryOptions {
}
class CacheStore {
private db: Database.Database;
private db: DatabaseAdapter;
private initialized: boolean = false;
private initializationPromise: Promise<void> | null = null;
private isPostgres: boolean = false;
constructor() {
// Ensure data directory exists
const dataDir = dirname(CACHE_DB_PATH);
if (!fs.existsSync(dataDir)) {
fs.mkdirSync(dataDir, { recursive: true });
}
this.db = new Database(CACHE_DB_PATH);
this.initialize();
// Create database adapter based on environment
this.db = createDatabaseAdapter(
process.env.DATABASE_TYPE,
process.env.DATABASE_TYPE === 'postgres' || process.env.DATABASE_TYPE === 'postgresql'
? undefined
: CACHE_DB_PATH
);
this.isPostgres = (process.env.DATABASE_TYPE === 'postgres' || process.env.DATABASE_TYPE === 'postgresql');
// Start initialization but don't wait for it
this.initializationPromise = this.initialize();
}
private initialize(): void {
/**
* Ensure database is initialized before executing queries
*/
private async ensureInitialized(): Promise<void> {
if (this.initialized) return;
if (this.initializationPromise) {
await this.initializationPromise;
return;
}
// If for some reason initialization wasn't started, start it now
this.initializationPromise = this.initialize();
await this.initializationPromise;
}
private async initialize(): Promise<void> {
if (this.initialized) return;
// Read and execute the generated schema
const schemaPath = join(__dirname, '../generated/db-schema.sql');
if (fs.existsSync(schemaPath)) {
const schema = fs.readFileSync(schemaPath, 'utf-8');
this.db.exec(schema);
logger.info('CacheStore: Database schema initialized from generated file');
} else {
// Fallback: create tables directly
this.db.exec(`
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSON NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
try {
// Determine which schema file to use
const schemaPath = this.isPostgres
? join(__dirname, '../generated/db-schema-postgres.sql')
: join(__dirname, '../generated/db-schema.sql');
if (fs.existsSync(schemaPath)) {
const schema = fs.readFileSync(schemaPath, 'utf-8');
await this.db.exec(schema);
logger.info(`CacheStore: Database schema initialized from generated file (${this.isPostgres ? 'PostgreSQL' : 'SQLite'})`);
} else {
// Fallback: create tables directly
const schema = this.isPostgres ? this.getPostgresFallbackSchema() : this.getSqliteFallbackSchema();
await this.db.exec(schema);
logger.info(`CacheStore: Database schema initialized (fallback, ${this.isPostgres ? 'PostgreSQL' : 'SQLite'})`);
}
CREATE TABLE IF NOT EXISTS object_relations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source_id TEXT NOT NULL,
target_id TEXT NOT NULL,
attribute_name TEXT NOT NULL,
source_type TEXT NOT NULL,
target_type TEXT NOT NULL,
UNIQUE(source_id, target_id, attribute_name)
);
CREATE TABLE IF NOT EXISTS sync_metadata (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);
CREATE INDEX IF NOT EXISTS idx_relations_target_type ON object_relations(target_type);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_name);
`);
logger.info('CacheStore: Database schema initialized (fallback)');
this.initialized = true;
logger.info('CacheStore: Database initialization complete');
} catch (error) {
logger.error('CacheStore: Failed to initialize database schema', error);
// Don't throw - allow app to continue, but queries will fail gracefully
logger.warn('CacheStore: Continuing without database initialization - cache operations may fail');
}
}
this.initialized = true;
private getSqliteFallbackSchema(): string {
return `
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSON NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS object_relations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
source_id TEXT NOT NULL,
target_id TEXT NOT NULL,
attribute_name TEXT NOT NULL,
source_type TEXT NOT NULL,
target_type TEXT NOT NULL,
UNIQUE(source_id, target_id, attribute_name)
);
CREATE TABLE IF NOT EXISTS sync_metadata (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);
CREATE INDEX IF NOT EXISTS idx_relations_target_type ON object_relations(target_type);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_name);
`;
}
private getPostgresFallbackSchema(): string {
return `
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSONB NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS object_relations (
id SERIAL PRIMARY KEY,
source_id TEXT NOT NULL,
target_id TEXT NOT NULL,
attribute_name TEXT NOT NULL,
source_type TEXT NOT NULL,
target_type TEXT NOT NULL,
UNIQUE(source_id, target_id, attribute_name)
);
CREATE TABLE IF NOT EXISTS sync_metadata (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
CREATE INDEX IF NOT EXISTS idx_objects_data_gin ON cached_objects USING GIN (data);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);
CREATE INDEX IF NOT EXISTS idx_relations_target_type ON object_relations(target_type);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_name);
`;
}
// ==========================================================================
@@ -115,19 +191,26 @@ class CacheStore {
/**
* Get a single object by ID
*/
getObject<T extends CMDBObject>(typeName: CMDBObjectTypeName, id: string): T | null {
const stmt = this.db.prepare(`
SELECT data FROM cached_objects
WHERE id = ? AND object_type = ?
`);
const row = stmt.get(id, typeName) as { data: string } | undefined;
if (!row) return null;
async getObject<T extends CMDBObject>(typeName: CMDBObjectTypeName, id: string): Promise<T | null> {
try {
return JSON.parse(row.data) as T;
await this.ensureInitialized();
const row = await this.db.queryOne<{ data: string | object }>(`
SELECT data FROM cached_objects
WHERE id = ? AND object_type = ?
`, [id, typeName]);
if (!row) return null;
try {
// PostgreSQL returns JSONB as object, SQLite as string
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch (error) {
logger.error(`CacheStore: Failed to parse object ${id}`, error);
return null;
}
} catch (error) {
logger.error(`CacheStore: Failed to parse object ${id}`, error);
logger.error(`CacheStore: Failed to get object ${id}`, error);
return null;
}
}
@@ -135,17 +218,18 @@ class CacheStore {
/**
* Get a single object by object key (e.g., "ICMT-123")
*/
getObjectByKey<T extends CMDBObject>(typeName: CMDBObjectTypeName, objectKey: string): T | null {
const stmt = this.db.prepare(`
async getObjectByKey<T extends CMDBObject>(typeName: CMDBObjectTypeName, objectKey: string): Promise<T | null> {
await this.ensureInitialized();
const row = await this.db.queryOne<{ data: string | object }>(`
SELECT data FROM cached_objects
WHERE object_key = ? AND object_type = ?
`);
const row = stmt.get(objectKey, typeName) as { data: string } | undefined;
`, [objectKey, typeName]);
if (!row) return null;
try {
return JSON.parse(row.data) as T;
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch (error) {
logger.error(`CacheStore: Failed to parse object ${objectKey}`, error);
return null;
@@ -155,68 +239,81 @@ class CacheStore {
/**
* Get all objects of a specific type
*/
getObjects<T extends CMDBObject>(
async getObjects<T extends CMDBObject>(
typeName: CMDBObjectTypeName,
options?: QueryOptions
): T[] {
const limit = options?.limit || 10000;
const offset = options?.offset || 0;
const orderBy = options?.orderBy || 'label';
const orderDir = options?.orderDir || 'ASC';
): Promise<T[]> {
try {
await this.ensureInitialized();
const limit = options?.limit || 10000;
const offset = options?.offset || 0;
const orderBy = options?.orderBy || 'label';
const orderDir = options?.orderDir || 'ASC';
const stmt = this.db.prepare(`
SELECT data FROM cached_objects
WHERE object_type = ?
ORDER BY ${orderBy} ${orderDir}
LIMIT ? OFFSET ?
`);
const rows = stmt.all(typeName, limit, offset) as { data: string }[];
return rows.map(row => {
try {
return JSON.parse(row.data) as T;
} catch {
return null;
}
}).filter((obj): obj is T => obj !== null);
// Sanitize orderBy to prevent SQL injection
const safeOrderBy = ['id', 'object_key', 'object_type', 'label', 'cached_at'].includes(orderBy)
? orderBy
: 'label';
const safeOrderDir = orderDir === 'DESC' ? 'DESC' : 'ASC';
const rows = await this.db.query<{ data: string | object }>(`
SELECT data FROM cached_objects
WHERE object_type = ?
ORDER BY ${safeOrderBy} ${safeOrderDir}
LIMIT ? OFFSET ?
`, [typeName, limit, offset]);
return rows.map(row => {
try {
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch {
return null;
}
}).filter((obj): obj is T => obj !== null);
} catch (error) {
logger.error(`CacheStore: Failed to get objects for type ${typeName}`, error);
return [];
}
}
/**
* Count objects of a specific type
*/
countObjects(typeName: CMDBObjectTypeName): number {
const stmt = this.db.prepare(`
async countObjects(typeName: CMDBObjectTypeName): Promise<number> {
await this.ensureInitialized();
const row = await this.db.queryOne<{ count: number }>(`
SELECT COUNT(*) as count FROM cached_objects
WHERE object_type = ?
`);
const row = stmt.get(typeName) as { count: number };
return row.count;
`, [typeName]);
return row?.count || 0;
}
/**
* Search objects by label (case-insensitive)
*/
searchByLabel<T extends CMDBObject>(
async searchByLabel<T extends CMDBObject>(
typeName: CMDBObjectTypeName,
searchTerm: string,
options?: QueryOptions
): T[] {
): Promise<T[]> {
await this.ensureInitialized();
const limit = options?.limit || 100;
const offset = options?.offset || 0;
const stmt = this.db.prepare(`
// Use database-agnostic case-insensitive search
const likeOperator = this.isPostgres ? 'ILIKE' : 'LIKE';
const rows = await this.db.query<{ data: string | object }>(`
SELECT data FROM cached_objects
WHERE object_type = ? AND label LIKE ?
WHERE object_type = ? AND label ${likeOperator} ?
ORDER BY label ASC
LIMIT ? OFFSET ?
`);
const rows = stmt.all(typeName, `%${searchTerm}%`, limit, offset) as { data: string }[];
`, [typeName, `%${searchTerm}%`, limit, offset]);
return rows.map(row => {
try {
return JSON.parse(row.data) as T;
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch {
return null;
}
@@ -226,23 +323,24 @@ class CacheStore {
/**
* Search across all object types
*/
searchAllTypes(searchTerm: string, options?: QueryOptions): CMDBObject[] {
async searchAllTypes(searchTerm: string, options?: QueryOptions): Promise<CMDBObject[]> {
await this.ensureInitialized();
const limit = options?.limit || 100;
const offset = options?.offset || 0;
const stmt = this.db.prepare(`
// Use database-agnostic case-insensitive search
const likeOperator = this.isPostgres ? 'ILIKE' : 'LIKE';
const rows = await this.db.query<{ data: string | object }>(`
SELECT data FROM cached_objects
WHERE label LIKE ? OR object_key LIKE ?
WHERE label ${likeOperator} ? OR object_key ${likeOperator} ?
ORDER BY object_type, label ASC
LIMIT ? OFFSET ?
`);
const pattern = `%${searchTerm}%`;
const rows = stmt.all(pattern, pattern, limit, offset) as { data: string }[];
`, [`%${searchTerm}%`, `%${searchTerm}%`, limit, offset]);
return rows.map(row => {
try {
return JSON.parse(row.data) as CMDBObject;
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as CMDBObject;
} catch {
return null;
}
@@ -252,8 +350,9 @@ class CacheStore {
/**
* Upsert a single object
*/
upsertObject<T extends CMDBObject>(typeName: CMDBObjectTypeName, object: T): void {
const stmt = this.db.prepare(`
async upsertObject<T extends CMDBObject>(typeName: CMDBObjectTypeName, object: T): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
INSERT INTO cached_objects (id, object_key, object_type, label, data, jira_updated_at, jira_created_at, cached_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
@@ -262,9 +361,7 @@ class CacheStore {
data = excluded.data,
jira_updated_at = excluded.jira_updated_at,
cached_at = excluded.cached_at
`);
stmt.run(
`, [
object.id,
object.objectKey,
typeName,
@@ -273,30 +370,29 @@ class CacheStore {
object._jiraUpdatedAt || null,
object._jiraCreatedAt || null,
new Date().toISOString()
);
]);
}
/**
* Batch upsert objects (much faster for bulk operations)
*/
batchUpsertObjects<T extends CMDBObject>(typeName: CMDBObjectTypeName, objects: T[]): void {
async batchUpsertObjects<T extends CMDBObject>(typeName: CMDBObjectTypeName, objects: T[]): Promise<void> {
await this.ensureInitialized();
if (objects.length === 0) return;
const stmt = this.db.prepare(`
INSERT INTO cached_objects (id, object_key, object_type, label, data, jira_updated_at, jira_created_at, cached_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
object_key = excluded.object_key,
label = excluded.label,
data = excluded.data,
jira_updated_at = excluded.jira_updated_at,
cached_at = excluded.cached_at
`);
const now = new Date().toISOString();
const batchInsert = this.db.transaction((objs: T[]) => {
for (const obj of objs) {
stmt.run(
await this.db.transaction(async (db) => {
const now = new Date().toISOString();
for (const obj of objects) {
await db.execute(`
INSERT INTO cached_objects (id, object_key, object_type, label, data, jira_updated_at, jira_created_at, cached_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(id) DO UPDATE SET
object_key = excluded.object_key,
label = excluded.label,
data = excluded.data,
jira_updated_at = excluded.jira_updated_at,
cached_at = excluded.cached_at
`, [
obj.id,
obj.objectKey,
typeName,
@@ -305,61 +401,60 @@ class CacheStore {
obj._jiraUpdatedAt || null,
obj._jiraCreatedAt || null,
now
);
]);
}
});
batchInsert(objects);
logger.debug(`CacheStore: Batch upserted ${objects.length} ${typeName} objects`);
}
/**
* Delete an object by ID
*/
deleteObject(typeName: CMDBObjectTypeName, id: string): boolean {
const stmt = this.db.prepare(`
async deleteObject(typeName: CMDBObjectTypeName, id: string): Promise<boolean> {
await this.ensureInitialized();
const changes = await this.db.execute(`
DELETE FROM cached_objects
WHERE id = ? AND object_type = ?
`);
const result = stmt.run(id, typeName);
`, [id, typeName]);
// Also delete related relations
this.deleteRelationsForObject(id);
await this.deleteRelationsForObject(id);
return result.changes > 0;
return changes > 0;
}
/**
* Clear all objects of a specific type
*/
clearObjectType(typeName: CMDBObjectTypeName): number {
async clearObjectType(typeName: CMDBObjectTypeName): Promise<number> {
await this.ensureInitialized();
// First get all IDs to delete relations
const idsStmt = this.db.prepare(`
const ids = await this.db.query<{ id: string }>(`
SELECT id FROM cached_objects WHERE object_type = ?
`);
const ids = idsStmt.all(typeName) as { id: string }[];
`, [typeName]);
// Delete relations
for (const { id } of ids) {
this.deleteRelationsForObject(id);
await this.deleteRelationsForObject(id);
}
// Delete objects
const stmt = this.db.prepare(`
const changes = await this.db.execute(`
DELETE FROM cached_objects WHERE object_type = ?
`);
const result = stmt.run(typeName);
`, [typeName]);
logger.info(`CacheStore: Cleared ${result.changes} ${typeName} objects`);
return result.changes;
logger.info(`CacheStore: Cleared ${changes} ${typeName} objects`);
return changes;
}
/**
* Clear entire cache
*/
clearAll(): void {
this.db.exec('DELETE FROM cached_objects');
this.db.exec('DELETE FROM object_relations');
async clearAll(): Promise<void> {
await this.ensureInitialized();
await this.db.execute('DELETE FROM cached_objects');
await this.db.execute('DELETE FROM object_relations');
logger.info('CacheStore: Cleared all cached data');
}
@@ -370,62 +465,60 @@ class CacheStore {
/**
* Store a relation between two objects
*/
upsertRelation(
async upsertRelation(
sourceId: string,
targetId: string,
attributeName: string,
sourceType: string,
targetType: string
): void {
const stmt = this.db.prepare(`
): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
INSERT INTO object_relations (source_id, target_id, attribute_name, source_type, target_type)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(source_id, target_id, attribute_name) DO UPDATE SET
source_type = excluded.source_type,
target_type = excluded.target_type
`);
stmt.run(sourceId, targetId, attributeName, sourceType, targetType);
`, [sourceId, targetId, attributeName, sourceType, targetType]);
}
/**
* Batch upsert relations
*/
batchUpsertRelations(relations: Array<{
async batchUpsertRelations(relations: Array<{
sourceId: string;
targetId: string;
attributeName: string;
sourceType: string;
targetType: string;
}>): void {
}>): Promise<void> {
await this.ensureInitialized();
if (relations.length === 0) return;
const stmt = this.db.prepare(`
INSERT INTO object_relations (source_id, target_id, attribute_name, source_type, target_type)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(source_id, target_id, attribute_name) DO UPDATE SET
source_type = excluded.source_type,
target_type = excluded.target_type
`);
const batchInsert = this.db.transaction((rels: typeof relations) => {
for (const rel of rels) {
stmt.run(rel.sourceId, rel.targetId, rel.attributeName, rel.sourceType, rel.targetType);
await this.db.transaction(async (db) => {
for (const rel of relations) {
await db.execute(`
INSERT INTO object_relations (source_id, target_id, attribute_name, source_type, target_type)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(source_id, target_id, attribute_name) DO UPDATE SET
source_type = excluded.source_type,
target_type = excluded.target_type
`, [rel.sourceId, rel.targetId, rel.attributeName, rel.sourceType, rel.targetType]);
}
});
batchInsert(relations);
logger.debug(`CacheStore: Batch upserted ${relations.length} relations`);
}
/**
* Get related objects (outbound references from an object)
*/
getRelatedObjects<T extends CMDBObject>(
async getRelatedObjects<T extends CMDBObject>(
sourceId: string,
targetTypeName: CMDBObjectTypeName,
attributeName?: string
): T[] {
): Promise<T[]> {
await this.ensureInitialized();
let query = `
SELECT co.data FROM cached_objects co
JOIN object_relations rel ON co.id = rel.target_id
@@ -438,12 +531,12 @@ class CacheStore {
params.push(attributeName);
}
const stmt = this.db.prepare(query);
const rows = stmt.all(...params) as { data: string }[];
const rows = await this.db.query<{ data: string | object }>(query, params);
return rows.map(row => {
try {
return JSON.parse(row.data) as T;
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch {
return null;
}
@@ -453,11 +546,12 @@ class CacheStore {
/**
* Get objects that reference the given object (inbound references)
*/
getReferencingObjects<T extends CMDBObject>(
async getReferencingObjects<T extends CMDBObject>(
targetId: string,
sourceTypeName: CMDBObjectTypeName,
attributeName?: string
): T[] {
): Promise<T[]> {
await this.ensureInitialized();
let query = `
SELECT co.data FROM cached_objects co
JOIN object_relations rel ON co.id = rel.source_id
@@ -470,12 +564,12 @@ class CacheStore {
params.push(attributeName);
}
const stmt = this.db.prepare(query);
const rows = stmt.all(...params) as { data: string }[];
const rows = await this.db.query<{ data: string | object }>(query, params);
return rows.map(row => {
try {
return JSON.parse(row.data) as T;
const data = typeof row.data === 'string' ? JSON.parse(row.data) : row.data;
return data as T;
} catch {
return null;
}
@@ -485,18 +579,19 @@ class CacheStore {
/**
* Delete all relations for an object
*/
deleteRelationsForObject(objectId: string): void {
const stmt = this.db.prepare(`
async deleteRelationsForObject(objectId: string): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
DELETE FROM object_relations
WHERE source_id = ? OR target_id = ?
`);
stmt.run(objectId, objectId);
`, [objectId, objectId]);
}
/**
* Extract and store relations from an object based on its type schema
*/
extractAndStoreRelations<T extends CMDBObject>(typeName: CMDBObjectTypeName, object: T): void {
async extractAndStoreRelations<T extends CMDBObject>(typeName: CMDBObjectTypeName, object: T): Promise<void> {
await this.ensureInitialized();
const refAttributes = getReferenceAttributes(typeName);
const relations: Array<{
sourceId: string;
@@ -540,7 +635,7 @@ class CacheStore {
}
if (relations.length > 0) {
this.batchUpsertRelations(relations);
await this.batchUpsertRelations(relations);
}
}
@@ -551,36 +646,36 @@ class CacheStore {
/**
* Get sync metadata value
*/
getSyncMetadata(key: string): string | null {
const stmt = this.db.prepare(`
async getSyncMetadata(key: string): Promise<string | null> {
await this.ensureInitialized();
const row = await this.db.queryOne<{ value: string }>(`
SELECT value FROM sync_metadata WHERE key = ?
`);
const row = stmt.get(key) as { value: string } | undefined;
`, [key]);
return row?.value || null;
}
/**
* Set sync metadata value
*/
setSyncMetadata(key: string, value: string): void {
const stmt = this.db.prepare(`
async setSyncMetadata(key: string, value: string): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
INSERT INTO sync_metadata (key, value, updated_at)
VALUES (?, ?, ?)
ON CONFLICT(key) DO UPDATE SET
value = excluded.value,
updated_at = excluded.updated_at
`);
stmt.run(key, value, new Date().toISOString());
`, [key, value, new Date().toISOString()]);
}
/**
* Delete sync metadata
*/
deleteSyncMetadata(key: string): void {
const stmt = this.db.prepare(`
async deleteSyncMetadata(key: string): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
DELETE FROM sync_metadata WHERE key = ?
`);
stmt.run(key);
`, [key]);
}
// ==========================================================================
@@ -590,14 +685,14 @@ class CacheStore {
/**
* Get cache statistics
*/
getStats(): CacheStats {
async getStats(): Promise<CacheStats> {
await this.ensureInitialized();
// Count by type
const typeCountStmt = this.db.prepare(`
const typeCounts = await this.db.query<{ object_type: string; count: number }>(`
SELECT object_type, COUNT(*) as count
FROM cached_objects
GROUP BY object_type
`);
const typeCounts = typeCountStmt.all() as { object_type: string; count: number }[];
const objectsByType: Record<string, number> = {};
let totalObjects = 0;
@@ -607,25 +702,22 @@ class CacheStore {
}
// Count relations
const relCountStmt = this.db.prepare(`
const relCountRow = await this.db.queryOne<{ count: number }>(`
SELECT COUNT(*) as count FROM object_relations
`);
const relCount = (relCountStmt.get() as { count: number }).count;
const relCount = relCountRow?.count || 0;
// Get sync metadata
const lastFullSync = this.getSyncMetadata('lastFullSync');
const lastIncrementalSync = this.getSyncMetadata('lastIncrementalSync');
const lastFullSync = await this.getSyncMetadata('lastFullSync');
const lastIncrementalSync = await this.getSyncMetadata('lastIncrementalSync');
// Check if cache is warm (has Application Components)
const isWarm = (objectsByType['ApplicationComponent'] || 0) > 0;
// Get database file size
// Get database size
let dbSizeBytes = 0;
try {
const stats = fs.statSync(CACHE_DB_PATH);
dbSizeBytes = stats.size;
} catch {
// Ignore
if (this.db.getSizeBytes) {
dbSizeBytes = await this.db.getSizeBytes();
}
return {
@@ -642,19 +734,19 @@ class CacheStore {
/**
* Check if cache is warm (has data)
*/
isWarm(): boolean {
const count = this.countObjects('ApplicationComponent');
async isWarm(): Promise<boolean> {
await this.ensureInitialized();
const count = await this.countObjects('ApplicationComponent');
return count > 0;
}
/**
* Close database connection
*/
close(): void {
this.db.close();
async close(): Promise<void> {
await this.db.close();
}
}
// Export singleton instance
export const cacheStore = new CacheStore();

View File

@@ -4,10 +4,9 @@ import { config } from '../config/env.js';
import { logger } from './logger.js';
import type { ApplicationDetails, AISuggestion, ZiraTaxonomy, ReferenceValue, ChatMessage, ChatConversation, ChatResponse } from '../types/index.js';
import { dataService } from './dataService.js';
import { readFileSync, existsSync } from 'fs';
import { readFileSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import * as XLSX from 'xlsx';
import { randomUUID } from 'crypto';
// AI Provider type
@@ -48,233 +47,19 @@ try {
ziraTaxonomy = { version: '', source: '', lastUpdated: '', domains: [] };
}
// BIA Excel data cache
interface BIARecord {
applicationName: string;
biaValue: string;
// Find the BIA value for an application via the unified matching service.
// Delegates to the same matching logic as the BIA Sync Dashboard so both
// code paths stay consistent; the service module is loaded lazily with a
// dynamic import. Returns the matched BIA letter or null when no match.
async function findBIAValue(applicationName: string, searchReference?: string | null): Promise<string | null> {
  const biaMatching = await import('./biaMatchingService.js');
  const match = biaMatching.findBIAMatch(applicationName, searchReference || null);
  return match.biaValue || null;
}
let biaDataCache: BIARecord[] | null = null;
let biaDataCacheTimestamp: number = 0;
const BIA_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
// Load BIA data from Excel file.
// Reads BIA.xlsx from backend/data/, locates the header row and the two
// relevant columns dynamically (by Dutch header text), and returns one
// record per row that has both an application name and a single-letter
// A-F BIA value. The result (including the empty result on any failure)
// is memoised in biaDataCache for BIA_CACHE_TTL milliseconds.
function loadBIAData(): BIARecord[] {
  const now = Date.now();
  // Return cached data if still valid
  if (biaDataCache && (now - biaDataCacheTimestamp) < BIA_CACHE_TTL) {
    return biaDataCache;
  }
  // Path to BIA.xlsx: from compiled location (dist/services/) go up 2 levels to backend/, then into data/
  const biaFilePath = join(__dirname, '../../data/BIA.xlsx');
  if (!existsSync(biaFilePath)) {
    // A missing file is not an error: BIA lookup is optional. Cache the
    // empty result so we do not re-stat the filesystem until the TTL expires.
    logger.warn(`BIA.xlsx file not found at ${biaFilePath}, skipping BIA lookup`);
    biaDataCache = [];
    biaDataCacheTimestamp = now;
    return [];
  }
  logger.debug(`Loading BIA data from: ${biaFilePath}`);
  try {
    const workbook = XLSX.readFile(biaFilePath);
    const sheetName = workbook.SheetNames[0];
    const worksheet = workbook.Sheets[sheetName];
    // header: 1 yields raw rows as arrays so the header can be scanned manually.
    const data = XLSX.utils.sheet_to_json(worksheet, { header: 1 }) as any[][];
    // Find the header row and determine column indices dynamically
    let headerRowIndex = -1;
    let applicationNameColumnIndex = -1;
    let biaValueColumnIndex = -1;
    // First, find the header row by looking for "BIA - Informatiemiddel" and "BIA - Bruto risicoscore".
    // Only the first 10 rows are scanned; the header is expected near the top.
    for (let i = 0; i < Math.min(10, data.length); i++) {
      const row = data[i];
      if (!row || row.length < 3) continue;
      // Search for "BIA - Informatiemiddel" (application name column)
      for (let col = 0; col < row.length; col++) {
        const cellValue = String(row[col] || '').trim().toLowerCase();
        if (cellValue.includes('bia') && cellValue.includes('informatiemiddel')) {
          applicationNameColumnIndex = col;
          headerRowIndex = i;
          break;
        }
      }
      // If we found the application name column, now find "BIA - Bruto risicoscore"
      // in the same row; either way we stop scanning further rows.
      if (headerRowIndex !== -1 && applicationNameColumnIndex !== -1) {
        for (let col = 0; col < row.length; col++) {
          const cellValue = String(row[col] || '').trim().toLowerCase();
          if (cellValue.includes('bia') && cellValue.includes('bruto') && cellValue.includes('risicoscore')) {
            biaValueColumnIndex = col;
            break;
          }
        }
        break;
      }
    }
    if (headerRowIndex === -1 || applicationNameColumnIndex === -1) {
      logger.warn('Could not find "BIA - Informatiemiddel" column in BIA.xlsx');
      biaDataCache = [];
      biaDataCacheTimestamp = now;
      return [];
    }
    if (biaValueColumnIndex === -1) {
      logger.warn('Could not find "BIA - Bruto risicoscore" column in BIA.xlsx');
      biaDataCache = [];
      biaDataCacheTimestamp = now;
      return [];
    }
    // NOTE(review): String.fromCharCode(65 + col) only labels columns A-Z
    // correctly; past column 26 the logged letter is wrong (cosmetic only).
    logger.info(`Found BIA columns: Application name at column ${applicationNameColumnIndex + 1} (${String.fromCharCode(65 + applicationNameColumnIndex)}), BIA value at column ${biaValueColumnIndex + 1} (${String.fromCharCode(65 + biaValueColumnIndex)})`);
    // Extract data starting from the row after the header
    const records: BIARecord[] = [];
    for (let i = headerRowIndex + 1; i < data.length; i++) {
      const row = data[i];
      if (row && row.length > applicationNameColumnIndex) {
        const applicationName = String(row[applicationNameColumnIndex] || '').trim();
        if (!applicationName || applicationName.length === 0) {
          continue; // Skip empty rows
        }
        // Get BIA value from the dynamically found column
        let biaValue = '';
        if (row.length > biaValueColumnIndex) {
          biaValue = String(row[biaValueColumnIndex] || '').trim().toUpperCase();
        } else {
          logger.debug(`Row ${i} does not have enough columns for BIA value (need column ${biaValueColumnIndex + 1})`);
        }
        // Extract just the letter if the value contains more than just A-F (e.g., "A - Test/Archief")
        if (biaValue && !/^[A-F]$/.test(biaValue)) {
          const match = biaValue.match(/^([A-F])/);
          if (match) {
            biaValue = match[1];
          }
        }
        // Only add record if we have both application name and BIA value;
        // rows with no extractable A-F letter are silently dropped.
        if (applicationName && /^[A-F]$/.test(biaValue)) {
          records.push({
            applicationName: applicationName,
            biaValue: biaValue,
          });
        }
      }
    }
    logger.info(`Loaded ${records.length} BIA records from Excel file`);
    biaDataCache = records;
    biaDataCacheTimestamp = now;
    return records;
  } catch (error) {
    // Any read/parse failure degrades gracefully to "no BIA data" and is cached.
    logger.error('Failed to load BIA data from Excel', error);
    biaDataCache = [];
    biaDataCacheTimestamp = now;
    return [];
  }
}
// Levenshtein (edit) distance between two strings: the minimum number of
// single-character insertions, deletions and substitutions needed to turn
// str1 into str2. Classic O(len1*len2) dynamic-programming matrix.
function levenshteinDistance(str1: string, str2: string): number {
  const len1 = str1.length;
  const len2 = str2.length;
  // Trivial cases: distance to/from the empty string is the other's length.
  if (len1 === 0) return len2;
  if (len2 === 0) return len1;
  // dp[i][j] = distance between the first i chars of str1 and first j of str2.
  const dp: number[][] = Array.from({ length: len1 + 1 }, (_, i) => {
    const row = new Array<number>(len2 + 1);
    row[0] = i;
    return row;
  });
  for (let j = 0; j <= len2; j++) {
    dp[0][j] = j;
  }
  for (let i = 1; i <= len1; i++) {
    for (let j = 1; j <= len2; j++) {
      const substitutionCost = str1[i - 1] === str2[j - 1] ? 0 : 1;
      dp[i][j] = Math.min(
        dp[i - 1][j] + 1, // deletion
        dp[i][j - 1] + 1, // insertion
        dp[i - 1][j - 1] + substitutionCost // substitution
      );
    }
  }
  return dp[len1][len2];
}
// Similarity score in [0, 1], where 1 means identical. Derived from the
// Levenshtein distance normalized by the longer string's length; the
// comparison is case-insensitive. Two empty strings count as identical.
function calculateSimilarity(str1: string, str2: string): number {
  const maxLen = Math.max(str1.length, str2.length);
  if (maxLen === 0) return 1;
  return 1 - levenshteinDistance(str1.toLowerCase(), str2.toLowerCase()) / maxLen;
}
// Find BIA value for application using exact match first, then fuzzy matching
function findBIAValue(applicationName: string): string | null {
const biaData = loadBIAData();
if (biaData.length === 0) {
logger.debug(`No BIA data available for lookup of "${applicationName}"`);
return null;
}
const normalizedAppName = applicationName.toLowerCase().trim();
// Step 1: Try exact match (case-insensitive)
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
if (normalizedAppName === normalizedRecordName) {
logger.info(`Found exact BIA match for "${applicationName}" -> "${record.applicationName}": ${record.biaValue}`);
return record.biaValue;
}
}
// Step 2: Try partial match (one name contains the other)
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
if (normalizedAppName.includes(normalizedRecordName) || normalizedRecordName.includes(normalizedAppName)) {
logger.info(`Found partial BIA match for "${applicationName}" -> "${record.applicationName}": ${record.biaValue}`);
return record.biaValue;
}
}
// Step 3: Try fuzzy matching with lower threshold
let bestMatch: { value: string; similarity: number; recordName: string } | null = null;
const threshold = 0.6; // Lowered threshold to 60% for better matching
for (const record of biaData) {
const normalizedRecordName = record.applicationName.toLowerCase().trim();
const similarity = calculateSimilarity(normalizedAppName, normalizedRecordName);
if (similarity >= threshold) {
if (!bestMatch || similarity > bestMatch.similarity) {
bestMatch = {
value: record.biaValue,
similarity: similarity,
recordName: record.applicationName,
};
}
}
}
if (bestMatch) {
logger.info(`Found fuzzy BIA match for "${applicationName}" -> "${bestMatch.recordName}": ${bestMatch.value} (similarity: ${(bestMatch.similarity * 100).toFixed(1)}%)`);
return bestMatch.value;
}
logger.debug(`No BIA match found for "${applicationName}" (checked ${biaData.length} records)`);
return null;
// Run the unified BIA matcher (same logic as the BIA Sync Dashboard) and
// return the full match result (value, matched name, match type/confidence).
// The service module is loaded lazily via dynamic import.
async function findBIAMatchResult(applicationName: string, searchReference?: string | null) {
  const biaMatching = await import('./biaMatchingService.js');
  return biaMatching.findBIAMatch(applicationName, searchReference || null);
}
// Get Governance Models with additional attributes (Remarks, Application)
@@ -984,8 +769,11 @@ class AIService {
const parsed = JSON.parse(jsonText);
// Check for BIA value in Excel file using fuzzy matching
const excelBIAValue = findBIAValue(application.name);
// Check for BIA value in Excel file using unified matching service
// This uses the same matching logic as the BIA Sync Dashboard
const biaMatchResult = await findBIAMatchResult(application.name, application.searchReference);
const excelBIAValue = biaMatchResult.biaValue;
const excelApplicationName = biaMatchResult.excelApplicationName;
// Parse BIA classification from AI response
let biaClassification = parsed.beheerclassificatie?.bia_classificatie ? {
@@ -996,11 +784,40 @@ class AIService {
// Override BIA classification if found in Excel file - Excel value ALWAYS takes precedence
if (excelBIAValue) {
const originalAIValue = biaClassification?.value || 'geen';
const matchType = biaMatchResult.matchType;
const matchConfidence = biaMatchResult.matchConfidence;
// Build match info message with type and confidence
let matchInfo = '';
if (excelApplicationName) {
if (matchType === 'exact' || matchType === 'search_reference') {
matchInfo = `exacte match met "${excelApplicationName}" uit Excel`;
} else if (matchType === 'fuzzy' && matchConfidence !== undefined) {
const confidencePercent = Math.round(matchConfidence * 100);
matchInfo = `fuzzy match met "${excelApplicationName}" uit Excel (${confidencePercent}% overeenkomst)`;
} else {
matchInfo = `match met "${excelApplicationName}" uit Excel`;
}
} else {
matchInfo = `match gevonden voor "${application.name}"`;
}
// Add warning for fuzzy matches with low confidence
let warning = '';
if (matchType === 'fuzzy' && matchConfidence !== undefined) {
const confidencePercent = Math.round(matchConfidence * 100);
if (confidencePercent < 75) {
warning = ` ⚠️ Let op: Dit is een fuzzy match met ${confidencePercent}% overeenkomst. Controleer of "${excelApplicationName}" inderdaad overeenkomt met "${application.name}".`;
}
}
biaClassification = {
value: excelBIAValue,
reasoning: `Gevonden in BIA.xlsx export (match met "${application.name}"). Originele AI suggestie: ${originalAIValue}. Excel waarde heeft voorrang.`,
reasoning: `Gevonden in BIA.xlsx export (${matchInfo}). Originele AI suggestie: ${originalAIValue}. Excel waarde heeft voorrang.${warning}`,
};
logger.info(`✓ OVERRIDING BIA classification for "${application.name}": Excel value "${excelBIAValue}" (AI suggested: "${originalAIValue}")`);
const matchTypeLabel = matchType === 'exact' ? 'exact' : matchType === 'search_reference' ? 'search reference' : matchType === 'fuzzy' ? `fuzzy (${Math.round((matchConfidence || 0) * 100)}%)` : 'unknown';
logger.info(`✓ OVERRIDING BIA classification for "${application.name}": Excel value "${excelBIAValue}" from "${excelApplicationName || 'unknown'}" (${matchTypeLabel} match, AI suggested: "${originalAIValue}")`);
} else {
logger.debug(`No Excel BIA value found for "${application.name}", using AI suggestion: ${biaClassification?.value || 'geen'}`);
}

View File

@@ -60,7 +60,7 @@ class CMDBService {
}
// Try cache first
const cached = cacheStore.getObject<T>(typeName, id);
const cached = await cacheStore.getObject<T>(typeName, id);
if (cached) {
return cached;
}
@@ -89,14 +89,14 @@ class CMDBService {
const parsed = jiraAssetsClient.parseObject<T>(result.objects[0]);
if (parsed) {
cacheStore.upsertObject(typeName, parsed);
cacheStore.extractAndStoreRelations(typeName, parsed);
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
}
return parsed;
}
// Try cache first
const cached = cacheStore.getObjectByKey<T>(typeName, objectKey);
const cached = await cacheStore.getObjectByKey<T>(typeName, objectKey);
if (cached) {
return cached;
}
@@ -119,14 +119,14 @@ class CMDBService {
const parsed = jiraAssetsClient.parseObject<T>(jiraObj);
if (parsed) {
cacheStore.upsertObject(typeName, parsed);
cacheStore.extractAndStoreRelations(typeName, parsed);
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
}
return parsed;
} catch (error) {
// If object was deleted from Jira, remove it from our cache
if (error instanceof JiraObjectNotFoundError) {
const deleted = cacheStore.deleteObject(typeName, id);
const deleted = await cacheStore.deleteObject(typeName, id);
if (deleted) {
logger.info(`CMDBService: Removed deleted object ${typeName}/${id} from cache`);
}
@@ -145,13 +145,13 @@ class CMDBService {
options?: SearchOptions
): Promise<T[]> {
if (options?.searchTerm) {
return cacheStore.searchByLabel<T>(typeName, options.searchTerm, {
return await cacheStore.searchByLabel<T>(typeName, options.searchTerm, {
limit: options.limit,
offset: options.offset,
});
}
return cacheStore.getObjects<T>(typeName, {
return await cacheStore.getObjects<T>(typeName, {
limit: options?.limit,
offset: options?.offset,
});
@@ -160,15 +160,15 @@ class CMDBService {
/**
* Count objects of a type in cache
*/
countObjects(typeName: CMDBObjectTypeName): number {
return cacheStore.countObjects(typeName);
async countObjects(typeName: CMDBObjectTypeName): Promise<number> {
return await cacheStore.countObjects(typeName);
}
/**
* Search across all object types
*/
async searchAllTypes(searchTerm: string, options?: { limit?: number }): Promise<CMDBObject[]> {
return cacheStore.searchAllTypes(searchTerm, { limit: options?.limit });
return await cacheStore.searchAllTypes(searchTerm, { limit: options?.limit });
}
/**
@@ -179,7 +179,7 @@ class CMDBService {
attributeName: string,
targetTypeName: CMDBObjectTypeName
): Promise<T[]> {
return cacheStore.getRelatedObjects<T>(sourceId, targetTypeName, attributeName);
return await cacheStore.getRelatedObjects<T>(sourceId, targetTypeName, attributeName);
}
/**
@@ -190,7 +190,7 @@ class CMDBService {
sourceTypeName: CMDBObjectTypeName,
attributeName?: string
): Promise<T[]> {
return cacheStore.getReferencingObjects<T>(targetId, sourceTypeName, attributeName);
return await cacheStore.getReferencingObjects<T>(targetId, sourceTypeName, attributeName);
}
// ==========================================================================
@@ -396,29 +396,29 @@ class CMDBService {
/**
* Get cache statistics
*/
getCacheStats(): CacheStats {
return cacheStore.getStats();
async getCacheStats(): Promise<CacheStats> {
return await cacheStore.getStats();
}
/**
* Check if cache has data
*/
isCacheWarm(): boolean {
return cacheStore.isWarm();
async isCacheWarm(): Promise<boolean> {
return await cacheStore.isWarm();
}
/**
* Clear cache for a specific type
*/
clearCacheForType(typeName: CMDBObjectTypeName): void {
cacheStore.clearObjectType(typeName);
async clearCacheForType(typeName: CMDBObjectTypeName): Promise<void> {
await cacheStore.clearObjectType(typeName);
}
/**
* Clear entire cache
*/
clearCache(): void {
cacheStore.clearAll();
async clearCache(): Promise<void> {
await cacheStore.clearAll();
}
// ==========================================================================
@@ -429,7 +429,11 @@ class CMDBService {
* Set user token for current request
*/
setUserToken(token: string | null): void {
jiraAssetsClient.setRequestToken(token);
if (token) {
jiraAssetsClient.setRequestToken(token);
} else {
jiraAssetsClient.clearRequestToken();
}
}
/**

View File

@@ -0,0 +1,190 @@
import { readFileSync, existsSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import { logger } from './logger.js';
import type { DataCompletenessConfig, CompletenessFieldConfig } from '../types/index.js';
// Get __dirname equivalent for ES modules (CommonJS __dirname is not available here)
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Path to the configuration file: ../../data relative to this module's location
const CONFIG_FILE_PATH = join(__dirname, '../../data/data-completeness-config.json');
// Cache for loaded configuration; reset via clearDataCompletenessConfigCache()
let cachedConfig: DataCompletenessConfig | null = null;
/**
 * Build the built-in default completeness configuration, used when no
 * data-completeness-config.json file is present on disk. All default
 * fields start enabled; lastUpdated is stamped at call time.
 */
function getDefaultConfig(): DataCompletenessConfig {
  // Tiny factory so each field entry stays a one-liner.
  const field = (id: string, name: string, fieldPath: string) => ({ id, name, fieldPath, enabled: true });
  return {
    metadata: {
      version: '2.0.0',
      description: 'Configuration for Data Completeness Score fields',
      lastUpdated: new Date().toISOString(),
    },
    categories: [
      {
        id: 'general',
        name: 'General',
        description: 'General application information fields',
        fields: [
          field('organisation', 'Organisation', 'organisation'),
          field('applicationFunctions', 'ApplicationFunction', 'applicationFunctions'),
          field('status', 'Status', 'status'),
          field('businessImpactAnalyse', 'Business Impact Analyse', 'businessImpactAnalyse'),
          field('hostingType', 'Application Component Hosting Type', 'hostingType'),
          field('supplierProduct', 'Supplier Product', 'supplierProduct'),
          field('businessOwner', 'Business Owner', 'businessOwner'),
          field('systemOwner', 'System Owner', 'systemOwner'),
          field('functionalApplicationManagement', 'Functional Application Management', 'functionalApplicationManagement'),
          field('technicalApplicationManagement', 'Technical Application Management', 'technicalApplicationManagement'),
        ],
      },
      {
        id: 'applicationManagement',
        name: 'Application Management',
        description: 'Application management classification fields',
        fields: [
          field('governanceModel', 'ICT Governance Model', 'governanceModel'),
          field('applicationType', 'Application Management - Application Type', 'applicationType'),
          field('applicationManagementHosting', 'Application Management - Hosting', 'applicationManagementHosting'),
          field('applicationManagementTAM', 'Application Management - TAM', 'applicationManagementTAM'),
          field('dynamicsFactor', 'Application Management - Dynamics Factor', 'dynamicsFactor'),
          field('complexityFactor', 'Application Management - Complexity Factor', 'complexityFactor'),
          field('numberOfUsers', 'Application Management - Number of Users', 'numberOfUsers'),
        ],
      },
    ],
  };
}
/**
 * Get the data completeness configuration.
 * Loads the JSON file at CONFIG_FILE_PATH on first use and caches the
 * result; falls back to the built-in default when the file is absent or
 * unreadable. Call clearDataCompletenessConfigCache() to force a reload.
 */
export function getDataCompletenessConfig(): DataCompletenessConfig {
  // Return cached config if available
  if (cachedConfig) {
    return cachedConfig;
  }
  // Read directly and handle the error instead of existsSync + readFileSync:
  // avoids the check-then-read race if the file disappears in between.
  try {
    const fileContent = readFileSync(CONFIG_FILE_PATH, 'utf-8');
    // NOTE(review): content is trusted as-is (no schema validation); a
    // well-formed but semantically wrong JSON file will propagate.
    cachedConfig = JSON.parse(fileContent) as DataCompletenessConfig;
    logger.info('Loaded data completeness configuration from file');
    return cachedConfig;
  } catch (error) {
    // A missing file is the normal "use defaults" case; only warn on
    // genuine read/parse failures.
    const isMissing = (error as { code?: string })?.code === 'ENOENT';
    if (!isMissing) {
      logger.warn('Failed to load data completeness configuration from file, using default', error);
    }
  }
  // Return default config (cached so subsequent calls skip the filesystem)
  cachedConfig = getDefaultConfig();
  return cachedConfig;
}
/**
 * Clear the cached configuration so the next call to
 * getDataCompletenessConfig() re-reads the config file from disk.
 * Call this after the configuration file has been updated.
 */
export function clearDataCompletenessConfigCache(): void {
  cachedConfig = null;
}
/**
 * Return the enabled fields of the category with the given id.
 * Unknown category ids yield an empty array.
 */
export function getEnabledFieldsForCategory(categoryId: string): CompletenessFieldConfig[] {
  const category = getDataCompletenessConfig().categories.find((c) => c.id === categoryId);
  return category ? category.fields.filter((f) => f.enabled) : [];
}
/**
 * Flatten every enabled field across all categories, tagging each entry
 * with the id and name of the category it belongs to.
 */
export function getAllEnabledFields(): { field: CompletenessFieldConfig; categoryId: string; categoryName: string }[] {
  const config = getDataCompletenessConfig();
  return config.categories.flatMap((category) =>
    category.fields
      .filter((field) => field.enabled)
      .map((field) => ({ field, categoryId: category.id, categoryName: category.name }))
  );
}
/**
 * Resolve a dotted field path (e.g. "a.b.c") against an object.
 * Returns null as soon as an intermediate segment is null/undefined;
 * a missing leaf property yields undefined (plain property access).
 */
function getFieldValue(app: any, fieldPath: string): any {
  let current: any = app;
  for (const segment of fieldPath.split('.')) {
    if (current === null || current === undefined) {
      return null;
    }
    current = current[segment];
  }
  return current;
}
/**
 * Decide whether the field at fieldPath counts as "filled" for the
 * completeness score. Arrays count when non-empty; reference objects
 * (anything with an objectId key) count when objectId is truthy;
 * primitives count unless null, undefined or the empty string.
 */
function isFieldFilled(app: any, fieldPath: string): boolean {
  const value = getFieldValue(app, fieldPath);
  if (Array.isArray(value)) {
    // e.g. applicationFunctions: at least one entry required
    return value.length > 0;
  }
  if (value && typeof value === 'object' && 'objectId' in value) {
    return Boolean(value.objectId);
  }
  // Primitive values: 0 and false are deliberate values and count as filled.
  return value !== null && value !== undefined && value !== '';
}
/**
 * Calculate the data completeness score for a single application as the
 * percentage (0-100) of enabled configuration fields that are filled.
 * Returns 0 when no fields are enabled at all.
 */
export function calculateApplicationCompleteness(app: any): number {
  const config = getDataCompletenessConfig();
  // Collect every enabled field path across all categories in one pass.
  const fieldPaths = config.categories.flatMap((category) =>
    category.fields.filter((f) => f.enabled).map((f) => f.fieldPath)
  );
  if (fieldPaths.length === 0) {
    return 0;
  }
  const filledCount = fieldPaths.filter((path) => isFieldFilled(app, path)).length;
  return (filledCount / fieldPaths.length) * 100;
}

View File

@@ -45,6 +45,7 @@ import type {
PlatformWithWorkloads,
} from '../types/index.js';
import { calculateRequiredEffortWithMinMax } from './effortCalculation.js';
import { calculateApplicationCompleteness } from './dataCompletenessConfig.js';
// Determine if we should use real Jira Assets or mock data
const useJiraAssets = !!(config.jiraPat && config.jiraSchemaId);
@@ -286,6 +287,36 @@ async function toApplicationDetails(app: ApplicationComponent): Promise<Applicat
overrideFTE: app.applicationManagementOverrideFTE ?? null,
requiredEffortApplicationManagement: null,
};
// Calculate data completeness percentage
// Convert ApplicationListItem-like structure to format expected by completeness calculator
const appForCompleteness = {
...result,
organisation: organisation?.name || null,
applicationFunctions: result.applicationFunctions,
status: result.status,
businessImpactAnalyse: businessImpactAnalyse,
hostingType: hostingType,
supplierProduct: result.supplierProduct,
businessOwner: result.businessOwner,
systemOwner: result.systemOwner,
functionalApplicationManagement: result.functionalApplicationManagement,
technicalApplicationManagement: result.technicalApplicationManagement,
governanceModel: governanceModel,
applicationType: applicationType,
applicationManagementHosting: applicationManagementHosting,
applicationManagementTAM: applicationManagementTAM,
dynamicsFactor: dynamicsFactor,
complexityFactor: complexityFactor,
numberOfUsers: numberOfUsers,
};
const completenessPercentage = calculateApplicationCompleteness(appForCompleteness);
return {
...result,
dataCompletenessPercentage: Math.round(completenessPercentage * 10) / 10, // Round to 1 decimal
};
}
// Pre-loaded factor caches for synchronous access
@@ -416,16 +447,17 @@ function toApplicationListItem(app: ApplicationComponent): ApplicationListItem {
const applicationFunctions = toReferenceValues(app.applicationFunction);
const applicationManagementHosting = toReferenceValue(app.applicationManagementHosting);
const applicationManagementTAM = toReferenceValue(app.applicationManagementTAM);
const businessImpactAnalyse = toReferenceValue(app.businessImpactAnalyse);
// Calculate effort using minimal details
const minimalDetails = toMinimalDetailsForEffort(app);
const effortResult = calculateRequiredEffortWithMinMax(minimalDetails);
return {
const result: ApplicationListItem = {
id: app.id,
key: app.objectKey,
name: app.label,
status: (app.status || 'In Production') as ApplicationStatus,
status: app.status as ApplicationStatus | null,
applicationFunctions,
governanceModel,
dynamicsFactor,
@@ -436,11 +468,41 @@ function toApplicationListItem(app: ApplicationComponent): ApplicationListItem {
platform,
applicationManagementHosting,
applicationManagementTAM,
businessImpactAnalyse,
requiredEffortApplicationManagement: effortResult.finalEffort,
minFTE: effortResult.minFTE,
maxFTE: effortResult.maxFTE,
overrideFTE: app.applicationManagementOverrideFTE ?? null,
};
// Calculate data completeness percentage
// Convert ApplicationListItem to format expected by completeness calculator
const appForCompleteness = {
organisation: toReferenceValue(app.organisation)?.name || null,
applicationFunctions: result.applicationFunctions,
status: result.status,
businessImpactAnalyse: result.businessImpactAnalyse,
hostingType: toReferenceValue(app.applicationComponentHostingType),
supplierProduct: app.supplierProduct?.label || null,
businessOwner: app.businessOwner?.label || null,
systemOwner: app.systemOwner?.label || null,
functionalApplicationManagement: app.functionalApplicationManagement || null,
technicalApplicationManagement: app.technicalApplicationManagement?.label || null,
governanceModel: result.governanceModel,
applicationType: result.applicationType,
applicationManagementHosting: result.applicationManagementHosting,
applicationManagementTAM: result.applicationManagementTAM,
dynamicsFactor: result.dynamicsFactor,
complexityFactor: result.complexityFactor,
numberOfUsers: toReferenceValue(app.applicationManagementNumberOfUsers),
};
const completenessPercentage = calculateApplicationCompleteness(appForCompleteness);
return {
...result,
dataCompletenessPercentage: Math.round(completenessPercentage * 10) / 10, // Round to 1 decimal
};
}
// =============================================================================
@@ -475,7 +537,11 @@ export const dataService = {
}
if (filters.statuses && filters.statuses.length > 0) {
apps = apps.filter(app => filters.statuses!.includes(app.status as ApplicationStatus));
apps = apps.filter(app => {
// Handle empty/null status - treat as 'Undefined' for filtering
const status = app.status || 'Undefined';
return filters.statuses!.includes(status as ApplicationStatus);
});
}
// Organisation filter (now ObjectReference)
@@ -988,7 +1054,7 @@ export const dataService = {
unclassifiedCount,
withApplicationFunction,
applicationFunctionPercentage,
cacheStatus: cmdbService.getCacheStats(),
cacheStatus: await cmdbService.getCacheStats(),
};
if (includeDistributions) {
@@ -1030,6 +1096,186 @@ export const dataService = {
return jiraAssetsService.getTeamDashboardData(excludedStatuses);
},
/**
* Get team portfolio health metrics
* Calculates average complexity, dynamics, BIA, and governance maturity per team
*/
async getTeamPortfolioHealth(excludedStatuses: ApplicationStatus[] = []): Promise<{
teams: Array<{
team: ReferenceValue | null;
metrics: {
complexity: number; // Average complexity factor (0-1 normalized)
dynamics: number; // Average dynamics factor (0-1 normalized)
bia: number; // Average BIA level (0-1 normalized, F=1.0, A=0.0)
governanceMaturity: number; // Average governance maturity (0-1 normalized, A=1.0, E=0.0)
};
applicationCount: number;
}>;
}> {
// For mock data, use the same implementation (cmdbService routes to mock data when useJiraAssets is false)
// Get all applications from cache to access all fields including BIA
let apps = await cmdbService.getObjects<ApplicationComponent>('ApplicationComponent');
// Filter out excluded statuses
if (excludedStatuses.length > 0) {
apps = apps.filter(app => !app.status || !excludedStatuses.includes(app.status as ApplicationStatus));
}
// Ensure factor caches are loaded
await ensureFactorCaches();
// Helper to convert BIA letter to numeric (F=6, E=5, D=4, C=3, B=2, A=1)
// Handles formats like "BIA-2024-0042 (Klasse E)" or just "E"
const biaToNumeric = (bia: string | null): number | null => {
if (!bia) return null;
// Extract letter from patterns like "Klasse E", "E", or "(Klasse E)"
const match = bia.match(/[Kk]lasse\s+([A-F])/i) || bia.match(/\b([A-F])\b/i);
if (match) {
const letter = match[1].toUpperCase();
const biaMap: Record<string, number> = { 'F': 6, 'E': 5, 'D': 4, 'C': 3, 'B': 2, 'A': 1 };
return biaMap[letter] || null;
}
return null;
};
// Helper to convert governance model to maturity score (A=5, B=4, C=3, D=2, E=1)
const governanceToMaturity = (govModel: string | null): number | null => {
if (!govModel) return null;
// Extract letter from "Regiemodel X" or just "X"
const match = govModel.match(/Regiemodel\s+([A-E]\+?)/i) || govModel.match(/^([A-E]\+?)$/i);
if (match) {
const letter = match[1].toUpperCase();
if (letter === 'A') return 5;
if (letter === 'B' || letter === 'B+') return 4;
if (letter === 'C') return 3;
if (letter === 'D') return 2;
if (letter === 'E') return 1;
}
return null;
};
// Helper to get factor value from ReferenceValue
const getFactorValue = (ref: ReferenceValue | null): number | null => {
if (!ref) return null;
// Look up in dynamics factors cache
const dynamicsFactor = dynamicsFactorCache?.get(ref.objectId);
if (dynamicsFactor?.factor !== undefined) return dynamicsFactor.factor;
// Look up in complexity factors cache
const complexityFactor = complexityFactorCache?.get(ref.objectId);
if (complexityFactor?.factor !== undefined) return complexityFactor.factor;
return null;
};
// Collect all applications grouped by team
const teamMetrics: Map<string, {
team: ReferenceValue | null;
complexityValues: number[];
dynamicsValues: number[];
biaValues: number[];
governanceValues: number[];
applicationCount: number;
}> = new Map();
// Process each application
for (const app of apps) {
// Get team from application (via subteam lookup if needed)
let team: ReferenceValue | null = null;
const applicationSubteam = toReferenceValue((app as any).applicationManagementSubteam);
const applicationTeam = toReferenceValue((app as any).applicationManagementTeam);
// Prefer direct team assignment, otherwise try to get from subteam
if (applicationTeam) {
team = applicationTeam;
} else if (applicationSubteam) {
// Look up team from subteam (would need subteam cache, but for now use subteam as fallback)
team = applicationSubteam; // Fallback: use subteam if team not directly assigned
}
const teamKey = team?.objectId || 'unassigned';
if (!teamMetrics.has(teamKey)) {
teamMetrics.set(teamKey, {
team,
complexityValues: [],
dynamicsValues: [],
biaValues: [],
governanceValues: [],
applicationCount: 0,
});
}
const metrics = teamMetrics.get(teamKey)!;
metrics.applicationCount++;
// Get complexity factor value
if (app.applicationManagementComplexityFactor && typeof app.applicationManagementComplexityFactor === 'object') {
const factorObj = complexityFactorCache?.get(app.applicationManagementComplexityFactor.objectId);
if (factorObj?.factor !== undefined) {
metrics.complexityValues.push(factorObj.factor);
}
}
// Get dynamics factor value
if (app.applicationManagementDynamicsFactor && typeof app.applicationManagementDynamicsFactor === 'object') {
const factorObj = dynamicsFactorCache?.get(app.applicationManagementDynamicsFactor.objectId);
if (factorObj?.factor !== undefined) {
metrics.dynamicsValues.push(factorObj.factor);
}
}
// Get BIA value
if (app.businessImpactAnalyse) {
const biaRef = toReferenceValue(app.businessImpactAnalyse);
if (biaRef) {
const biaNum = biaToNumeric(biaRef.name);
if (biaNum !== null) metrics.biaValues.push(biaNum);
}
}
// Get governance maturity
if (app.ictGovernanceModel) {
const govRef = toReferenceValue(app.ictGovernanceModel);
if (govRef) {
const maturity = governanceToMaturity(govRef.name);
if (maturity !== null) metrics.governanceValues.push(maturity);
}
}
}
// Calculate averages and normalize to 0-1 scale
const result = Array.from(teamMetrics.values()).map(metrics => {
// Calculate averages
const avgComplexity = metrics.complexityValues.length > 0
? metrics.complexityValues.reduce((a, b) => a + b, 0) / metrics.complexityValues.length
: 0;
const avgDynamics = metrics.dynamicsValues.length > 0
? metrics.dynamicsValues.reduce((a, b) => a + b, 0) / metrics.dynamicsValues.length
: 0;
const avgBIA = metrics.biaValues.length > 0
? metrics.biaValues.reduce((a, b) => a + b, 0) / metrics.biaValues.length
: 0;
const avgGovernance = metrics.governanceValues.length > 0
? metrics.governanceValues.reduce((a, b) => a + b, 0) / metrics.governanceValues.length
: 0;
// Normalize to 0-1 scale
// Complexity and Dynamics: assume max factor is 1.0 (already normalized)
// BIA: 1-6 scale -> normalize to 0-1 (1=0.0, 6=1.0)
// Governance: 1-5 scale -> normalize to 0-1 (1=0.0, 5=1.0)
return {
team: metrics.team,
metrics: {
complexity: Math.min(1, Math.max(0, avgComplexity)),
dynamics: Math.min(1, Math.max(0, avgDynamics)),
bia: (avgBIA - 1) / 5, // (1-6) -> (0-1)
governanceMaturity: (avgGovernance - 1) / 4, // (1-5) -> (0-1)
},
applicationCount: metrics.applicationCount,
};
});
return { teams: result };
},
// ===========================================================================
// Utility
// ===========================================================================
@@ -1046,15 +1292,15 @@ export const dataService = {
/**
* Get cache status
*/
getCacheStatus(): CacheStats {
return cacheStore.getStats();
async getCacheStatus(): Promise<CacheStats> {
return await cacheStore.getStats();
},
/**
* Check if cache is warm
*/
isCacheWarm(): boolean {
return cacheStore.isWarm();
async isCacheWarm(): Promise<boolean> {
return await cacheStore.isWarm();
},
/**
@@ -1078,4 +1324,159 @@ export const dataService = {
referenceCache.clear();
clearFactorCaches();
},
  // ===========================================================================
  // Business Importance vs BIA Comparison
  // ===========================================================================
  /**
   * Get Business Importance vs BIA comparison data.
   *
   * Compares the IT-assigned "Business Importance" (numeric prefix 0-6, where
   * 0 = Critical Infrastructure, i.e. lower = more important) against the
   * business-assigned BIA class (A-F, normalized A=1 .. F=6, i.e. higher =
   * higher impact). Returns one row per application plus summary counts of
   * the discrepancy categories.
   */
  async getBusinessImportanceComparison() {
    // Fetch all applications
    const allApps = await cmdbService.getObjects<ApplicationComponent>('ApplicationComponent');
    // Fetch Business Importance reference values from CMDB
    const businessImportanceRefs = await this.getBusinessImportance();
    // Create a map for quick lookup: name -> normalized value
    const biNormalizationMap = new Map<string, number>();
    for (const ref of businessImportanceRefs) {
      // Extract numeric prefix from name (e.g., "0 - Critical Infrastructure" -> 0)
      const match = ref.name.match(/^(\d+)\s*-/);
      if (match) {
        const numValue = parseInt(match[1], 10);
        // Only include 0-6, exclude 9 (Unknown)
        if (numValue >= 0 && numValue <= 6) {
          biNormalizationMap.set(ref.name, numValue);
        }
      }
    }
    const comparisonItems: Array<{
      id: string;
      key: string;
      name: string;
      searchReference: string | null;
      businessImportance: string | null;
      businessImportanceNormalized: number | null;
      businessImpactAnalyse: ReferenceValue | null;
      biaClass: string | null;
      biaClassNormalized: number | null;
      discrepancyScore: number;
      discrepancyCategory: 'high_bi_low_bia' | 'low_bi_high_bia' | 'aligned' | 'missing_data';
    }> = [];
    // Process each application directly from the objects we already have
    for (const app of allApps) {
      // Skip records that cannot be displayed (missing id or label).
      if (!app.id || !app.label) continue;
      // Extract Business Importance from app object
      const businessImportanceRef = toReferenceValue(app.businessImportance);
      const businessImportanceName = businessImportanceRef?.name || null;
      // Normalize Business Importance
      let biNormalized: number | null = null;
      if (businessImportanceName) {
        // Try to find matching ReferenceValue
        const matchedRef = businessImportanceRefs.find(ref => ref.name === businessImportanceName);
        if (matchedRef) {
          biNormalized = biNormalizationMap.get(matchedRef.name) ?? null;
        } else {
          // Fallback: try to extract directly from the string
          const directMatch = businessImportanceName.match(/^(\d+)\s*-/);
          if (directMatch) {
            const numValue = parseInt(directMatch[1], 10);
            if (numValue >= 0 && numValue <= 6) {
              biNormalized = numValue;
            }
          }
        }
      }
      // Extract BIA from app object
      const businessImpactAnalyseRef = toReferenceValue(app.businessImpactAnalyse);
      // Normalize BIA Class
      let biaClass: string | null = null;
      let biaNormalized: number | null = null;
      if (businessImpactAnalyseRef?.name) {
        // Extract class letter from name (e.g., "BIA-2024-0042 (Klasse E)" -> "E")
        const biaMatch = businessImpactAnalyseRef.name.match(/Klasse\s+([A-F])/i);
        if (biaMatch) {
          biaClass = biaMatch[1].toUpperCase();
          // Convert to numeric: A=1, B=2, C=3, D=4, E=5, F=6
          biaNormalized = biaClass.charCodeAt(0) - 64; // A=65, so 65-64=1, etc.
        } else {
          // Try to extract single letter if format is different
          const singleLetterMatch = businessImpactAnalyseRef.name.match(/\b([A-F])\b/i);
          if (singleLetterMatch) {
            biaClass = singleLetterMatch[1].toUpperCase();
            biaNormalized = biaClass.charCodeAt(0) - 64;
          }
        }
      }
      // Calculate discrepancy
      // NOTE(review): discrepancyScore is |BI - BIA| where BI runs 0-6 and BIA
      // runs 1-6 — the two scales are offset by one. Confirm this is intended
      // before tightening the 'aligned' threshold.
      let discrepancyScore = 0;
      let discrepancyCategory: 'high_bi_low_bia' | 'low_bi_high_bia' | 'aligned' | 'missing_data' = 'missing_data';
      if (biNormalized !== null && biaNormalized !== null) {
        discrepancyScore = Math.abs(biNormalized - biaNormalized);
        // Categorize discrepancy
        if (biNormalized <= 2 && biaNormalized <= 2) {
          // High BI (0-2: Critical Infrastructure/Critical/Highest) AND Low BIA (A-B: Low impact)
          // IT thinks critical (0-2) but business says low impact (A-B)
          discrepancyCategory = 'high_bi_low_bia';
        } else if (biNormalized >= 5 && biaNormalized >= 5) {
          // Low BI (5-6: Low/Lowest) AND High BIA (E-F: High impact)
          // IT thinks low priority (5-6) but business says high impact (E-F)
          discrepancyCategory = 'low_bi_high_bia';
        } else if (discrepancyScore <= 2) {
          // Aligned: values are reasonably close (discrepancy ≤ 2)
          discrepancyCategory = 'aligned';
        } else {
          // Medium discrepancy (3-4) - still consider aligned if not in extreme categories
          // NOTE(review): this branch and the one above both yield 'aligned';
          // kept separate deliberately (?) — could collapse to a plain else.
          discrepancyCategory = 'aligned';
        }
      }
      comparisonItems.push({
        id: app.id,
        key: app.objectKey,
        name: app.label,
        searchReference: app.searchReference || null,
        businessImportance: businessImportanceName,
        businessImportanceNormalized: biNormalized,
        businessImpactAnalyse: businessImpactAnalyseRef,
        biaClass,
        biaClassNormalized: biaNormalized,
        discrepancyScore,
        discrepancyCategory,
      });
    }
    // Calculate summary statistics
    const total = comparisonItems.length;
    const withBothFields = comparisonItems.filter(item =>
      item.businessImportanceNormalized !== null && item.biaClassNormalized !== null
    ).length;
    const highBiLowBia = comparisonItems.filter(item => item.discrepancyCategory === 'high_bi_low_bia').length;
    const lowBiHighBia = comparisonItems.filter(item => item.discrepancyCategory === 'low_bi_high_bia').length;
    const aligned = comparisonItems.filter(item => item.discrepancyCategory === 'aligned').length;
    const missingData = comparisonItems.filter(item => item.discrepancyCategory === 'missing_data').length;
    return {
      applications: comparisonItems,
      summary: {
        total,
        withBothFields,
        highBiLowBia,
        lowBiHighBia,
        aligned,
        missingData,
      },
    };
  },
};

View File

@@ -1,25 +1,63 @@
import Database from 'better-sqlite3';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import { logger } from './logger.js';
import type { ClassificationResult } from '../types/index.js';
import { createClassificationsDatabaseAdapter } from './database/factory.js';
import type { DatabaseAdapter } from './database/interface.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const DB_PATH = join(__dirname, '../../data/classifications.db');
class DatabaseService {
private db: Database.Database;
private db: DatabaseAdapter;
private initialized: boolean = false;
private initializationPromise: Promise<void> | null = null;
constructor() {
this.db = new Database(DB_PATH);
this.initialize();
this.db = createClassificationsDatabaseAdapter();
// Start initialization but don't wait for it
this.initializationPromise = this.initialize();
}
private initialize(): void {
/**
* Ensure database is initialized before executing queries
*/
private async ensureInitialized(): Promise<void> {
if (this.initialized) return;
if (this.initializationPromise) {
await this.initializationPromise;
return;
}
// If for some reason initialization wasn't started, start it now
this.initializationPromise = this.initialize();
await this.initializationPromise;
}
private async initialize(): Promise<void> {
if (this.initialized) return;
// Create tables if they don't exist
this.db.exec(`
const isPostgres = process.env.DATABASE_TYPE === 'postgres' || process.env.DATABASE_TYPE === 'postgresql';
const schema = isPostgres ? `
CREATE TABLE IF NOT EXISTS classification_history (
id SERIAL PRIMARY KEY,
application_id TEXT NOT NULL,
application_name TEXT NOT NULL,
changes TEXT NOT NULL,
source TEXT NOT NULL,
timestamp TEXT NOT NULL,
user_id TEXT
);
CREATE TABLE IF NOT EXISTS session_state (
key TEXT PRIMARY KEY,
value TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE INDEX IF NOT EXISTS idx_classification_app_id ON classification_history(application_id);
CREATE INDEX IF NOT EXISTS idx_classification_timestamp ON classification_history(timestamp);
` : `
CREATE TABLE IF NOT EXISTS classification_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
application_id TEXT NOT NULL,
@@ -38,35 +76,35 @@ class DatabaseService {
CREATE INDEX IF NOT EXISTS idx_classification_app_id ON classification_history(application_id);
CREATE INDEX IF NOT EXISTS idx_classification_timestamp ON classification_history(timestamp);
`);
`;
await this.db.exec(schema);
this.initialized = true;
logger.info('Database initialized');
}
saveClassificationResult(result: ClassificationResult): void {
const stmt = this.db.prepare(`
async saveClassificationResult(result: ClassificationResult): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
INSERT INTO classification_history (application_id, application_name, changes, source, timestamp, user_id)
VALUES (?, ?, ?, ?, ?, ?)
`);
stmt.run(
`, [
result.applicationId,
result.applicationName,
JSON.stringify(result.changes),
result.source,
result.timestamp.toISOString(),
result.userId || null
);
]);
}
getClassificationHistory(limit: number = 50): ClassificationResult[] {
const stmt = this.db.prepare(`
async getClassificationHistory(limit: number = 50): Promise<ClassificationResult[]> {
await this.ensureInitialized();
const rows = await this.db.query<any>(`
SELECT * FROM classification_history
ORDER BY timestamp DESC
LIMIT ?
`);
const rows = stmt.all(limit) as any[];
`, [limit]);
return rows.map((row) => ({
applicationId: row.application_id,
@@ -78,14 +116,13 @@ class DatabaseService {
}));
}
getClassificationsByApplicationId(applicationId: string): ClassificationResult[] {
const stmt = this.db.prepare(`
async getClassificationsByApplicationId(applicationId: string): Promise<ClassificationResult[]> {
await this.ensureInitialized();
const rows = await this.db.query<any>(`
SELECT * FROM classification_history
WHERE application_id = ?
ORDER BY timestamp DESC
`);
const rows = stmt.all(applicationId) as any[];
`, [applicationId]);
return rows.map((row) => ({
applicationId: row.application_id,
@@ -97,47 +134,48 @@ class DatabaseService {
}));
}
saveSessionState(key: string, value: any): void {
const stmt = this.db.prepare(`
async saveSessionState(key: string, value: any): Promise<void> {
await this.ensureInitialized();
const now = new Date().toISOString();
const valueStr = JSON.stringify(value);
await this.db.execute(`
INSERT INTO session_state (key, value, updated_at)
VALUES (?, ?, ?)
ON CONFLICT(key) DO UPDATE SET value = ?, updated_at = ?
`);
const now = new Date().toISOString();
const valueStr = JSON.stringify(value);
stmt.run(key, valueStr, now, valueStr, now);
`, [key, valueStr, now, valueStr, now]);
}
getSessionState<T>(key: string): T | null {
const stmt = this.db.prepare(`
async getSessionState<T>(key: string): Promise<T | null> {
await this.ensureInitialized();
const row = await this.db.queryOne<{ value: string }>(`
SELECT value FROM session_state WHERE key = ?
`);
const row = stmt.get(key) as { value: string } | undefined;
`, [key]);
if (row) {
return JSON.parse(row.value) as T;
}
return null;
}
clearSessionState(key: string): void {
const stmt = this.db.prepare(`
async clearSessionState(key: string): Promise<void> {
await this.ensureInitialized();
await this.db.execute(`
DELETE FROM session_state WHERE key = ?
`);
stmt.run(key);
`, [key]);
}
getStats(): { totalClassifications: number; bySource: Record<string, number> } {
const totalStmt = this.db.prepare(`
async getStats(): Promise<{ totalClassifications: number; bySource: Record<string, number> }> {
await this.ensureInitialized();
const totalRow = await this.db.queryOne<{ count: number }>(`
SELECT COUNT(*) as count FROM classification_history
`);
const total = (totalStmt.get() as { count: number }).count;
const total = totalRow?.count || 0;
const bySourceStmt = this.db.prepare(`
const bySourceRows = await this.db.query<{ source: string; count: number }>(`
SELECT source, COUNT(*) as count FROM classification_history GROUP BY source
`);
const bySourceRows = bySourceStmt.all() as { source: string; count: number }[];
const bySource: Record<string, number> = {};
bySourceRows.forEach((row) => {
bySource[row.source] = row.count;
@@ -146,8 +184,8 @@ class DatabaseService {
return { totalClassifications: total, bySource };
}
close(): void {
this.db.close();
async close(): Promise<void> {
await this.db.close();
}
}

View File

@@ -0,0 +1,79 @@
/**
* Database Factory
*
* Creates the appropriate database adapter based on environment configuration.
*/
import { logger } from '../logger.js';
import { PostgresAdapter } from './postgresAdapter.js';
import { SqliteAdapter } from './sqliteAdapter.js';
import type { DatabaseAdapter } from './interface.js';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
/**
 * Build a PostgreSQL connection URL from individual DATABASE_* environment
 * variables.
 *
 * Credentials are URL-encoded so that passwords containing reserved URL
 * characters (e.g. '@', ':', '/', '#') do not corrupt the connection string —
 * the previous raw interpolation broke for such passwords.
 *
 * @param name Database name to connect to
 * @returns A postgresql:// connection URL, with `?sslmode=require` appended
 *          when DATABASE_SSL=true
 */
function buildPostgresUrl(name: string): string {
  const host = process.env.DATABASE_HOST || 'localhost';
  const port = process.env.DATABASE_PORT || '5432';
  const user = process.env.DATABASE_USER || 'cmdb';
  const password = process.env.DATABASE_PASSWORD || '';
  const ssl = process.env.DATABASE_SSL === 'true' ? '?sslmode=require' : '';
  return `postgresql://${encodeURIComponent(user)}:${encodeURIComponent(password)}@${host}:${port}/${name}${ssl}`;
}
/**
 * Create a database adapter based on environment variables.
 *
 * @param dbType Optional explicit engine ('postgres'/'postgresql'/'sqlite');
 *               falls back to DATABASE_TYPE, then 'sqlite'
 * @param dbPath Optional SQLite file path; defaults to data/cmdb-cache.db
 */
export function createDatabaseAdapter(dbType?: string, dbPath?: string): DatabaseAdapter {
  const type = dbType || process.env.DATABASE_TYPE || 'sqlite';
  const databaseUrl = process.env.DATABASE_URL;
  if (type === 'postgres' || type === 'postgresql') {
    if (!databaseUrl) {
      // No DATABASE_URL — construct one from the individual components
      const constructedUrl = buildPostgresUrl(process.env.DATABASE_NAME || 'cmdb');
      logger.info('Creating PostgreSQL adapter with constructed connection string');
      return new PostgresAdapter(constructedUrl);
    }
    logger.info('Creating PostgreSQL adapter');
    return new PostgresAdapter(databaseUrl);
  }
  // Default to SQLite
  const defaultPath = dbPath || join(__dirname, '../../data/cmdb-cache.db');
  logger.info(`Creating SQLite adapter with path: ${defaultPath}`);
  return new SqliteAdapter(defaultPath);
}
/**
 * Create a database adapter for the classifications database.
 *
 * Same selection logic as createDatabaseAdapter, but honors the
 * CLASSIFICATIONS_DATABASE_URL / CLASSIFICATIONS_DATABASE_NAME overrides and
 * uses data/classifications.db as the SQLite default.
 */
export function createClassificationsDatabaseAdapter(): DatabaseAdapter {
  const type = process.env.DATABASE_TYPE || 'sqlite';
  const databaseUrl = process.env.CLASSIFICATIONS_DATABASE_URL || process.env.DATABASE_URL;
  if (type === 'postgres' || type === 'postgresql') {
    if (!databaseUrl) {
      // No URL — construct one, preferring the classifications-specific DB name
      const name = process.env.CLASSIFICATIONS_DATABASE_NAME || process.env.DATABASE_NAME || 'cmdb';
      const constructedUrl = buildPostgresUrl(name);
      logger.info('Creating PostgreSQL adapter for classifications with constructed connection string');
      return new PostgresAdapter(constructedUrl);
    }
    logger.info('Creating PostgreSQL adapter for classifications');
    return new PostgresAdapter(databaseUrl);
  }
  // Default to SQLite
  const defaultPath = join(__dirname, '../../data/classifications.db');
  logger.info(`Creating SQLite adapter for classifications with path: ${defaultPath}`);
  return new SqliteAdapter(defaultPath);
}

View File

@@ -0,0 +1,43 @@
/**
 * Database Adapter Interface
 *
 * Provides a unified interface for database operations across different database engines.
 * This allows switching between SQLite (development) and PostgreSQL (production) seamlessly.
 *
 * Conventions shared by implementations:
 * - SQL text uses `?` positional placeholders; adapters translate to their
 *   engine's native style where needed (e.g. `$1`, `$2` for PostgreSQL).
 * - All methods are async even when the underlying driver is synchronous
 *   (better-sqlite3), so callers are engine-agnostic.
 */
export interface DatabaseAdapter {
  /**
   * Execute a query and return results
   *
   * @param sql SQL with `?` placeholders
   * @param params Positional values bound to the placeholders, in order
   * @returns All matching rows (empty array when none)
   */
  query<T = any>(sql: string, params?: any[]): Promise<T[]>;
  /**
   * Execute a query and return a single row
   *
   * @returns The first row, or null when the query matches nothing
   */
  queryOne<T = any>(sql: string, params?: any[]): Promise<T | null>;
  /**
   * Execute a statement (INSERT, UPDATE, DELETE) and return affected rows
   */
  execute(sql: string, params?: any[]): Promise<number>;
  /**
   * Execute multiple statements in a transaction
   *
   * The callback receives a transaction-scoped adapter; all operations on it
   * run inside the same transaction, which is committed when the callback
   * resolves and rolled back when it rejects.
   */
  transaction<T>(callback: (db: DatabaseAdapter) => Promise<T>): Promise<T>;
  /**
   * Execute raw SQL (for schema initialization, etc.)
   */
  exec(sql: string): Promise<void>;
  /**
   * Close the database connection
   */
  close(): Promise<void>;
  /**
   * Get database size in bytes (if applicable)
   */
  getSizeBytes?(): Promise<number>;
}

View File

@@ -0,0 +1,149 @@
/**
* PostgreSQL Database Adapter
*
* Implements DatabaseAdapter for PostgreSQL using the 'pg' library.
*/
import { Pool, PoolClient } from 'pg';
import { logger } from '../logger.js';
import type { DatabaseAdapter } from './interface.js';
export class PostgresAdapter implements DatabaseAdapter {
  private pool: Pool;
  private connectionString: string;

  /**
   * @param connectionString Standard PostgreSQL connection URL
   *   (postgresql://user:pass@host:port/db[?sslmode=require])
   */
  constructor(connectionString: string) {
    this.connectionString = connectionString;
    this.pool = new Pool({
      connectionString,
      max: 20, // Maximum number of clients in the pool
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 10000, // Increased timeout for initial connection
    });
    // Handle pool errors (idle clients dropping, etc.)
    this.pool.on('error', (err) => {
      logger.error('PostgreSQL pool error:', err);
    });
    // Test connection on creation. Fire-and-forget: a failure is logged (with
    // the password masked) but construction still succeeds, so later queries
    // can retry once the database becomes reachable.
    this.pool.query('SELECT 1').catch((err) => {
      logger.error('PostgreSQL: Failed to connect to database', err);
      logger.error('Connection string:', connectionString.replace(/:[^:@]+@/, ':****@'));
    });
  }

  /**
   * Execute a query and return all rows.
   * SQLite-style `?` placeholders in `sql` are converted to `$1`, `$2`, …
   */
  async query<T = any>(sql: string, params?: any[]): Promise<T[]> {
    try {
      // Convert SQLite-style ? placeholders to PostgreSQL $1, $2, etc.
      const convertedSql = this.convertPlaceholders(sql);
      const result = await this.pool.query(convertedSql, params);
      return result.rows as T[];
    } catch (error) {
      logger.error('PostgreSQL query error:', { sql, params, error });
      throw error;
    }
  }

  /** Execute a query and return the first row, or null when there is none. */
  async queryOne<T = any>(sql: string, params?: any[]): Promise<T | null> {
    const rows = await this.query<T>(sql, params);
    return rows.length > 0 ? rows[0] : null;
  }

  /** Execute a statement (INSERT/UPDATE/DELETE) and return the affected row count. */
  async execute(sql: string, params?: any[]): Promise<number> {
    try {
      const convertedSql = this.convertPlaceholders(sql);
      const result = await this.pool.query(convertedSql, params);
      return result.rowCount || 0;
    } catch (error) {
      logger.error('PostgreSQL execute error:', { sql, params, error });
      throw error;
    }
  }

  /**
   * Run `callback` inside a single BEGIN/COMMIT transaction on one pooled
   * client. The callback receives a transaction-scoped adapter; rejection
   * triggers ROLLBACK and rethrows. Nested transaction() calls on that
   * adapter simply run within the same outer transaction.
   */
  async transaction<T>(callback: (db: DatabaseAdapter) => Promise<T>): Promise<T> {
    const client: PoolClient = await this.pool.connect();
    try {
      await client.query('BEGIN');
      // Create a transaction-scoped adapter bound to this client
      const transactionAdapter: DatabaseAdapter = {
        query: async (sql: string, params?: any[]) => {
          const convertedSql = this.convertPlaceholders(sql);
          const result = await client.query(convertedSql, params);
          return result.rows;
        },
        queryOne: async (sql: string, params?: any[]) => {
          const convertedSql = this.convertPlaceholders(sql);
          const result = await client.query(convertedSql, params);
          return result.rows.length > 0 ? result.rows[0] : null;
        },
        execute: async (sql: string, params?: any[]) => {
          const convertedSql = this.convertPlaceholders(sql);
          const result = await client.query(convertedSql, params);
          return result.rowCount || 0;
        },
        transaction: async (cb) => {
          // Nested transactions not supported - just execute in same transaction
          return cb(transactionAdapter);
        },
        exec: async (sql: string) => {
          await client.query(sql);
        },
        close: async () => {
          // Don't close client in nested transaction
        },
      };
      const result = await callback(transactionAdapter);
      await client.query('COMMIT');
      return result;
    } catch (error) {
      await client.query('ROLLBACK');
      logger.error('PostgreSQL transaction error:', error);
      throw error;
    } finally {
      client.release();
    }
  }

  /**
   * Execute raw SQL (schema setup etc.).
   * NOTE(review): statements are split naively on ';', which breaks on
   * semicolons inside string literals or dollar-quoted function bodies.
   * Fine for the simple DDL used here — revisit before feeding it anything
   * fancier.
   */
  async exec(sql: string): Promise<void> {
    try {
      // Split multiple statements and execute them
      const statements = sql.split(';').filter(s => s.trim().length > 0);
      for (const statement of statements) {
        if (statement.trim()) {
          await this.pool.query(statement.trim());
        }
      }
    } catch (error) {
      logger.error('PostgreSQL exec error:', { sql, error });
      throw error;
    }
  }

  /** Drain and close the connection pool. */
  async close(): Promise<void> {
    await this.pool.end();
  }

  /** Size of the current database in bytes (0 on failure). */
  async getSizeBytes(): Promise<number> {
    try {
      // pg returns BIGINT columns (such as pg_database_size) as strings, so
      // coerce explicitly — the previous `result[0]?.size || 0` passed the
      // string through despite the declared number return type.
      const row = await this.queryOne<{ size: string | number }>(`
        SELECT pg_database_size(current_database()) as size
      `);
      return row ? Number(row.size) : 0;
    } catch (error) {
      logger.error('PostgreSQL getSizeBytes error:', error);
      return 0;
    }
  }

  /**
   * Convert SQLite-style ? placeholders to PostgreSQL $1, $2, etc.
   * NOTE(review): this rewrites every '?', including any inside string
   * literals — all SQL in this codebase passes values via params, so that
   * case should not occur; confirm before embedding literal '?' in SQL text.
   */
  private convertPlaceholders(sql: string): string {
    let paramIndex = 1;
    return sql.replace(/\?/g, () => `$${paramIndex++}`);
  }
}

View File

@@ -0,0 +1,132 @@
/**
* SQLite Database Adapter
*
* Implements DatabaseAdapter for SQLite using 'better-sqlite3'.
* Maintains backward compatibility with existing SQLite code.
*/
import Database from 'better-sqlite3';
import * as fs from 'fs';
import { dirname } from 'path';
import { logger } from '../logger.js';
import type { DatabaseAdapter } from './interface.js';
export class SqliteAdapter implements DatabaseAdapter {
  private db: Database.Database;
  private dbPath: string;

  /**
   * Open (creating if necessary) the SQLite database at `dbPath`.
   * The parent directory is created when missing.
   */
  constructor(dbPath: string) {
    this.dbPath = dbPath;
    // Ensure directory exists. This module is ESM (sibling files resolve paths
    // via import.meta.url), so CommonJS require() is not available at runtime —
    // the previous require('path').dirname(...) threw a ReferenceError here.
    const dir = dirname(dbPath);
    if (!fs.existsSync(dir)) {
      fs.mkdirSync(dir, { recursive: true });
    }
    this.db = new Database(dbPath);
    // Enable foreign keys and WAL mode for better concurrency
    this.db.pragma('foreign_keys = ON');
    this.db.pragma('journal_mode = WAL');
  }

  /** Execute a query and return all rows (driver is synchronous under the hood). */
  async query<T = any>(sql: string, params?: any[]): Promise<T[]> {
    try {
      const stmt = this.db.prepare(sql);
      const rows = stmt.all(...(params || [])) as T[];
      return rows;
    } catch (error) {
      logger.error('SQLite query error:', { sql, params, error });
      throw error;
    }
  }

  /** Execute a query and return the first row, or null when there is none. */
  async queryOne<T = any>(sql: string, params?: any[]): Promise<T | null> {
    try {
      const stmt = this.db.prepare(sql);
      const row = stmt.get(...(params || [])) as T | undefined;
      return row || null;
    } catch (error) {
      logger.error('SQLite queryOne error:', { sql, params, error });
      throw error;
    }
  }

  /** Execute a statement (INSERT/UPDATE/DELETE) and return the affected row count. */
  async execute(sql: string, params?: any[]): Promise<number> {
    try {
      const stmt = this.db.prepare(sql);
      const result = stmt.run(...(params || []));
      return result.changes || 0;
    } catch (error) {
      logger.error('SQLite execute error:', { sql, params, error });
      throw error;
    }
  }

  /**
   * Run `callback` inside a single transaction.
   *
   * better-sqlite3's db.transaction() only covers the synchronous portion of
   * its function: an async callback returns a pending promise immediately, so
   * the transaction would commit before any awaited statements executed. We
   * therefore drive BEGIN/COMMIT/ROLLBACK explicitly so the whole async
   * callback is covered.
   *
   * NOTE(review): statements issued on this adapter from OTHER async contexts
   * while the callback is awaiting would join this transaction (single
   * connection). Acceptable for current single-writer usage — confirm if
   * concurrent writers are ever introduced.
   */
  async transaction<T>(callback: (db: DatabaseAdapter) => Promise<T>): Promise<T> {
    // Transaction-scoped adapter: same connection, so statements naturally
    // participate in the open transaction.
    const transactionAdapter: DatabaseAdapter = {
      query: async (sql: string, params?: any[]) => {
        const stmt = this.db.prepare(sql);
        return stmt.all(...(params || [])) as any[];
      },
      queryOne: async <R = any>(sql: string, params?: any[]): Promise<R | null> => {
        const stmt = this.db.prepare(sql);
        const row = stmt.get(...(params || [])) as R | undefined;
        return row || null;
      },
      execute: async (sql: string, params?: any[]) => {
        const stmt = this.db.prepare(sql);
        const result = stmt.run(...(params || []));
        return result.changes || 0;
      },
      transaction: async (cb) => {
        // Nested transactions - just execute in same transaction
        return cb(transactionAdapter);
      },
      exec: async (sql: string) => {
        this.db.exec(sql);
      },
      close: async () => {
        // Don't close in nested transaction
      },
    };
    this.db.exec('BEGIN');
    try {
      const result = await callback(transactionAdapter);
      this.db.exec('COMMIT');
      return result;
    } catch (error) {
      this.db.exec('ROLLBACK');
      logger.error('SQLite transaction error:', error);
      throw error;
    }
  }

  /** Execute raw SQL (schema setup etc.); better-sqlite3 handles multi-statement scripts. */
  async exec(sql: string): Promise<void> {
    try {
      this.db.exec(sql);
    } catch (error) {
      logger.error('SQLite exec error:', { sql, error });
      throw error;
    }
  }

  /** Close the underlying database handle. */
  async close(): Promise<void> {
    this.db.close();
  }

  /** Size of the main database file in bytes (0 when missing or on error; excludes -wal/-shm). */
  async getSizeBytes(): Promise<number> {
    try {
      if (fs.existsSync(this.dbPath)) {
        const stats = fs.statSync(this.dbPath);
        return stats.size;
      }
      return 0;
    } catch (error) {
      logger.error('SQLite getSizeBytes error:', error);
      return 0;
    }
  }
}

View File

@@ -97,6 +97,8 @@ class JiraAssetsService {
private complexityFactorsCache: Map<string, ReferenceValue> | null = null;
// Cache: Number of Users with factors
private numberOfUsersCache: Map<string, ReferenceValue> | null = null;
// Cache: Reference objects fetched via fallback (key: objectKey -> ReferenceValue)
private referenceObjectCache: Map<string, ReferenceValue> = new Map();
// Cache: Team dashboard data
private teamDashboardCache: { data: TeamDashboardData; timestamp: number } | null = null;
private readonly TEAM_DASHBOARD_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
@@ -414,11 +416,11 @@ class JiraAssetsService {
}
// Get reference value with schema fallback for attribute lookup
private getReferenceValueWithSchema(
private async getReferenceValueWithSchema(
obj: JiraAssetsObject,
attributeName: string,
attrSchema?: Map<number, string>
): ReferenceValue | null {
): Promise<ReferenceValue | null> {
const attr = this.getAttributeByName(obj, attributeName, attrSchema);
if (!attr || attr.objectAttributeValues.length === 0) {
return null;
@@ -432,6 +434,50 @@ class JiraAssetsService {
name: value.referencedObject.label,
};
}
// Fallback: if referencedObject is missing but we have a value, try to fetch it separately
// Note: value.value might be an object key (e.g., "GOV-A") or an object ID
if (value.value && !value.referencedObject) {
// Check cache first
const cached = this.referenceObjectCache.get(value.value);
if (cached) {
return cached;
}
try {
// Try to fetch the referenced object by its key or ID
// First try as object key (most common)
let refObj: JiraAssetsObject | null = null;
try {
refObj = await this.request<JiraAssetsObject>(`/object/${value.value}`);
} catch (keyError) {
// If that fails, try as object ID
try {
refObj = await this.request<JiraAssetsObject>(`/object/${parseInt(value.value, 10)}`);
} catch (idError) {
// Both failed, log and continue
logger.debug(`getReferenceValueWithSchema: Could not fetch referenced object for value "${value.value}" (tried as key and ID) for attribute "${attributeName}" on object ${obj.objectKey}`);
}
}
if (refObj) {
const refValue: ReferenceValue = {
objectId: refObj.id.toString(),
key: refObj.objectKey,
name: refObj.label,
};
// Cache it for future use
this.referenceObjectCache.set(value.value, refValue);
this.referenceObjectCache.set(refObj.objectKey, refValue);
this.referenceObjectCache.set(refObj.id.toString(), refValue);
return refValue;
}
} catch (error) {
// If fetching fails, log but don't throw - just return null
logger.debug(`getReferenceValueWithSchema: Failed to fetch referenced object ${value.value} for attribute "${attributeName}" on object ${obj.objectKey}`, error);
}
}
return null;
}
@@ -511,26 +557,27 @@ class JiraAssetsService {
// Ensure factor caches are populated (should be instant after initial population)
await this.ensureFactorCaches();
// Get reference values and enrich with factors
const dynamicsFactor = this.enrichWithFactor(
// Get reference values and enrich with factors (now async)
const [dynamicsFactorRaw, complexityFactorRaw, numberOfUsersRaw, governanceModel, applicationType, businessImpactAnalyse, applicationManagementHosting, applicationManagementTAM, applicationSubteam, platform] = await Promise.all([
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.DYNAMICS_FACTOR, attrSchema),
this.dynamicsFactorsCache
);
const complexityFactor = this.enrichWithFactor(
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.COMPLEXITY_FACTOR, attrSchema),
this.complexityFactorsCache
);
const numberOfUsers = this.enrichWithFactor(
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.NUMBER_OF_USERS, attrSchema),
this.numberOfUsersCache
);
// Get other reference values needed for list view and effort calculation
const governanceModel = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.GOVERNANCE_MODEL, attrSchema);
const applicationType = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_TYPE, attrSchema);
const businessImpactAnalyse = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_IMPACT_ANALYSE, attrSchema);
const applicationManagementHosting = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_HOSTING, attrSchema);
const applicationManagementTAM = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_TAM, attrSchema);
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.GOVERNANCE_MODEL, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_TYPE, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_IMPACT_ANALYSE, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_HOSTING, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_TAM, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_SUBTEAM, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.PLATFORM, attrSchema),
]);
if (!governanceModel && obj.objectKey) {
logger.debug(`parseJiraObject: No governanceModel found for ${obj.objectKey}. Attribute name: ${ATTRIBUTE_NAMES.GOVERNANCE_MODEL}`);
}
const dynamicsFactor = this.enrichWithFactor(dynamicsFactorRaw, this.dynamicsFactorsCache);
const complexityFactor = this.enrichWithFactor(complexityFactorRaw, this.complexityFactorsCache);
const numberOfUsers = this.enrichWithFactor(numberOfUsersRaw, this.numberOfUsersCache);
// Get override FTE
const overrideFTE = (() => {
@@ -589,10 +636,10 @@ class JiraAssetsService {
dynamicsFactor,
complexityFactor,
// "Application Management - Subteam" on ApplicationComponent references Subteam objects
applicationSubteam: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_SUBTEAM, attrSchema),
applicationSubteam,
applicationTeam: null, // Team is looked up via Subteam, not directly on ApplicationComponent
applicationType,
platform: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.PLATFORM, attrSchema),
platform,
requiredEffortApplicationManagement: effortResult.finalEffort,
minFTE: effortResult.minFTE,
maxFTE: effortResult.maxFTE,
@@ -610,19 +657,37 @@ class JiraAssetsService {
// Ensure factor caches are populated
await this.ensureFactorCaches();
// Get reference values and enrich with factors
const dynamicsFactor = this.enrichWithFactor(
// Get all reference values in parallel (now async)
const [
dynamicsFactorRaw,
complexityFactorRaw,
numberOfUsersRaw,
hostingType,
businessImpactAnalyse,
governanceModel,
applicationSubteam,
applicationType,
platform,
applicationManagementHosting,
applicationManagementTAM,
] = await Promise.all([
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.DYNAMICS_FACTOR, attrSchema),
this.dynamicsFactorsCache
);
const complexityFactor = this.enrichWithFactor(
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.COMPLEXITY_FACTOR, attrSchema),
this.complexityFactorsCache
);
const numberOfUsers = this.enrichWithFactor(
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.NUMBER_OF_USERS, attrSchema),
this.numberOfUsersCache
);
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.HOSTING_TYPE, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_IMPACT_ANALYSE, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.GOVERNANCE_MODEL, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_SUBTEAM, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_TYPE, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.PLATFORM, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_HOSTING, attrSchema),
this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_TAM, attrSchema),
]);
// Enrich with factors
const dynamicsFactor = this.enrichWithFactor(dynamicsFactorRaw, this.dynamicsFactorsCache);
const complexityFactor = this.enrichWithFactor(complexityFactorRaw, this.complexityFactorsCache);
const numberOfUsers = this.enrichWithFactor(numberOfUsersRaw, this.numberOfUsersCache);
const applicationDetails: ApplicationDetails = {
id: obj.id.toString(),
@@ -632,10 +697,10 @@ class JiraAssetsService {
description: rawDescription ? stripHtmlTags(rawDescription) : null,
supplierProduct: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.SUPPLIER_PRODUCT, attrSchema),
organisation: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.ORGANISATION, attrSchema),
hostingType: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.HOSTING_TYPE, attrSchema),
hostingType,
status: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.STATUS, attrSchema) as ApplicationStatus | null,
businessImportance: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_IMPORTANCE, attrSchema),
businessImpactAnalyse: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_IMPACT_ANALYSE, attrSchema),
businessImpactAnalyse,
systemOwner: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.SYSTEM_OWNER, attrSchema),
businessOwner: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.BUSINESS_OWNER, attrSchema),
functionalApplicationManagement: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.FAM, attrSchema),
@@ -648,12 +713,12 @@ class JiraAssetsService {
dynamicsFactor,
complexityFactor,
numberOfUsers,
governanceModel: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.GOVERNANCE_MODEL, attrSchema),
governanceModel,
// "Application Management - Subteam" on ApplicationComponent references Subteam objects
applicationSubteam: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_SUBTEAM, attrSchema),
applicationSubteam,
applicationTeam: null, // Team is looked up via Subteam, not directly on ApplicationComponent
applicationType: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_TYPE, attrSchema),
platform: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.PLATFORM, attrSchema),
applicationType,
platform,
requiredEffortApplicationManagement: null,
technischeArchitectuur: this.getAttributeValueWithSchema(obj, ATTRIBUTE_NAMES.TECHNISCHE_ARCHITECTUUR, attrSchema),
overrideFTE: (() => {
@@ -662,8 +727,8 @@ class JiraAssetsService {
const parsed = parseFloat(value);
return isNaN(parsed) ? null : parsed;
})(),
applicationManagementHosting: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_HOSTING, attrSchema),
applicationManagementTAM: this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.APPLICATION_MANAGEMENT_TAM, attrSchema),
applicationManagementHosting,
applicationManagementTAM,
};
// Calculate required effort application management
@@ -1417,8 +1482,8 @@ class JiraAssetsService {
for (const obj of response.objectEntries) {
const subteamId = obj.id.toString();
// Get the Team reference from the Subteam
const teamRef = this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.SUBTEAM_TEAM, attrSchema);
// Get the Team reference from the Subteam (now async)
const teamRef = await this.getReferenceValueWithSchema(obj, ATTRIBUTE_NAMES.SUBTEAM_TEAM, attrSchema);
// Enrich the team reference with Type attribute if available
if (teamRef) {
@@ -1649,8 +1714,8 @@ class JiraAssetsService {
const entries = batchResponse.objectEntries || [];
totalFetched += entries.length;
// Process each application in the batch
for (const obj of entries) {
// Process each application in the batch (now async for governance model lookup)
await Promise.all(entries.map(async (obj) => {
// Count by status (STATUS is a string/select value, not a reference)
const status = this.getAttributeValueWithSchema(
obj,
@@ -1673,8 +1738,8 @@ class JiraAssetsService {
classifiedCount++;
}
// Count by governance model
const governanceModel = this.getReferenceValueWithSchema(
// Count by governance model (now async)
const governanceModel = await this.getReferenceValueWithSchema(
obj,
ATTRIBUTE_NAMES.GOVERNANCE_MODEL,
attrSchema
@@ -1686,7 +1751,7 @@ class JiraAssetsService {
byGovernanceModel['Geen regiemodel'] =
(byGovernanceModel['Geen regiemodel'] || 0) + 1;
}
}
}));
// Check if there are more pages
hasMore =

View File

@@ -219,6 +219,29 @@ class JiraAssetsClient {
};
}
/**
 * Return the total number of objects of a given type known to Jira Assets.
 *
 * Runs an IQL search with a page size of 1 so that only the reported
 * totalCount is transferred — far cheaper than paging through every object.
 *
 * @param typeName - Logical CMDB type whose objects should be counted.
 * @returns The total count, or 0 when the type is unknown or the request fails.
 */
async getObjectCount(typeName: CMDBObjectTypeName): Promise<number> {
  const definition = OBJECT_TYPES[typeName];
  if (!definition) {
    logger.warn(`JiraAssetsClient: Unknown type ${typeName}`);
    return 0;
  }

  try {
    // Only the totalCount of the search result is consumed below,
    // so request the smallest possible page (page 1, size 1).
    const query = `objectType = "${definition.name}"`;
    const searchResult = await this.searchObjects(query, 1, 1);
    logger.debug(`JiraAssetsClient: ${typeName} has ${searchResult.totalCount} objects in Jira Assets`);
    return searchResult.totalCount;
  } catch (error) {
    logger.error(`JiraAssetsClient: Failed to get count for ${typeName}`, error);
    return 0;
  }
}
async getAllObjectsOfType(
typeName: CMDBObjectTypeName,
batchSize: number = 40
@@ -292,12 +315,13 @@ class JiraAssetsClient {
const typeName = TYPE_ID_TO_NAME[typeId] || JIRA_NAME_TO_TYPE[jiraObj.objectType?.name];
if (!typeName) {
logger.warn(`JiraAssetsClient: Unknown object type: ${jiraObj.objectType?.name} (ID: ${typeId})`);
logger.warn(`JiraAssetsClient: Unknown object type for object ${jiraObj.objectKey || jiraObj.id}: ${jiraObj.objectType?.name} (ID: ${typeId})`);
return null;
}
const typeDef = OBJECT_TYPES[typeName];
if (!typeDef) {
logger.warn(`JiraAssetsClient: Type definition not found for type: ${typeName} (object: ${jiraObj.objectKey || jiraObj.id})`);
return null;
}

View File

@@ -397,9 +397,11 @@ export class MockDataService {
// Apply status filter
if (filters.statuses && filters.statuses.length > 0) {
filtered = filtered.filter((app) =>
app.status ? filters.statuses!.includes(app.status) : false
);
filtered = filtered.filter((app) => {
// Handle empty/null status - treat as 'Undefined' for filtering
const status = app.status || 'Undefined';
return filters.statuses!.includes(status as ApplicationStatus);
});
}
// Apply applicationFunction filter

View File

@@ -91,7 +91,7 @@ class SyncEngine {
this.isRunning = true;
// Check if we need a full sync
const stats = cacheStore.getStats();
const stats = await cacheStore.getStats();
const lastFullSync = stats.lastFullSync;
const needsFullSync = !stats.isWarm || !lastFullSync || this.isStale(lastFullSync, 24 * 60 * 60 * 1000);
@@ -175,8 +175,8 @@ class SyncEngine {
// Update sync metadata
const now = new Date().toISOString();
cacheStore.setSyncMetadata('lastFullSync', now);
cacheStore.setSyncMetadata('lastIncrementalSync', now);
await cacheStore.setSyncMetadata('lastFullSync', now);
await cacheStore.setSyncMetadata('lastIncrementalSync', now);
this.lastIncrementalSync = new Date();
const duration = Date.now() - startTime;
@@ -223,31 +223,52 @@ class SyncEngine {
// Fetch all objects from Jira
const jiraObjects = await jiraAssetsClient.getAllObjectsOfType(typeName, this.batchSize);
logger.info(`SyncEngine: Fetched ${jiraObjects.length} ${typeName} objects from Jira`);
// Parse and cache objects
const parsedObjects: CMDBObject[] = [];
const failedObjects: Array<{ id: string; key: string; label: string; reason: string }> = [];
for (const jiraObj of jiraObjects) {
const parsed = jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
parsedObjects.push(parsed);
} else {
// Track objects that failed to parse
failedObjects.push({
id: jiraObj.id?.toString() || 'unknown',
key: jiraObj.objectKey || 'unknown',
label: jiraObj.label || 'unknown',
reason: 'parseObject returned null',
});
logger.warn(`SyncEngine: Failed to parse ${typeName} object: ${jiraObj.objectKey || jiraObj.id} (${jiraObj.label || 'unknown label'})`);
}
}
// Log parsing statistics
if (failedObjects.length > 0) {
logger.warn(`SyncEngine: ${failedObjects.length} ${typeName} objects failed to parse:`, failedObjects.map(o => `${o.key} (${o.label})`).join(', '));
}
// Batch upsert to cache
if (parsedObjects.length > 0) {
cacheStore.batchUpsertObjects(typeName, parsedObjects);
await cacheStore.batchUpsertObjects(typeName, parsedObjects);
objectsProcessed = parsedObjects.length;
// Extract relations
for (const obj of parsedObjects) {
cacheStore.extractAndStoreRelations(typeName, obj);
await cacheStore.extractAndStoreRelations(typeName, obj);
relationsExtracted++;
}
}
const duration = Date.now() - startTime;
logger.debug(`SyncEngine: Synced ${objectsProcessed} ${typeName} objects in ${duration}ms`);
const skippedCount = jiraObjects.length - objectsProcessed;
if (skippedCount > 0) {
logger.warn(`SyncEngine: Synced ${objectsProcessed}/${jiraObjects.length} ${typeName} objects in ${duration}ms (${skippedCount} skipped)`);
} else {
logger.debug(`SyncEngine: Synced ${objectsProcessed} ${typeName} objects in ${duration}ms`);
}
return {
objectType: typeName,
@@ -304,7 +325,7 @@ class SyncEngine {
try {
// Get the last sync time
const lastSyncStr = cacheStore.getSyncMetadata('lastIncrementalSync');
const lastSyncStr = await cacheStore.getSyncMetadata('lastIncrementalSync');
const since = lastSyncStr
? new Date(lastSyncStr)
: new Date(Date.now() - 60000); // Default: last minute
@@ -317,7 +338,7 @@ class SyncEngine {
// If no objects returned (e.g., Data Center doesn't support IQL incremental sync),
// check if we should trigger a full sync instead
if (updatedObjects.length === 0) {
const lastFullSyncStr = cacheStore.getSyncMetadata('lastFullSync');
const lastFullSyncStr = await cacheStore.getSyncMetadata('lastFullSync');
if (lastFullSyncStr) {
const lastFullSync = new Date(lastFullSyncStr);
const fullSyncAge = Date.now() - lastFullSync.getTime();
@@ -334,7 +355,7 @@ class SyncEngine {
// Update timestamp even if no objects were synced
const now = new Date();
cacheStore.setSyncMetadata('lastIncrementalSync', now.toISOString());
await cacheStore.setSyncMetadata('lastIncrementalSync', now.toISOString());
this.lastIncrementalSync = now;
return { success: true, updatedCount: 0 };
@@ -346,15 +367,15 @@ class SyncEngine {
const parsed = jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
const typeName = parsed._objectType as CMDBObjectTypeName;
cacheStore.upsertObject(typeName, parsed);
cacheStore.extractAndStoreRelations(typeName, parsed);
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
updatedCount++;
}
}
// Update sync metadata
const now = new Date();
cacheStore.setSyncMetadata('lastIncrementalSync', now.toISOString());
await cacheStore.setSyncMetadata('lastIncrementalSync', now.toISOString());
this.lastIncrementalSync = now;
if (updatedCount > 0) {
@@ -413,14 +434,14 @@ class SyncEngine {
const parsed = jiraAssetsClient.parseObject(jiraObj);
if (!parsed) return false;
cacheStore.upsertObject(typeName, parsed);
cacheStore.extractAndStoreRelations(typeName, parsed);
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
return true;
} catch (error) {
// If object was deleted from Jira, remove it from our cache
if (error instanceof JiraObjectNotFoundError) {
const deleted = cacheStore.deleteObject(typeName, objectId);
const deleted = await cacheStore.deleteObject(typeName, objectId);
if (deleted) {
logger.info(`SyncEngine: Removed deleted object ${typeName}/${objectId} from cache`);
}
@@ -438,8 +459,8 @@ class SyncEngine {
/**
* Get current sync engine status
*/
getStatus(): SyncEngineStatus {
const stats = cacheStore.getStats();
async getStatus(): Promise<SyncEngineStatus> {
const stats = await cacheStore.getStats();
let nextIncrementalSync: string | null = null;
if (this.isRunning && this.lastIncrementalSync) {

View File

@@ -47,8 +47,10 @@ export interface ApplicationListItem {
minFTE?: number | null; // Minimum FTE from configuration range
maxFTE?: number | null; // Maximum FTE from configuration range
overrideFTE?: number | null; // Override FTE value (if set, overrides calculated value)
businessImpactAnalyse?: ReferenceValue | null; // Business Impact Analyse
applicationManagementHosting?: ReferenceValue | null; // Application Management - Hosting
applicationManagementTAM?: ReferenceValue | null; // Application Management - TAM
dataCompletenessPercentage?: number; // Data completeness percentage (0-100)
}
// Full application details
@@ -85,6 +87,7 @@ export interface ApplicationDetails {
applicationManagementHosting?: ReferenceValue | null; // Application Management - Hosting
applicationManagementTAM?: ReferenceValue | null; // Application Management - TAM
technischeArchitectuur?: string | null; // URL to Technical Architecture document (Attribute ID 572)
dataCompletenessPercentage?: number; // Data completeness percentage (0-100)
}
// Search filters
@@ -227,9 +230,12 @@ export interface ZiraTaxonomy {
// Dashboard statistics
export interface DashboardStats {
totalApplications: number;
totalApplications: number; // Excluding Closed/Deprecated
totalAllApplications: number; // Including all statuses
classifiedCount: number;
unclassifiedCount: number;
withApplicationFunction?: number;
applicationFunctionPercentage?: number;
byStatus: Record<string, number>;
byDomain: Record<string, number>;
byGovernanceModel: Record<string, number>;
@@ -353,6 +359,8 @@ export interface JiraAssetsObject {
name: string;
};
attributes: JiraAssetsAttribute[];
updated?: string;
created?: string;
}
export interface JiraAssetsAttribute {
@@ -433,3 +441,27 @@ export interface ChatResponse {
message: ChatMessage;
suggestion?: AISuggestion; // Updated suggestion if AI provided one
}
// Data Completeness Configuration
/**
 * One field that participates in the Data Completeness Score.
 *
 * `fieldPath` is resolved against an ApplicationDetails object at scoring
 * time; `enabled` toggles whether the field counts toward the score.
 */
export interface CompletenessFieldConfig {
id: string; // Unique identifier for the field within the category
name: string; // Display name (e.g., "Organisation", "ApplicationFunction")
fieldPath: string; // Path in ApplicationDetails object (e.g., "organisation", "applicationFunctions")
enabled: boolean; // Whether this field is included in completeness check
}
/**
 * A named group of completeness fields (e.g. "General") used to organise
 * the Data Completeness Score configuration.
 */
export interface CompletenessCategoryConfig {
id: string; // Unique identifier for the category
name: string; // Display name of the category
description: string; // Human-readable description of what the category covers
fields: CompletenessFieldConfig[]; // Fields evaluated for this category
}
/**
 * Root configuration object for the Data Completeness Score.
 *
 * Mirrors the on-disk JSON config: a metadata header (version, description,
 * last-updated timestamp) plus a dynamic list of field categories.
 */
export interface DataCompletenessConfig {
metadata: {
version: string; // Config schema/content version (e.g. "2.0.0")
description: string; // Human-readable description of the config
lastUpdated: string; // ISO-8601 timestamp of the last modification
};
categories: CompletenessCategoryConfig[]; // Array of categories (dynamic)
}