UI styling improvements: dashboard headers and navigation

- Restore blue PageHeader on Dashboard (/app-components)
- Update homepage (/) with subtle header design without blue bar
- Add uniform PageHeader styling to application edit page
- Fix Rapporten link on homepage to point to /reports overview
- Improve header descriptions spacing for better readability
This commit is contained in:
2026-01-21 03:24:56 +01:00
parent e276e77fbc
commit cdee0e8819
138 changed files with 24551 additions and 3352 deletions

View File

@@ -9,9 +9,13 @@
"build": "tsc",
"start": "node dist/index.js",
"generate-schema": "tsx scripts/generate-schema.ts",
"generate-types": "tsx scripts/generate-types-from-db.ts",
"discover-schema": "tsx scripts/discover-schema.ts",
"migrate": "tsx scripts/run-migrations.ts",
"check-admin": "tsx scripts/check-admin-user.ts",
"migrate:sqlite-to-postgres": "tsx scripts/migrate-sqlite-to-postgres.ts"
"migrate:sqlite-to-postgres": "tsx scripts/migrate-sqlite-to-postgres.ts",
"migrate:search-enabled": "tsx scripts/migrate-search-enabled.ts",
"setup-schema-mappings": "tsx scripts/setup-schema-mappings.ts"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.32.1",

View File

@@ -0,0 +1,38 @@
#!/usr/bin/env npx tsx
/**
* Schema Discovery CLI
*
* Manually trigger schema discovery from Jira Assets API.
* This script fetches the schema and stores it in the database.
*
* Usage: npm run discover-schema
*/
import { schemaDiscoveryService } from '../src/services/schemaDiscoveryService.js';
import { schemaCacheService } from '../src/services/schemaCacheService.js';
import { logger } from '../src/services/logger.js';
/**
 * CLI entry point: force a fresh schema discovery (bypassing the cache)
 * and invalidate the in-memory schema cache so the next request reads the
 * newly stored schema. Exits 0 on success, 1 on any failure.
 */
async function main() {
  try {
    console.log('Starting schema discovery...');
    logger.info('Schema Discovery CLI: Starting manual schema discovery');
    // Force discovery (ignore cache)
    await schemaDiscoveryService.discoverAndStoreSchema(true);
    // Invalidate cache so next request gets fresh data
    schemaCacheService.invalidate();
    console.log('✅ Schema discovery completed successfully!');
    logger.info('Schema Discovery CLI: Schema discovery completed successfully');
    process.exit(0);
  } catch (error) {
    console.error('❌ Schema discovery failed:', error);
    logger.error('Schema Discovery CLI: Schema discovery failed', error);
    process.exit(1);
  }
}

// 'void' marks the intentionally unawaited entry call (no-floating-promises);
// main() handles every rejection internally and terminates via process.exit.
void main();

View File

@@ -752,18 +752,12 @@ function generateDatabaseSchema(generatedAt: Date): string {
'-- =============================================================================',
'-- Core Tables',
'-- =============================================================================',
'',
'-- Cached CMDB objects (all types stored in single table with JSON data)',
'CREATE TABLE IF NOT EXISTS cached_objects (',
' id TEXT PRIMARY KEY,',
' object_key TEXT NOT NULL UNIQUE,',
' object_type TEXT NOT NULL,',
' label TEXT NOT NULL,',
' data JSON NOT NULL,',
' jira_updated_at TEXT,',
' jira_created_at TEXT,',
' cached_at TEXT NOT NULL',
');',
'--',
'-- NOTE: This schema is LEGACY and deprecated.',
'-- The current system uses the normalized schema defined in',
'-- backend/src/services/database/normalized-schema.ts',
'--',
'-- This file is kept for reference and migration purposes only.',
'',
'-- Object relations (references between objects)',
'CREATE TABLE IF NOT EXISTS object_relations (',
@@ -787,10 +781,6 @@ function generateDatabaseSchema(generatedAt: Date): string {
'-- Indices for Performance',
'-- =============================================================================',
'',
'CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);',
'CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);',
'CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);',
'CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);',
'',
'CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);',
'CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);',

View File

@@ -0,0 +1,484 @@
#!/usr/bin/env npx tsx
/**
* Type Generation Script - Database to TypeScript
*
* Generates TypeScript types from database schema.
* This script reads the schema from the database (object_types, attributes)
* and generates:
* - TypeScript types (jira-types.ts)
* - Schema metadata (jira-schema.ts)
*
* Usage: npm run generate-types
*/
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';
import { createDatabaseAdapter } from '../src/services/database/factory.js';
import type { AttributeDefinition } from '../src/generated/jira-schema.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const OUTPUT_DIR = path.resolve(__dirname, '../src/generated');
interface DatabaseObjectType {
jira_type_id: number;
type_name: string;
display_name: string;
description: string | null;
sync_priority: number;
object_count: number;
}
interface DatabaseAttribute {
jira_attr_id: number;
object_type_name: string;
attr_name: string;
field_name: string;
attr_type: string;
is_multiple: boolean | number;
is_editable: boolean | number;
is_required: boolean | number;
is_system: boolean | number;
reference_type_name: string | null;
description: string | null;
}
/**
 * Map a database attribute type to the TypeScript type used in the
 * generated code. Reference attributes become ObjectReference; unknown
 * attribute types fall back to `unknown`. Multi-valued attributes are
 * emitted as arrays, single-valued ones as nullable unions.
 */
function generateTypeScriptType(attrType: string, isMultiple: boolean, isReference: boolean): string {
  // Scalar DB types and their TypeScript counterparts; date/datetime are
  // represented as ISO date strings.
  const scalarTypeMap: Record<string, string> = {
    text: 'string',
    textarea: 'string',
    url: 'string',
    email: 'string',
    select: 'string',
    user: 'string',
    status: 'string',
    integer: 'number',
    float: 'number',
    boolean: 'boolean',
    date: 'string', // ISO date string
    datetime: 'string', // ISO date string
  };

  const baseType = isReference
    ? 'ObjectReference'
    : (scalarTypeMap[attrType] ?? 'unknown');

  return isMultiple ? `${baseType}[]` : `${baseType} | null`;
}
/**
 * Escape a string for embedding inside a single-quoted literal in
 * generated source code.
 *
 * Backslashes are escaped FIRST so pre-existing backslashes in the input
 * cannot combine with the quote-escaping pass to produce a broken literal
 * (the previous implementation did not escape backslashes at all).
 * Newlines (LF and CRLF) are flattened to spaces because the generated
 * literal must stay on one line.
 */
function escapeString(str: string): string {
  return str
    .replace(/\\/g, '\\\\') // must run first: protect literal backslashes
    .replace(/'/g, "\\'")
    .replace(/\r?\n/g, ' ');
}
/**
 * Render the complete jira-types.ts source: base interfaces, one interface
 * per object type (extending BaseCMDBObject), union types, and type guards.
 *
 * Pure string builder — no I/O. Returns the full file contents as a single
 * newline-joined string.
 *
 * @param objectTypes - object types (with their attributes) read from the DB
 * @param generatedAt - timestamp stamped into the generated file header
 */
function generateTypesFile(objectTypes: Array<{
  jiraTypeId: number;
  name: string;
  typeName: string;
  objectCount: number;
  attributes: AttributeDefinition[];
}>, generatedAt: Date): string {
  // Static header: warning banner plus the base types every generated
  // interface depends on (ObjectReference, BaseCMDBObject).
  const lines: string[] = [
    '// AUTO-GENERATED FILE - DO NOT EDIT MANUALLY',
    '// Generated from database schema',
    `// Generated at: ${generatedAt.toISOString()}`,
    '//',
    '// Re-generate with: npm run generate-types',
    '',
    '// =============================================================================',
    '// Base Types',
    '// =============================================================================',
    '',
    '/** Reference to another CMDB object */',
    'export interface ObjectReference {',
    ' objectId: string;',
    ' objectKey: string;',
    ' label: string;',
    ' // Optional enriched data from referenced object',
    ' factor?: number;',
    '}',
    '',
    '/** Base interface for all CMDB objects */',
    'export interface BaseCMDBObject {',
    ' id: string;',
    ' objectKey: string;',
    ' label: string;',
    ' _objectType: string;',
    ' _jiraUpdatedAt: string;',
    ' _jiraCreatedAt: string;',
    '}',
    '',
    '// =============================================================================',
    '// Object Type Interfaces',
    '// =============================================================================',
    '',
  ];
  // One interface per object type; _objectType is narrowed to a string
  // literal so it can act as the discriminant of the CMDBObject union.
  for (const objType of objectTypes) {
    lines.push(`/** ${objType.name} (Jira Type ID: ${objType.jiraTypeId}, ${objType.objectCount} objects) */`);
    lines.push(`export interface ${objType.typeName} extends BaseCMDBObject {`);
    lines.push(` _objectType: '${objType.typeName}';`);
    lines.push('');
    // Group attributes by type
    const scalarAttrs = objType.attributes.filter(a => a.type !== 'reference');
    const refAttrs = objType.attributes.filter(a => a.type === 'reference');
    if (scalarAttrs.length > 0) {
      lines.push(' // Scalar attributes');
      for (const attr of scalarAttrs) {
        const tsType = generateTypeScriptType(attr.type, attr.isMultiple, false);
        const comment = attr.description ? ` // ${attr.description}` : '';
        lines.push(` ${attr.fieldName}: ${tsType};${comment}`);
      }
      lines.push('');
    }
    if (refAttrs.length > 0) {
      lines.push(' // Reference attributes');
      for (const attr of refAttrs) {
        const tsType = generateTypeScriptType(attr.type, attr.isMultiple, true);
        // Annotate which type the reference points at, when known.
        const comment = attr.referenceTypeName ? ` // -> ${attr.referenceTypeName}` : '';
        lines.push(` ${attr.fieldName}: ${tsType};${comment}`);
      }
      lines.push('');
    }
    lines.push('}');
    lines.push('');
  }
  // Generate union type
  lines.push('// =============================================================================');
  lines.push('// Union Types');
  lines.push('// =============================================================================');
  lines.push('');
  lines.push('/** Union of all CMDB object types */');
  lines.push('export type CMDBObject =');
  for (let i = 0; i < objectTypes.length; i++) {
    // Last union member carries the terminating semicolon.
    const suffix = i < objectTypes.length - 1 ? '' : ';';
    lines.push(` | ${objectTypes[i].typeName}${suffix}`);
  }
  lines.push('');
  // Generate type name literal union
  lines.push('/** All valid object type names */');
  lines.push('export type CMDBObjectTypeName =');
  for (let i = 0; i < objectTypes.length; i++) {
    const suffix = i < objectTypes.length - 1 ? '' : ';';
    lines.push(` | '${objectTypes[i].typeName}'${suffix}`);
  }
  lines.push('');
  // Generate type guards
  lines.push('// =============================================================================');
  lines.push('// Type Guards');
  lines.push('// =============================================================================');
  lines.push('');
  // One user-defined type guard per object type, discriminating on
  // the _objectType literal.
  for (const objType of objectTypes) {
    lines.push(`export function is${objType.typeName}(obj: CMDBObject): obj is ${objType.typeName} {`);
    lines.push(` return obj._objectType === '${objType.typeName}';`);
    lines.push('}');
    lines.push('');
  }
  return lines.join('\n');
}
/**
 * Render the complete jira-schema.ts source: the AttributeDefinition /
 * ObjectTypeDefinition shapes, generation metadata constants, the
 * OBJECT_TYPES registry, lookup maps, and attribute helper functions.
 *
 * Pure string builder — no I/O. Returns the full file contents as a single
 * newline-joined string.
 *
 * @param objectTypes - object types (with their attributes) read from the DB
 * @param generatedAt - timestamp stamped into the generated file header
 */
function generateSchemaFile(objectTypes: Array<{
  jiraTypeId: number;
  name: string;
  typeName: string;
  syncPriority: number;
  objectCount: number;
  attributes: AttributeDefinition[];
}>, generatedAt: Date): string {
  // Static header: type declarations emitted into the generated file plus
  // generation metadata constants derived from the inputs.
  const lines: string[] = [
    '// AUTO-GENERATED FILE - DO NOT EDIT MANUALLY',
    '// Generated from database schema',
    `// Generated at: ${generatedAt.toISOString()}`,
    '//',
    '// Re-generate with: npm run generate-types',
    '',
    '// =============================================================================',
    '// Schema Type Definitions',
    '// =============================================================================',
    '',
    'export interface AttributeDefinition {',
    ' jiraId: number;',
    ' name: string;',
    ' fieldName: string;',
    " type: 'text' | 'integer' | 'float' | 'boolean' | 'date' | 'datetime' | 'select' | 'reference' | 'url' | 'email' | 'textarea' | 'user' | 'status' | 'unknown';",
    ' isMultiple: boolean;',
    ' isEditable: boolean;',
    ' isRequired: boolean;',
    ' isSystem: boolean;',
    ' referenceTypeId?: number;',
    ' referenceTypeName?: string;',
    ' description?: string;',
    '}',
    '',
    'export interface ObjectTypeDefinition {',
    ' jiraTypeId: number;',
    ' name: string;',
    ' typeName: string;',
    ' syncPriority: number;',
    ' objectCount: number;',
    ' attributes: AttributeDefinition[];',
    '}',
    '',
    '// =============================================================================',
    '// Schema Metadata',
    '// =============================================================================',
    '',
    `export const SCHEMA_GENERATED_AT = '${generatedAt.toISOString()}';`,
    `export const SCHEMA_OBJECT_TYPE_COUNT = ${objectTypes.length};`,
    `export const SCHEMA_TOTAL_ATTRIBUTES = ${objectTypes.reduce((sum, ot) => sum + ot.attributes.length, 0)};`,
    '',
    '// =============================================================================',
    '// Object Type Definitions',
    '// =============================================================================',
    '',
    'export const OBJECT_TYPES: Record<string, ObjectTypeDefinition> = {',
  ];
  // One OBJECT_TYPES entry per type. Trailing commas are managed manually
  // (index-based loops) so the emitted object literal is syntactically valid.
  for (let i = 0; i < objectTypes.length; i++) {
    const objType = objectTypes[i];
    const comma = i < objectTypes.length - 1 ? ',' : '';
    lines.push(` '${objType.typeName}': {`);
    lines.push(` jiraTypeId: ${objType.jiraTypeId},`);
    lines.push(` name: '${escapeString(objType.name)}',`);
    lines.push(` typeName: '${objType.typeName}',`);
    lines.push(` syncPriority: ${objType.syncPriority},`);
    lines.push(` objectCount: ${objType.objectCount},`);
    lines.push(' attributes: [');
    for (let j = 0; j < objType.attributes.length; j++) {
      const attr = objType.attributes[j];
      const attrComma = j < objType.attributes.length - 1 ? ',' : '';
      // Each attribute is emitted as a single-line object literal; optional
      // fields (referenceTypeName, description) are appended only when set.
      let attrLine = ` { jiraId: ${attr.jiraId}, name: '${escapeString(attr.name)}', fieldName: '${attr.fieldName}', type: '${attr.type}', isMultiple: ${attr.isMultiple}, isEditable: ${attr.isEditable}, isRequired: ${attr.isRequired}, isSystem: ${attr.isSystem}`;
      if (attr.referenceTypeName) {
        attrLine += `, referenceTypeName: '${attr.referenceTypeName}'`;
      }
      if (attr.description) {
        attrLine += `, description: '${escapeString(attr.description)}'`;
      }
      attrLine += ` }${attrComma}`;
      lines.push(attrLine);
    }
    lines.push(' ],');
    lines.push(` }${comma}`);
  }
  lines.push('};');
  lines.push('');
  // Generate lookup maps
  lines.push('// =============================================================================');
  lines.push('// Lookup Maps');
  lines.push('// =============================================================================');
  lines.push('');
  // Type ID to name map
  lines.push('/** Map from Jira Type ID to TypeScript type name */');
  lines.push('export const TYPE_ID_TO_NAME: Record<number, string> = {');
  for (const objType of objectTypes) {
    lines.push(` ${objType.jiraTypeId}: '${objType.typeName}',`);
  }
  lines.push('};');
  lines.push('');
  // Type name to ID map
  lines.push('/** Map from TypeScript type name to Jira Type ID */');
  lines.push('export const TYPE_NAME_TO_ID: Record<string, number> = {');
  for (const objType of objectTypes) {
    lines.push(` '${objType.typeName}': ${objType.jiraTypeId},`);
  }
  lines.push('};');
  lines.push('');
  // Jira name to TypeScript name map
  lines.push('/** Map from Jira object type name to TypeScript type name */');
  lines.push('export const JIRA_NAME_TO_TYPE: Record<string, string> = {');
  for (const objType of objectTypes) {
    lines.push(` '${escapeString(objType.name)}': '${objType.typeName}',`);
  }
  lines.push('};');
  lines.push('');
  // Helper functions
  lines.push('// =============================================================================');
  lines.push('// Helper Functions');
  lines.push('// =============================================================================');
  lines.push('');
  lines.push('/** Get attribute definition by type and field name */');
  lines.push('export function getAttributeDefinition(typeName: string, fieldName: string): AttributeDefinition | undefined {');
  lines.push(' const objectType = OBJECT_TYPES[typeName];');
  lines.push(' if (!objectType) return undefined;');
  lines.push(' return objectType.attributes.find(a => a.fieldName === fieldName);');
  lines.push('}');
  lines.push('');
  lines.push('/** Get attribute definition by type and Jira attribute ID */');
  lines.push('export function getAttributeById(typeName: string, jiraId: number): AttributeDefinition | undefined {');
  lines.push(' const objectType = OBJECT_TYPES[typeName];');
  lines.push(' if (!objectType) return undefined;');
  lines.push(' return objectType.attributes.find(a => a.jiraId === jiraId);');
  lines.push('}');
  lines.push('');
  lines.push('/** Get attribute definition by type and Jira attribute name */');
  lines.push('export function getAttributeByName(typeName: string, attrName: string): AttributeDefinition | undefined {');
  lines.push(' const objectType = OBJECT_TYPES[typeName];');
  lines.push(' if (!objectType) return undefined;');
  lines.push(' return objectType.attributes.find(a => a.name === attrName);');
  lines.push('}');
  lines.push('');
  // NOTE: the ${attrName}/${typeName} below are deliberately inside a
  // single-quoted push — they interpolate in the GENERATED function, not here.
  lines.push('/** Get attribute Jira ID by type and attribute name - throws if not found */');
  lines.push('export function getAttributeId(typeName: string, attrName: string): number {');
  lines.push(' const attr = getAttributeByName(typeName, attrName);');
  lines.push(' if (!attr) {');
  lines.push(' throw new Error(`Attribute "${attrName}" not found on type "${typeName}"`);');
  lines.push(' }');
  lines.push(' return attr.jiraId;');
  lines.push('}');
  lines.push('');
  lines.push('/** Get all reference attributes for a type */');
  lines.push('export function getReferenceAttributes(typeName: string): AttributeDefinition[] {');
  lines.push(' const objectType = OBJECT_TYPES[typeName];');
  lines.push(' if (!objectType) return [];');
  lines.push(" return objectType.attributes.filter(a => a.type === 'reference');");
  lines.push('}');
  lines.push('');
  lines.push('/** Get all object types sorted by sync priority */');
  lines.push('export function getObjectTypesBySyncPriority(): ObjectTypeDefinition[] {');
  lines.push(' return Object.values(OBJECT_TYPES).sort((a, b) => a.syncPriority - b.syncPriority);');
  lines.push('}');
  lines.push('');
  return lines.join('\n');
}
/**
 * Entry point: read object types and attributes from the database, join
 * them in memory, and write src/generated/jira-types.ts and
 * src/generated/jira-schema.ts.
 *
 * Exits 0 on success and 1 on failure. The explicit exit(0) matters: the
 * database adapter may hold open handles (e.g. a connection pool) that
 * would otherwise keep the event loop — and the process — alive after
 * generation finishes. The sibling scripts (discover-schema,
 * migrate-search-enabled) already exit explicitly.
 */
async function main() {
  const generatedAt = new Date();
  console.log('');
  console.log('╔════════════════════════════════════════════════════════════════╗');
  console.log('║ Type Generation - Database to TypeScript ║');
  console.log('╚════════════════════════════════════════════════════════════════╝');
  console.log('');
  try {
    // Connect to database
    const db = createDatabaseAdapter();
    console.log('✓ Connected to database');

    // Ensure schema is discovered first (dynamic import so the discovery
    // service is only loaded when this script actually runs).
    const { schemaDiscoveryService } = await import('../src/services/schemaDiscoveryService.js');
    await schemaDiscoveryService.discoverAndStoreSchema();
    console.log('✓ Schema discovered from database');

    // Fetch object types
    const objectTypeRows = await db.query<DatabaseObjectType>(`
      SELECT * FROM object_types
      ORDER BY sync_priority, type_name
    `);
    console.log(`✓ Fetched ${objectTypeRows.length} object types`);

    // Fetch attributes
    const attributeRows = await db.query<DatabaseAttribute>(`
      SELECT * FROM attributes
      ORDER BY object_type_name, jira_attr_id
    `);
    console.log(`✓ Fetched ${attributeRows.length} attributes`);

    // Build object types with their attributes joined in memory.
    const objectTypes = objectTypeRows.map(typeRow => {
      const attributes = attributeRows
        .filter(a => a.object_type_name === typeRow.type_name)
        .map(attrRow => {
          // SQLite returns booleans as 0/1 integers while PostgreSQL
          // returns real booleans — normalize both to boolean.
          const isMultiple = typeof attrRow.is_multiple === 'boolean' ? attrRow.is_multiple : attrRow.is_multiple === 1;
          const isEditable = typeof attrRow.is_editable === 'boolean' ? attrRow.is_editable : attrRow.is_editable === 1;
          const isRequired = typeof attrRow.is_required === 'boolean' ? attrRow.is_required : attrRow.is_required === 1;
          const isSystem = typeof attrRow.is_system === 'boolean' ? attrRow.is_system : attrRow.is_system === 1;
          return {
            jiraId: attrRow.jira_attr_id,
            name: attrRow.attr_name,
            fieldName: attrRow.field_name,
            type: attrRow.attr_type as AttributeDefinition['type'],
            isMultiple,
            isEditable,
            isRequired,
            isSystem,
            referenceTypeName: attrRow.reference_type_name || undefined,
            description: attrRow.description || undefined,
          } as AttributeDefinition;
        });
      return {
        jiraTypeId: typeRow.jira_type_id,
        name: typeRow.display_name,
        typeName: typeRow.type_name,
        syncPriority: typeRow.sync_priority,
        objectCount: typeRow.object_count,
        attributes,
      };
    });

    // Ensure output directory exists
    if (!fs.existsSync(OUTPUT_DIR)) {
      fs.mkdirSync(OUTPUT_DIR, { recursive: true });
    }

    // Generate TypeScript types file
    const typesContent = generateTypesFile(objectTypes, generatedAt);
    const typesPath = path.join(OUTPUT_DIR, 'jira-types.ts');
    fs.writeFileSync(typesPath, typesContent, 'utf-8');
    console.log(`✓ Generated ${typesPath}`);

    // Generate schema file
    const schemaContent = generateSchemaFile(objectTypes, generatedAt);
    const schemaPath = path.join(OUTPUT_DIR, 'jira-schema.ts');
    fs.writeFileSync(schemaPath, schemaContent, 'utf-8');
    console.log(`✓ Generated ${schemaPath}`);

    console.log('');
    console.log('✅ Type generation completed successfully!');
    console.log(` Generated ${objectTypes.length} object types with ${objectTypes.reduce((sum, ot) => sum + ot.attributes.length, 0)} attributes`);
    console.log('');
    // Explicit success exit — see doc comment: don't rely on open DB
    // handles draining on their own.
    process.exit(0);
  } catch (error) {
    console.error('');
    console.error('❌ Type generation failed:', error);
    process.exit(1);
  }
}
main();

View File

@@ -0,0 +1,90 @@
/**
* Migration script: Add search_enabled column to schemas table
*
* This script adds the search_enabled column to the schemas table if it doesn't exist.
*
* Usage:
* npm run migrate:search-enabled
* or
* tsx scripts/migrate-search-enabled.ts
*/
import { getDatabaseAdapter } from '../src/services/database/singleton.js';
import { logger } from '../src/services/logger.js';
/**
 * Entry point: add the search_enabled column to the schemas table on
 * either PostgreSQL or SQLite, create the supporting index (best-effort),
 * then verify the column is actually queryable.
 * Exits 0 on success, 1 on any failure.
 */
async function main() {
  try {
    console.log('Starting migration: Adding search_enabled column to schemas table...');
    const db = getDatabaseAdapter();
    await db.ensureInitialized?.();
    const isPostgres = db.isPostgres === true;
    // Check if column exists and add it if it doesn't
    if (isPostgres) {
      // PostgreSQL: information_schema reports whether the column exists.
      const columnExists = await db.queryOne<{ exists: boolean }>(`
        SELECT EXISTS (
          SELECT 1 FROM information_schema.columns
          WHERE table_name = 'schemas' AND column_name = 'search_enabled'
        ) as exists
      `);
      if (!columnExists?.exists) {
        console.log('Adding search_enabled column to schemas table...');
        await db.execute(`
          ALTER TABLE schemas ADD COLUMN search_enabled BOOLEAN NOT NULL DEFAULT TRUE;
        `);
        console.log('✓ Column added successfully');
      } else {
        console.log('✓ Column already exists');
      }
      // Create index if it doesn't exist. Best-effort: a failure here is
      // non-fatal, so the error object is intentionally discarded
      // (optional catch binding — the previous code bound an unused var).
      try {
        await db.execute(`
          CREATE INDEX IF NOT EXISTS idx_schemas_search_enabled ON schemas(search_enabled);
        `);
        console.log('✓ Index created/verified');
      } catch {
        console.log('Index may already exist, continuing...');
      }
    } else {
      // SQLite has no information_schema; probing the column with a
      // SELECT is the simplest existence check.
      try {
        await db.queryOne('SELECT search_enabled FROM schemas LIMIT 1');
        console.log('✓ Column already exists');
      } catch {
        // Column doesn't exist, add it
        console.log('Adding search_enabled column to schemas table...');
        await db.execute('ALTER TABLE schemas ADD COLUMN search_enabled INTEGER NOT NULL DEFAULT 1');
        console.log('✓ Column added successfully');
      }
      // Create index if it doesn't exist (best-effort, see above).
      try {
        await db.execute('CREATE INDEX IF NOT EXISTS idx_schemas_search_enabled ON schemas(search_enabled)');
        console.log('✓ Index created/verified');
      } catch {
        console.log('Index may already exist, continuing...');
      }
    }
    // Verify the column exists
    try {
      await db.queryOne('SELECT search_enabled FROM schemas LIMIT 1');
      console.log('✓ Migration completed successfully - search_enabled column verified');
    } catch (error) {
      console.error('✗ Migration verification failed:', error);
      process.exit(1);
    }
    process.exit(0);
  } catch (error) {
    console.error('✗ Migration failed:', error);
    process.exit(1);
  }
}
main();

View File

@@ -66,7 +66,8 @@ async function migrateCacheDatabase(pg: Pool) {
const sqlite = new Database(SQLITE_CACHE_DB, { readonly: true });
try {
// Migrate cached_objects
// Migrate cached_objects (LEGACY - only for migrating old data from deprecated schema)
// Note: New databases use the normalized schema (objects + attribute_values tables)
const objects = sqlite.prepare('SELECT * FROM cached_objects').all() as any[];
console.log(` Migrating ${objects.length} cached objects...`);

View File

@@ -0,0 +1,178 @@
/**
* Setup Schema Mappings Script
*
* Configures schema mappings for object types based on the provided configuration.
* Run with: npm run setup-schema-mappings
*/
import { schemaMappingService } from '../src/services/schemaMappingService.js';
import { logger } from '../src/services/logger.js';
import { JIRA_NAME_TO_TYPE } from '../src/generated/jira-schema.js';
// Configuration: Schema ID -> Array of object type display names
// Keys are Jira schema IDs (as strings); values are the display names of
// the object types that should be mapped into that schema.
// NOTE(review): display names must exactly match keys of JIRA_NAME_TO_TYPE
// (generated in jira-schema.ts) — except 'User', which is special-cased in
// setupSchemaMappings below. Unmatched names are skipped with a warning.
const SCHEMA_MAPPINGS: Record<string, string[]> = {
  // Schema 8: user directory objects only.
  '8': ['User'],
  // Schema 6: main CMDB — applications, infrastructure, and the
  // Application Management reference data.
  '6': [
    'Application Component',
    'Flows',
    'Server',
    'AzureSubscription',
    'Certificate',
    'Domain',
    'Package',
    'PackageBuild',
    'Privileged User',
    'Software',
    'SoftwarePatch',
    'Supplier',
    'Application Management - Subteam',
    'Application Management - Team',
    'Measures',
    'Rebootgroups',
    'Application Management - Hosting',
    'Application Management - Number of Users',
    'Application Management - TAM',
    'Application Management - Application Type',
    'Application Management - Complexity Factor',
    'Application Management - Dynamics Factor',
    'ApplicationFunction',
    'ApplicationFunctionCategory',
    'Business Impact Analyse',
    'Business Importance',
    'Certificate ClassificationType',
    'Certificate Type',
    'Hosting Type',
    'ICT Governance Model',
    'Organisation',
  ],
};
/**
 * Apply SCHEMA_MAPPINGS: map each configured object type to its schema ID
 * via schemaMappingService. 'User' is special-cased: it may be missing
 * from the generated schema, so it is inserted into object_types on demand
 * with a placeholder jira_type_id. Logs a summary (created / skipped /
 * errors) and clears the mapping cache when done.
 */
async function setupSchemaMappings() {
  logger.info('Setting up schema mappings...');
  try {
    let totalMappings = 0;
    let skippedMappings = 0;
    let errors = 0;
    for (const [schemaId, objectTypeNames] of Object.entries(SCHEMA_MAPPINGS)) {
      logger.info(`\nConfiguring schema ${schemaId} with ${objectTypeNames.length} object types...`);
      for (const displayName of objectTypeNames) {
        try {
          // Convert display name to typeName
          let typeName: string;
          if (displayName === 'User') {
            // User might not be in the generated schema, use 'User' directly
            typeName = 'User';
            // First, ensure User exists in object_types table.
            // NOTE(review): reaches into normalizedCacheStore's private db
            // via 'as any' — fragile; confirm no public accessor exists.
            const { normalizedCacheStore } = await import('../src/services/normalizedCacheStore.js');
            const db = (normalizedCacheStore as any).db;
            await db.ensureInitialized?.();
            // Check if User exists in object_types
            const existing = await db.queryOne<{ type_name: string }>(`
SELECT type_name FROM object_types WHERE type_name = ?
`, [typeName]);
            if (!existing) {
              // Insert User into object_types (we'll use a placeholder jira_type_id)
              // The actual jira_type_id will be discovered during schema discovery
              logger.info(` Adding "User" to object_types table...`);
              try {
                // Two inserts because the unique constraint may be on either
                // jira_type_id or type_name depending on schema version; each
                // statement is a no-op/upsert when its conflict target hits.
                await db.execute(`
INSERT INTO object_types (jira_type_id, type_name, display_name, description, sync_priority, object_count, discovered_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(jira_type_id) DO NOTHING
`, [
                  999999, // Placeholder ID - will be updated during schema discovery
                  'User',
                  'User',
                  'User object type from schema 8',
                  0,
                  0,
                  new Date().toISOString(),
                  new Date().toISOString()
                ]);
                // Also try with type_name as unique constraint
                await db.execute(`
INSERT INTO object_types (jira_type_id, type_name, display_name, description, sync_priority, object_count, discovered_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(type_name) DO UPDATE SET
display_name = excluded.display_name,
updated_at = excluded.updated_at
`, [
                  999999,
                  'User',
                  'User',
                  'User object type from schema 8',
                  0,
                  0,
                  new Date().toISOString(),
                  new Date().toISOString()
                ]);
                logger.info(` ✓ Added "User" to object_types table`);
              } catch (error: any) {
                // If it already exists, that's fine
                if (error.message?.includes('UNIQUE constraint') || error.message?.includes('duplicate key')) {
                  logger.info(` "User" already exists in object_types table`);
                } else {
                  throw error;
                }
              }
            }
          } else {
            // Look up typeName from JIRA_NAME_TO_TYPE mapping
            typeName = JIRA_NAME_TO_TYPE[displayName];
            if (!typeName) {
              logger.warn(` ⚠️ Skipping "${displayName}" - typeName not found in schema`);
              skippedMappings++;
              continue;
            }
          }
          // Set the mapping
          await schemaMappingService.setMapping(typeName, schemaId, true);
          logger.info(` ✓ Mapped ${typeName} (${displayName}) -> Schema ${schemaId}`);
          totalMappings++;
        } catch (error) {
          // Per-type failures are counted but do not abort the run.
          logger.error(` ✗ Failed to map "${displayName}" to schema ${schemaId}:`, error);
          errors++;
        }
      }
    }
    logger.info(`\n✅ Schema mappings setup complete!`);
    logger.info(` - Total mappings created: ${totalMappings}`);
    if (skippedMappings > 0) {
      logger.info(` - Skipped (not found in schema): ${skippedMappings}`);
    }
    if (errors > 0) {
      logger.info(` - Errors: ${errors}`);
    }
    // Clear cache to ensure fresh lookups
    schemaMappingService.clearCache();
    logger.info(`\n💾 Cache cleared - mappings are now active`);
  } catch (error) {
    logger.error('Failed to setup schema mappings:', error);
    process.exit(1);
  }
}
// Run the script
setupSchemaMappings()
  .then(() => {
    logger.info('\n✨ Done!');
    process.exit(0);
  })
  .catch((error) => {
    logger.error('Script failed:', error);
    process.exit(1);
  });

View File

@@ -0,0 +1,533 @@
/**
* DebugController - Debug/testing endpoints for architecture validation
*
* Provides endpoints to run SQL queries and check database state for testing.
*/
import { Request, Response } from 'express';
import { logger } from '../../services/logger.js';
import { getServices } from '../../services/ServiceFactory.js';
export class DebugController {
/**
 * Execute a SQL query (read-only for safety)
 * POST /api/v2/debug/query
 * Body: { sql: string, params?: any[] }
 *
 * Security: only a single SELECT statement is accepted. Checking only
 * that the query *starts* with SELECT is bypassable via stacked
 * statements ("SELECT 1; DELETE FROM x"), so any semicolon other than a
 * single trailing terminator is rejected as well.
 */
async executeQuery(req: Request, res: Response): Promise<void> {
  try {
    const { sql, params = [] } = req.body;
    if (!sql || typeof sql !== 'string') {
      res.status(400).json({ error: 'SQL query required in request body' });
      return;
    }
    // Safety check: only allow SELECT queries
    const trimmedSql = sql.trim();
    const normalizedSql = trimmedSql.toUpperCase();
    if (!normalizedSql.startsWith('SELECT')) {
      res.status(400).json({ error: 'Only SELECT queries are allowed for security' });
      return;
    }
    // Reject stacked statements. This may false-positive on ';' inside
    // string literals — acceptable for a debug-only endpoint.
    if (trimmedSql.replace(/;\s*$/, '').includes(';')) {
      res.status(400).json({ error: 'Only SELECT queries are allowed for security' });
      return;
    }
    const services = getServices();
    const db = services.cacheRepo.db;
    const result = await db.query(sql, params);
    res.json({
      success: true,
      result,
      rowCount: result.length,
    });
  } catch (error) {
    logger.error('DebugController: Query execution failed', error);
    res.status(500).json({
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
}
/**
 * Look up a cached object by its key and report how many attribute
 * values are stored for it.
 * GET /api/v2/debug/objects?objectKey=...
 */
async getObjectInfo(req: Request, res: Response): Promise<void> {
  try {
    const objectKey = req.query.objectKey as string;
    if (!objectKey) {
      res.status(400).json({ error: 'objectKey query parameter required' });
      return;
    }

    const { cacheRepo } = getServices();
    const found = await cacheRepo.getObjectByKey(objectKey);
    if (!found) {
      res.status(404).json({ error: 'Object not found' });
      return;
    }

    // Report the object together with its stored attribute-value count.
    const values = await cacheRepo.getAttributeValues(found.id);
    res.json({ object: found, attributeValueCount: values.length });
  } catch (err) {
    logger.error('DebugController: Failed to get object info', err);
    res.status(500).json({
      error: err instanceof Error ? err.message : 'Unknown error',
    });
  }
}
/**
 * Report incoming and outgoing relations for an object.
 * GET /api/v2/debug/relations?objectKey=...
 *
 * Responds with counts plus the raw relation rows in both directions.
 */
async getRelationInfo(req: Request, res: Response): Promise<void> {
  try {
    const objectKey = req.query.objectKey as string;
    if (!objectKey) {
      res.status(400).json({ error: 'objectKey query parameter required' });
      return;
    }
    const services = getServices();
    const obj = await services.cacheRepo.getObjectByKey(objectKey);
    if (!obj) {
      res.status(404).json({ error: 'Object not found' });
      return;
    }

    // The two directions differ only in the WHERE column, so build the
    // query once instead of duplicating the SQL and row-shape (DRY).
    type RelationRow = {
      sourceId: string;
      targetId: string;
      attributeId: number;
      sourceType: string;
      targetType: string;
    };
    const queryRelations = (column: 'source_id' | 'target_id') =>
      services.cacheRepo.db.query<RelationRow>(
        `SELECT source_id as sourceId, target_id as targetId, attribute_id as attributeId,
                source_type as sourceType, target_type as targetType
         FROM object_relations
         WHERE ${column} = ?`,
        [obj.id]
      );

    // The two lookups are independent — run them in parallel.
    const [sourceRelations, targetRelations] = await Promise.all([
      queryRelations('source_id'),
      queryRelations('target_id'),
    ]);

    res.json({
      object: obj,
      sourceRelations: sourceRelations.length,
      targetRelations: targetRelations.length,
      relations: {
        outgoing: sourceRelations,
        incoming: targetRelations,
      },
    });
  } catch (error) {
    logger.error('DebugController: Failed to get relation info', error);
    res.status(500).json({
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
}
/**
 * Report statistics for one object type: cached object count, enabled
 * flag from the schema, and up to five sample objects (id/key/label only).
 * GET /api/v2/debug/object-types/:typeName/stats
 */
async getObjectTypeStats(req: Request, res: Response): Promise<void> {
  try {
    const typeName = req.params.typeName;
    const services = getServices();
    // Get object count
    const count = await services.cacheRepo.countObjectsByType(typeName);
    // Get sample objects
    const samples = await services.cacheRepo.getObjectsByType(typeName, { limit: 5 });
    // Get enabled status from schema
    const typeInfo = await services.schemaRepo.getObjectTypeByTypeName(typeName);
    res.json({
      typeName,
      objectCount: count,
      // NOTE(review): '||' maps a SQLite numeric 0 to false but passes a
      // truthy 1 through unchanged (not coerced to true) — confirm clients
      // accept 1 as "enabled" before changing to '?? false'.
      enabled: typeInfo?.enabled || false,
      sampleObjects: samples.map(o => ({
        id: o.id,
        objectKey: o.objectKey,
        label: o.label,
      })),
    });
  } catch (error) {
    logger.error('DebugController: Failed to get object type stats', error);
    res.status(500).json({
      error: error instanceof Error ? error.message : 'Unknown error',
    });
  }
}
/**
* Get all object types with their enabled status (for debugging)
* GET /api/v2/debug/all-object-types
*/
async getAllObjectTypes(req: Request, res: Response): Promise<void> {
try {
const services = getServices();
const db = services.schemaRepo.db;
// Check if object_types table exists
try {
const tableCheck = await db.query('SELECT 1 FROM object_types LIMIT 1');
} catch (error) {
logger.error('DebugController: object_types table does not exist or is not accessible', error);
res.status(500).json({
error: 'object_types table does not exist. Please run schema sync first.',
details: error instanceof Error ? error.message : 'Unknown error',
});
return;
}
// Get all object types
let allTypes: Array<{
id: number;
type_name: string | null;
display_name: string;
enabled: boolean | number;
jira_type_id: number;
schema_id: number;
}>;
try {
allTypes = await db.query<{
id: number;
type_name: string | null;
display_name: string;
enabled: boolean | number;
jira_type_id: number;
schema_id: number;
}>(
`SELECT id, type_name, display_name, enabled, jira_type_id, schema_id
FROM object_types
ORDER BY enabled DESC, type_name`
);
} catch (error) {
logger.error('DebugController: Failed to query object_types table', error);
res.status(500).json({
error: 'Failed to query object_types table',
details: error instanceof Error ? error.message : 'Unknown error',
});
return;
}
// Get enabled types via service (may fail if table has issues)
let enabledTypes: Array<{ typeName: string; displayName: string; schemaId: string; objectTypeId: number }> = [];
try {
enabledTypes = await services.schemaSyncService.getEnabledObjectTypes();
logger.debug(`DebugController: getEnabledObjectTypes returned ${enabledTypes.length} types: ${enabledTypes.map(t => t.typeName).join(', ')}`);
} catch (error) {
logger.error('DebugController: Failed to get enabled types via service', error);
if (error instanceof Error) {
logger.error('Error details:', { message: error.message, stack: error.stack });
}
// Continue without enabled types from service
}
res.json({
allTypes: allTypes.map(t => ({
id: t.id,
typeName: t.type_name,
displayName: t.display_name,
enabled: t.enabled,
jiraTypeId: t.jira_type_id,
schemaId: t.schema_id,
hasTypeName: !!(t.type_name && t.type_name.trim() !== ''),
})),
enabledTypes: enabledTypes.map(t => ({
typeName: t.typeName,
displayName: t.displayName,
schemaId: t.schemaId,
objectTypeId: t.objectTypeId,
})),
summary: {
total: allTypes.length,
enabled: allTypes.filter(t => {
const isPostgres = db.isPostgres === true;
const enabledValue = isPostgres ? (t.enabled === true) : (t.enabled === 1);
return enabledValue && t.type_name && t.type_name.trim() !== '';
}).length,
enabledWithTypeName: enabledTypes.length,
missingTypeName: allTypes.filter(t => !t.type_name || t.type_name.trim() === '').length,
},
});
} catch (error) {
logger.error('DebugController: Failed to get all object types', error);
res.status(500).json({
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
/**
* Diagnose a specific object type (check database state)
* GET /api/v2/debug/object-types/diagnose/:typeName
* Checks both by type_name and display_name
*/
async diagnoseObjectType(req: Request, res: Response): Promise<void> {
try {
const typeName = req.params.typeName;
const services = getServices();
const db = services.schemaRepo.db;
const isPostgres = db.isPostgres === true;
const enabledCondition = isPostgres ? 'enabled IS true' : 'enabled = 1';
// Check by type_name (exact match)
const byTypeName = await db.query<{
id: number;
schema_id: number;
jira_type_id: number;
type_name: string | null;
display_name: string;
enabled: boolean | number;
description: string | null;
}>(
`SELECT id, schema_id, jira_type_id, type_name, display_name, enabled, description
FROM object_types
WHERE type_name = ?`,
[typeName]
);
// Check by display_name (case-insensitive, partial match)
const byDisplayName = await db.query<{
id: number;
schema_id: number;
jira_type_id: number;
type_name: string | null;
display_name: string;
enabled: boolean | number;
description: string | null;
}>(
isPostgres
? `SELECT id, schema_id, jira_type_id, type_name, display_name, enabled, description
FROM object_types
WHERE LOWER(display_name) LIKE LOWER(?)`
: `SELECT id, schema_id, jira_type_id, type_name, display_name, enabled, description
FROM object_types
WHERE LOWER(display_name) LIKE LOWER(?)`,
[`%${typeName}%`]
);
// Get schema info for found types
const schemaIds = [...new Set([...byTypeName.map(t => t.schema_id), ...byDisplayName.map(t => t.schema_id)])];
const schemas = schemaIds.length > 0
? await db.query<{ id: number; jira_schema_id: string; name: string }>(
`SELECT id, jira_schema_id, name FROM schemas WHERE id IN (${schemaIds.map(() => '?').join(',')})`,
schemaIds
)
: [];
const schemaMap = new Map(schemas.map(s => [s.id, s]));
// Check enabled types via service
let enabledTypesFromService: string[] = [];
try {
const enabledTypes = await services.schemaSyncService.getEnabledObjectTypes();
enabledTypesFromService = enabledTypes.map(t => t.typeName);
} catch (error) {
logger.error('DebugController: Failed to get enabled types from service', error);
}
// Check if type is in enabled list from service
const isInEnabledList = enabledTypesFromService.includes(typeName);
res.json({
requestedType: typeName,
foundByTypeName: byTypeName.map(t => ({
id: t.id,
schemaId: t.schema_id,
jiraSchemaId: schemaMap.get(t.schema_id)?.jira_schema_id,
schemaName: schemaMap.get(t.schema_id)?.name,
jiraTypeId: t.jira_type_id,
typeName: t.type_name,
displayName: t.display_name,
enabled: t.enabled,
enabledValue: isPostgres ? (t.enabled === true) : (t.enabled === 1),
hasTypeName: !!(t.type_name && t.type_name.trim() !== ''),
description: t.description,
})),
foundByDisplayName: byDisplayName.filter(t => !byTypeName.some(t2 => t2.id === t.id)).map(t => ({
id: t.id,
schemaId: t.schema_id,
jiraSchemaId: schemaMap.get(t.schema_id)?.jira_schema_id,
schemaName: schemaMap.get(t.schema_id)?.name,
jiraTypeId: t.jira_type_id,
typeName: t.type_name,
displayName: t.display_name,
enabled: t.enabled,
enabledValue: isPostgres ? (t.enabled === true) : (t.enabled === 1),
hasTypeName: !!(t.type_name && t.type_name.trim() !== ''),
description: t.description,
})),
diagnosis: {
found: byTypeName.length > 0 || byDisplayName.length > 0,
foundExact: byTypeName.length > 0,
foundByDisplay: byDisplayName.length > 0,
isEnabled: byTypeName.length > 0
? (isPostgres ? (byTypeName[0].enabled === true) : (byTypeName[0].enabled === 1))
: byDisplayName.length > 0
? (isPostgres ? (byDisplayName[0].enabled === true) : (byDisplayName[0].enabled === 1))
: false,
hasTypeName: byTypeName.length > 0
? !!(byTypeName[0].type_name && byTypeName[0].type_name.trim() !== '')
: byDisplayName.length > 0
? !!(byDisplayName[0].type_name && byDisplayName[0].type_name.trim() !== '')
: false,
isInEnabledList,
issue: !isInEnabledList && (byTypeName.length > 0 || byDisplayName.length > 0)
? (byTypeName.length > 0 && !(byTypeName[0].type_name && byTypeName[0].type_name.trim() !== '')
? 'Type is enabled in database but has missing type_name (will be filtered out)'
: byTypeName.length > 0 && !(isPostgres ? (byTypeName[0].enabled === true) : (byTypeName[0].enabled === 1))
? 'Type exists but is not enabled in database'
: 'Type exists but not found in enabled list (may have missing type_name)')
: !isInEnabledList && byTypeName.length === 0 && byDisplayName.length === 0
? 'Type not found in database'
: 'No issues detected',
},
enabledTypesCount: enabledTypesFromService.length,
enabledTypesList: enabledTypesFromService,
});
} catch (error) {
logger.error(`DebugController: Failed to diagnose object type ${req.params.typeName}`, error);
res.status(500).json({
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
/**
* Fix object types with missing type_name
* POST /api/v2/debug/fix-missing-type-names
* This will try to fix object types that have NULL type_name by looking up by display_name
*/
async fixMissingTypeNames(req: Request, res: Response): Promise<void> {
try {
const services = getServices();
const db = services.schemaRepo.db;
// Find all object types with NULL or empty type_name
// Also check for enabled ones specifically
const isPostgres = db.isPostgres === true;
const enabledCondition = isPostgres ? 'enabled IS true' : 'enabled = 1';
const brokenTypes = await db.query<{
id: number;
jira_type_id: number;
display_name: string;
type_name: string | null;
enabled: boolean | number;
}>(
`SELECT id, jira_type_id, display_name, type_name, enabled
FROM object_types
WHERE (type_name IS NULL OR type_name = '')
ORDER BY enabled DESC, display_name`
);
// Also check enabled types specifically
const enabledWithNullTypeName = await db.query<{
id: number;
jira_type_id: number;
display_name: string;
type_name: string | null;
enabled: boolean | number;
}>(
`SELECT id, jira_type_id, display_name, type_name, enabled
FROM object_types
WHERE (type_name IS NULL OR type_name = '') AND ${enabledCondition}`
);
if (enabledWithNullTypeName.length > 0) {
logger.warn(`DebugController: Found ${enabledWithNullTypeName.length} ENABLED object types with missing type_name: ${enabledWithNullTypeName.map(t => t.display_name).join(', ')}`);
}
logger.info(`DebugController: Found ${brokenTypes.length} object types with missing type_name`);
const fixes: Array<{ id: number; displayName: string; fixedTypeName: string }> = [];
const errors: Array<{ id: number; error: string }> = [];
for (const broken of brokenTypes) {
try {
// Generate type_name from display_name using toPascalCase
const { toPascalCase } = await import('../../services/schemaUtils.js');
const fixedTypeName = toPascalCase(broken.display_name);
if (!fixedTypeName || fixedTypeName.trim() === '') {
errors.push({
id: broken.id,
error: `Could not generate type_name from display_name: "${broken.display_name}"`,
});
continue;
}
// Update the record
await db.execute(
`UPDATE object_types SET type_name = ?, updated_at = ? WHERE id = ?`,
[fixedTypeName, new Date().toISOString(), broken.id]
);
fixes.push({
id: broken.id,
displayName: broken.display_name,
fixedTypeName,
});
logger.info(`DebugController: Fixed object type id=${broken.id}, display_name="${broken.display_name}" -> type_name="${fixedTypeName}"`);
} catch (error) {
errors.push({
id: broken.id,
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
// Re-fetch enabled types to verify fix (reuse services from line 294)
const enabledTypesAfterFix = await services.schemaSyncService.getEnabledObjectTypes();
res.json({
success: true,
fixed: fixes.length,
errors: errors.length,
fixes,
errors: errors.length > 0 ? errors : undefined,
enabledTypesAfterFix: enabledTypesAfterFix.map(t => t.typeName),
note: enabledWithNullTypeName.length > 0
? `Fixed ${enabledWithNullTypeName.length} enabled types that were missing type_name. They should now appear in enabled types list.`
: undefined,
});
} catch (error) {
logger.error('DebugController: Failed to fix missing type names', error);
res.status(500).json({
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
}

View File

@@ -0,0 +1,54 @@
/**
* HealthController - API health check endpoint
*
* Public endpoint (no auth required) to check if V2 API is working.
*/
import { Request, Response } from 'express';
import { logger } from '../../services/logger.js';
import { getServices } from '../../services/ServiceFactory.js';
export class HealthController {
  /**
   * Health check endpoint.
   * GET /api/v2/health
   *
   * Public (no auth). Reports service initialization, database reachability,
   * and the USE_V2_API feature flag.
   */
  async health(_req: Request, res: Response): Promise<void> {
    try {
      const services = getServices();
      // Services count as initialized once the query service exists.
      const initialized = Boolean(services.queryService);
      // A cheap schema query doubles as a database connectivity probe.
      let database = 'disconnected';
      try {
        await services.schemaRepo.getAllSchemas();
        database = 'connected';
      } catch (error) {
        logger.warn('V2 Health: Database connection check failed', error);
      }
      res.json({
        status: 'ok',
        apiVersion: 'v2',
        timestamp: new Date().toISOString(),
        services: {
          initialized,
          database,
        },
        featureFlag: {
          useV2Api: process.env.USE_V2_API === 'true',
        },
      });
    } catch (error) {
      logger.error('V2 Health: Health check failed', error);
      res.status(500).json({
        status: 'error',
        apiVersion: 'v2',
        timestamp: new Date().toISOString(),
        error: 'Health check failed',
      });
    }
  }
}

View File

@@ -0,0 +1,176 @@
/**
* ObjectsController - API handlers for object operations
*
* NO SQL, NO parsing - delegates to services.
*/
import { Request, Response } from 'express';
import { logger } from '../../services/logger.js';
import { getServices } from '../../services/ServiceFactory.js';
import type { CMDBObject, CMDBObjectTypeName } from '../../generated/jira-types.js';
import { getParamString, getQueryString, getQueryNumber } from '../../utils/queryHelpers.js';
export class ObjectsController {
  /**
   * Get a single object by ID or objectKey.
   * GET /api/v2/objects/:type/:id?refresh=true
   *
   * The :id segment may be either the internal object ID or a Jira objectKey;
   * the key lookup is tried when the ID lookup misses. ?refresh=true forces a
   * re-fetch from Jira before reading the cache.
   */
  async getObject(req: Request, res: Response): Promise<void> {
    try {
      const type = getParamString(req, 'type') as CMDBObjectTypeName;
      const idOrKey = getParamString(req, 'id');
      const forceRefresh = getQueryString(req, 'refresh') === 'true';
      const services = getServices();
      // Resolve to an internal object ID: direct ID first, then objectKey.
      let objectId = idOrKey;
      let cached = await services.cacheRepo.getObject(idOrKey);
      if (!cached) {
        cached = await services.cacheRepo.getObjectByKey(idOrKey);
        if (cached) {
          objectId = cached.id;
        }
      }
      // Optionally refresh from Jira before serving.
      if (forceRefresh && objectId) {
        const enabledTypes = await services.schemaRepo.getEnabledObjectTypes();
        const enabledTypeSet = new Set(enabledTypes.map(t => t.typeName));
        const refreshResult = await services.refreshService.refreshObject(objectId, enabledTypeSet);
        if (!refreshResult.success) {
          res.status(500).json({ error: refreshResult.error || 'Failed to refresh object' });
          return;
        }
      }
      if (!objectId) {
        res.status(404).json({ error: 'Object not found (by ID or key)' });
        return;
      }
      const object = await services.queryService.getObject<CMDBObject>(type, objectId);
      if (!object) {
        res.status(404).json({ error: 'Object not found' });
        return;
      }
      res.json(object);
    } catch (error) {
      logger.error('ObjectsController: Failed to get object', error);
      res.status(500).json({ error: 'Failed to get object' });
    }
  }

  /**
   * List objects of a type, optionally filtered by a label search term.
   * GET /api/v2/objects/:type?limit=100&offset=0&search=term
   */
  async getObjects(req: Request, res: Response): Promise<void> {
    try {
      const type = getParamString(req, 'type') as CMDBObjectTypeName;
      const limit = getQueryNumber(req, 'limit', 1000);
      const offset = getQueryNumber(req, 'offset', 0);
      const search = getQueryString(req, 'search');
      const services = getServices();
      logger.info(`ObjectsController.getObjects: Querying for type="${type}" with limit=${limit}, offset=${offset}, search=${search || 'none'}`);
      // Either a label search or a plain paginated listing.
      const objects: CMDBObject[] = search
        ? await services.queryService.searchByLabel<CMDBObject>(type, search, { limit, offset })
        : await services.queryService.getObjects<CMDBObject>(type, { limit, offset });
      const totalCount = await services.queryService.countObjects(type);
      logger.info(`ObjectsController.getObjects: Found ${objects.length} objects of type "${type}" (total count: ${totalCount})`);
      // Empty result: log which types DO exist, to help debug type-name mismatches.
      if (objects.length === 0) {
        try {
          const availableTypes = await services.cacheRepo.db.query<{ object_type_name: string; count: number }>(
            `SELECT object_type_name, COUNT(*) as count
             FROM objects
             GROUP BY object_type_name
             ORDER BY count DESC
             LIMIT 10`
          );
          if (availableTypes.length > 0) {
            logger.warn(`ObjectsController.getObjects: No objects found for type "${type}". Available types in database:`, {
              requestedType: type,
              availableTypes: availableTypes.map(t => ({ typeName: t.object_type_name, count: t.count })),
            });
          }
        } catch (error) {
          logger.debug('ObjectsController.getObjects: Failed to query available types', error);
        }
      }
      res.json({
        objectType: type,
        objects,
        count: objects.length,
        totalCount,
        offset,
        limit,
      });
    } catch (error) {
      logger.error('ObjectsController: Failed to get objects', error);
      res.status(500).json({ error: 'Failed to get objects' });
    }
  }

  /**
   * Update an object (write-through to Jira, then re-read from cache).
   * PUT /api/v2/objects/:type/:id
   */
  async updateObject(req: Request, res: Response): Promise<void> {
    try {
      const type = getParamString(req, 'type') as CMDBObjectTypeName;
      const id = getParamString(req, 'id');
      const updates = req.body as Record<string, unknown>;
      const services = getServices();
      const result = await services.writeThroughService.updateObject(type, id, updates);
      if (!result.success) {
        res.status(400).json({ error: result.error || 'Failed to update object' });
        return;
      }
      // Return the refreshed object; fall back to a bare success marker.
      const updated = await services.queryService.getObject<CMDBObject>(type, id);
      res.json(updated || { success: true });
    } catch (error) {
      logger.error('ObjectsController: Failed to update object', error);
      res.status(500).json({ error: 'Failed to update object' });
    }
  }
}

View File

@@ -0,0 +1,277 @@
/**
* SyncController - API handlers for sync operations
*/
import { Request, Response } from 'express';
import { logger } from '../../services/logger.js';
import { getServices } from '../../services/ServiceFactory.js';
export class SyncController {
  /**
   * Sync all schemas from Jira Assets into the local database.
   * POST /api/v2/sync/schemas
   */
  async syncSchemas(req: Request, res: Response): Promise<void> {
    try {
      const services = getServices();
      const result = await services.schemaSyncService.syncAllSchemas();
      res.json({
        success: true,
        ...result,
      });
    } catch (error) {
      logger.error('SyncController: Failed to sync schemas', error);
      res.status(500).json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }

  /**
   * Sync objects for all enabled object types.
   * POST /api/v2/sync/objects
   *
   * Returns per-type stats plus aggregate totals; 400 when nothing is enabled.
   */
  async syncAllObjects(req: Request, res: Response): Promise<void> {
    try {
      const services = getServices();
      const enabledTypes = await services.schemaSyncService.getEnabledObjectTypes();
      if (enabledTypes.length === 0) {
        res.status(400).json({
          success: false,
          error: 'No object types enabled for syncing. Please configure object types in Schema Configuration.',
        });
        return;
      }
      const results = [];
      let totalObjectsProcessed = 0;
      let totalObjectsCached = 0;
      let totalRelations = 0;
      // Sync each enabled type one at a time, accumulating totals.
      for (const type of enabledTypes) {
        const result = await services.objectSyncService.syncObjectType(
          type.schemaId,
          type.objectTypeId,
          type.typeName,
          type.displayName
        );
        results.push({
          typeName: type.typeName,
          displayName: type.displayName,
          ...result,
        });
        totalObjectsProcessed += result.objectsProcessed;
        totalObjectsCached += result.objectsCached;
        totalRelations += result.relationsExtracted;
      }
      res.json({
        success: true,
        stats: results,
        totalObjectsProcessed,
        totalObjectsCached,
        totalRelations,
      });
    } catch (error) {
      logger.error('SyncController: Failed to sync objects', error);
      res.status(500).json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }

  /**
   * Sync a specific object type by its type_name.
   * POST /api/v2/sync/objects/:typeName
   *
   * Includes extensive diagnostics: when the requested type is not in the
   * enabled list, the 400 response explains whether it is disabled, missing a
   * type_name, or simply unknown.
   */
  async syncObjectType(req: Request, res: Response): Promise<void> {
    try {
      const typeName = req.params.typeName;
      const services = getServices();
      // Enabled types per the sync service; drop entries with a missing typeName.
      let enabledTypes = await services.schemaSyncService.getEnabledObjectTypes();
      enabledTypes = enabledTypes.filter(t => t && t.typeName);
      logger.info(`SyncController: Looking for type "${typeName}" in ${enabledTypes.length} enabled types`);
      logger.debug(`SyncController: Enabled types: ${JSON.stringify(enabledTypes.map(t => ({ typeName: t?.typeName, displayName: t?.displayName })))}`);
      // Cross-check against the database directly, including rows with a
      // missing type_name that the service filters out.
      const db = services.schemaRepo.db;
      const isPostgres = db.isPostgres === true;
      // The enabled flag is a boolean on Postgres and 0/1 on SQLite.
      const enabledCondition = isPostgres ? 'enabled IS true' : 'enabled = 1';
      const dbCheck = await db.query<{ type_name: string | null; display_name: string; enabled: boolean | number; id: number; jira_type_id: number }>(
        `SELECT id, jira_type_id, type_name, display_name, enabled FROM object_types WHERE ${enabledCondition}`
      );
      logger.info(`SyncController: Found ${dbCheck.length} enabled types in database (raw check)`);
      logger.debug(`SyncController: Database enabled types (raw): ${JSON.stringify(dbCheck.map(t => ({ id: t.id, displayName: t.display_name, typeName: t.type_name, hasTypeName: !!(t.type_name && t.type_name.trim() !== '') })))}`);
      // Warn when an enabled row matches the request by display_name only,
      // which usually means its type_name is missing.
      const matchingByDisplayName = dbCheck.filter(t =>
        t.display_name.toLowerCase().includes(typeName.toLowerCase()) ||
        typeName.toLowerCase().includes(t.display_name.toLowerCase())
      );
      if (matchingByDisplayName.length > 0) {
        logger.warn(`SyncController: Found enabled type(s) matching "${typeName}" by display_name but not in enabled list:`, {
          matches: matchingByDisplayName.map(t => ({
            id: t.id,
            displayName: t.display_name,
            typeName: t.type_name,
            hasTypeName: !!(t.type_name && t.type_name.trim() !== ''),
            enabled: t.enabled,
          })),
        });
      }
      const type = enabledTypes.find(t => t && t.typeName === typeName);
      if (!type) {
        // Not in the enabled list: figure out why and report it precisely.
        const allType = await services.schemaRepo.getObjectTypeByTypeName(typeName);
        if (allType) {
          const enabledValue = allType.enabled;
          const enabledType = typeof enabledValue;
          logger.warn(`SyncController: Type "${typeName}" found but not in enabled list. enabled=${enabledValue} (type: ${enabledType}), enabledTypes.length=${enabledTypes.length}`);
          logger.debug(`SyncController: Enabled types details: ${JSON.stringify(enabledTypes)}`);
          // Retry with a case-insensitive match on type_name.
          const caseInsensitiveMatch = enabledTypes.find(t => t && t.typeName && t.typeName.toLowerCase() === typeName.toLowerCase());
          if (caseInsensitiveMatch) {
            logger.warn(`SyncController: Found type with different case: "${caseInsensitiveMatch.typeName}" vs "${typeName}"`);
            // Use the found type with correct case.
            const result = await services.objectSyncService.syncObjectType(
              caseInsensitiveMatch.schemaId,
              caseInsensitiveMatch.objectTypeId,
              caseInsensitiveMatch.typeName,
              caseInsensitiveMatch.displayName
            );
            res.json({
              success: true,
              ...result,
              hasErrors: result.errors.length > 0,
              note: `Type name case corrected: "${typeName}" -> "${caseInsensitiveMatch.typeName}"`,
            });
            return;
          }
          // Verify enabled status and type_name straight from the database.
          const rawCheck = await db.queryOne<{ enabled: boolean | number; type_name: string | null; display_name: string }>(
            `SELECT enabled, type_name, display_name FROM object_types WHERE type_name = ?`,
            [typeName]
          );
          // List of all enabled type names for a more helpful error message.
          const enabledTypeNames = enabledTypes.map(t => t.typeName).filter(Boolean);
          // Enabled-but-missing-type_name is a distinct failure mode with its own fix.
          if (rawCheck && (rawCheck.enabled === true || rawCheck.enabled === 1)) {
            if (!rawCheck.type_name || rawCheck.type_name.trim() === '') {
              res.status(400).json({
                success: false,
                error: `Object type "${typeName}" is enabled in the database but has a missing or empty type_name. This prevents it from being synced. Please run schema sync again to fix the type_name, or use the "Fix Missing Type Names" debug tool (Settings → Debug).`,
                details: {
                  requestedType: typeName,
                  displayName: rawCheck.display_name,
                  enabledInDatabase: rawCheck.enabled,
                  typeNameInDatabase: rawCheck.type_name,
                  enabledTypesCount: enabledTypes.length,
                  enabledTypeNames: enabledTypeNames,
                  hint: 'Run schema sync to ensure all object types have a valid type_name, or use the Debug page to fix missing type names.',
                },
              });
              return;
            }
          }
          res.status(400).json({
            success: false,
            error: `Object type "${typeName}" is not enabled for syncing. Currently enabled types: ${enabledTypeNames.length > 0 ? enabledTypeNames.join(', ') : 'none'}. Please enable "${typeName}" in Schema Configuration settings (Settings → Schema Configuratie).`,
            details: {
              requestedType: typeName,
              enabledInDatabase: rawCheck?.enabled,
              typeNameInDatabase: rawCheck?.type_name,
              enabledTypesCount: enabledTypes.length,
              enabledTypeNames: enabledTypeNames,
              hint: enabledTypeNames.length === 0
                ? 'No object types are currently enabled. Please enable at least one object type in Schema Configuration.'
                : `You enabled: ${enabledTypeNames.join(', ')}. Please enable "${typeName}" if you want to sync it.`,
            },
          });
        } else {
          // Not found by type_name: try display_name. FIX: LOWER(...) LIKE
          // LOWER(...) replaces Postgres-only ILIKE so this also works on SQLite.
          const byDisplayName = await db.queryOne<{ enabled: boolean | number; type_name: string | null; display_name: string }>(
            `SELECT enabled, type_name, display_name FROM object_types WHERE LOWER(display_name) LIKE LOWER(?) LIMIT 1`,
            [`%${typeName}%`]
          );
          if (byDisplayName && (byDisplayName.enabled === true || byDisplayName.enabled === 1)) {
            // Type is enabled but type_name might be missing or different.
            res.status(400).json({
              success: false,
              error: `Found enabled type "${byDisplayName.display_name}" but it has ${byDisplayName.type_name ? `type_name="${byDisplayName.type_name}"` : 'missing type_name'}. ${!byDisplayName.type_name ? 'Please run schema sync to fix the type_name, or use the "Fix Missing Type Names" debug tool.' : `Please use the correct type_name: "${byDisplayName.type_name}"`}`,
              details: {
                requestedType: typeName,
                foundDisplayName: byDisplayName.display_name,
                foundTypeName: byDisplayName.type_name,
                enabledInDatabase: byDisplayName.enabled,
                hint: !byDisplayName.type_name
                  ? 'Run schema sync to ensure all object types have a valid type_name.'
                  : `Use type_name "${byDisplayName.type_name}" instead of "${typeName}"`,
              },
            });
            return;
          }
          res.status(400).json({
            success: false,
            error: `Object type ${typeName} not found. Available enabled types: ${enabledTypes.map(t => t.typeName).join(', ') || 'none'}. Please run schema sync first.`,
          });
        }
        return;
      }
      const result = await services.objectSyncService.syncObjectType(
        type.schemaId,
        type.objectTypeId,
        type.typeName,
        type.displayName
      );
      // Partial failures are reported in result.errors; HTTP status stays 200.
      res.json({
        success: true,
        ...result,
        hasErrors: result.errors.length > 0,
      });
    } catch (error) {
      logger.error(`SyncController: Failed to sync object type ${req.params.typeName}`, error);
      res.status(500).json({
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }
}

View File

@@ -0,0 +1,47 @@
/**
 * V2 API Routes - New refactored architecture
 *
 * Feature flag: USE_V2_API=true enables these routes
 */
import { Router } from 'express';
import { ObjectsController } from '../controllers/ObjectsController.js';
import { SyncController } from '../controllers/SyncController.js';
import { HealthController } from '../controllers/HealthController.js';
import { DebugController } from '../controllers/DebugController.js';
import { requireAuth, requirePermission } from '../../middleware/authorization.js';

const router = Router();

// One controller instance per area, shared by every route below.
const objects = new ObjectsController();
const sync = new SyncController();
const health = new HealthController();
const debug = new DebugController();

// Health check - public endpoint (no auth required)
router.get('/health', health.health.bind(health));

// Everything registered after this point requires authentication.
router.use(requireAuth);

// Object routes
router.get('/objects/:type', requirePermission('search'), objects.getObjects.bind(objects));
router.get('/objects/:type/:id', requirePermission('search'), objects.getObject.bind(objects));
router.put('/objects/:type/:id', requirePermission('write'), objects.updateObject.bind(objects));

// Sync routes (admin only)
router.post('/sync/schemas', requirePermission('admin'), sync.syncSchemas.bind(sync));
router.post('/sync/objects', requirePermission('admin'), sync.syncAllObjects.bind(sync));
router.post('/sync/objects/:typeName', requirePermission('admin'), sync.syncObjectType.bind(sync));

// Debug routes (admin only)
// IMPORTANT: More specific routes must be registered BEFORE parameterized routes.
router.post('/debug/query', requirePermission('admin'), debug.executeQuery.bind(debug));
router.get('/debug/objects', requirePermission('admin'), debug.getObjectInfo.bind(debug));
router.get('/debug/relations', requirePermission('admin'), debug.getRelationInfo.bind(debug));
router.get('/debug/all-object-types', requirePermission('admin'), debug.getAllObjectTypes.bind(debug));
router.post('/debug/fix-missing-type-names', requirePermission('admin'), debug.fixMissingTypeNames.bind(debug));
// Specific routes before parameterized routes
router.get('/debug/object-types/diagnose/:typeName', requirePermission('admin'), debug.diagnoseObjectType.bind(debug));
router.get('/debug/object-types/:typeName/stats', requirePermission('admin'), debug.getObjectTypeStats.bind(debug));

export default router;

View File

@@ -28,7 +28,6 @@ export type JiraAuthMethod = 'pat' | 'oauth';
interface Config {
// Jira Assets
jiraHost: string;
jiraSchemaId: string;
// Jira Service Account Token (for read operations: sync, fetching data)
jiraServiceAccountToken: string;
@@ -90,7 +89,6 @@ function getJiraAuthMethod(): JiraAuthMethod {
export const config: Config = {
// Jira Assets
jiraHost: getOptionalEnvVar('JIRA_HOST', 'https://jira.zuyderland.nl'),
jiraSchemaId: getOptionalEnvVar('JIRA_SCHEMA_ID'),
// Jira Service Account Token (for read operations: sync, fetching data)
jiraServiceAccountToken: getOptionalEnvVar('JIRA_SERVICE_ACCOUNT_TOKEN'),
@@ -130,7 +128,6 @@ export function validateConfig(): void {
if (config.jiraAuthMethod === 'pat') {
// JIRA_PAT is configured in user profiles, not in ENV
warnings.push('JIRA_AUTH_METHOD=pat - users must configure PAT in their profile settings');
} else if (config.jiraAuthMethod === 'oauth') {
if (!config.jiraOAuthClientId) {
missingVars.push('JIRA_OAUTH_CLIENT_ID (required for OAuth authentication)');
@@ -143,16 +140,10 @@ export function validateConfig(): void {
}
}
// General required config
if (!config.jiraSchemaId) missingVars.push('JIRA_SCHEMA_ID');
// Service account token warning (not required, but recommended for sync operations)
if (!config.jiraServiceAccountToken) {
warnings.push('JIRA_SERVICE_ACCOUNT_TOKEN not configured - sync and read operations may not work. Users can still use their personal PAT for reads as fallback.');
}
// AI API keys are configured in user profiles, not in ENV
warnings.push('AI API keys must be configured in user profile settings');
if (warnings.length > 0) {
warnings.forEach(w => console.warn(`Warning: ${w}`));

View File

@@ -0,0 +1,121 @@
// ==========================
// API Payload Types
// ==========================
/** Top-level payload returned by Jira Assets object list/search endpoints. */
export interface AssetsPayload {
  objectEntries: ObjectEntry[];
}

/** One Jira Assets object, with optionally included attribute values. */
export interface ObjectEntry {
  id: string | number;
  /** Unique object key (distinct from the numeric/string id). */
  objectKey: string;
  /** Display label of the object. */
  label: string;
  /** Object type this entry belongs to. */
  objectType: {
    id: number;
    name: string;
  };
  created: string;
  updated: string;
  hasAvatar: boolean;
  timestamp: number;
  /** Present only when the request asked for attributes to be included. */
  attributes?: ObjectAttribute[];
}

/** One attribute of an object; may carry multiple values. */
export interface ObjectAttribute {
  id: number;
  /** Id of the attribute definition on the object type. */
  objectTypeAttributeId: number;
  objectAttributeValues: ObjectAttributeValue[];
}

// ==========================
// Attribute Value Union
// ==========================

/**
 * Union of the value shapes Jira Assets can return for a single attribute
 * value. Narrow with the type guards below before accessing variant fields.
 */
export type ObjectAttributeValue =
  | SimpleValue
  | StatusValue
  | ConfluenceValue
  | UserValue
  | ReferenceValue;

/** Plain scalar value (text, number, boolean, ...). */
export interface SimpleValue {
  value: string | number | boolean;
  searchValue: string;
  referencedType: false;
  displayValue: string;
}

/** Value of a "Status" typed attribute. */
export interface StatusValue {
  status: { id: number; name: string; category: number };
  searchValue: string;
  referencedType: boolean;
  displayValue: string;
}

/** Value linking to a Confluence page. */
export interface ConfluenceValue {
  confluencePage: { id: string; title: string; url: string };
  searchValue: string;
  referencedType: boolean;
  displayValue: string;
}

/** Value referencing a Jira user. */
export interface UserValue {
  user: {
    avatarUrl: string;
    displayName: string;
    name: string;
    key: string;
    renderedLink: string;
    isDeleted: boolean;
  };
  searchValue: string;
  referencedType: boolean;
  displayValue: string;
}

/** Value referencing another Assets object (`referencedType` is always true). */
export interface ReferenceValue {
  referencedObject: ReferencedObject;
  searchValue: string;
  referencedType: true;
  displayValue: string;
}

/** Target of a reference value; a (possibly partial) object entry. */
export interface ReferencedObject {
  id: string | number;
  objectKey: string;
  label: string;
  name?: string;
  archived?: boolean;
  objectType: {
    id: number;
    name: string;
  };
  created: string;
  updated: string;
  timestamp: number;
  hasAvatar: boolean;
  /** Present only when attributes were included with sufficient depth. */
  attributes?: ObjectAttribute[];
  _links?: { self: string };
}

// ==========================
// Type Guards (MANDATORY)
// ==========================
/**
 * Type guard: true when the attribute value wraps a referenced object.
 */
export function isReferenceValue(
  v: ObjectAttributeValue
): v is ReferenceValue {
  const candidate = v as ReferenceValue;
  return candidate.referencedObject !== undefined;
}
/**
 * Type guard: true when the attribute value is a plain scalar payload,
 * identified by the presence of its `value` field.
 */
export function isSimpleValue(
  v: ObjectAttributeValue
): v is SimpleValue {
  const { value } = v as SimpleValue;
  return value !== undefined;
}
/**
 * Type guard: true when the object actually carries an attributes array.
 * Useful because `attributes` is optional on both union members.
 */
export function hasAttributes(
  obj: ObjectEntry | ReferencedObject
): obj is (ObjectEntry | ReferencedObject) & { attributes: ObjectAttribute[] } {
  // `attributes` is declared (optional) on both ObjectEntry and
  // ReferencedObject, so direct access is type-safe — no `any` cast needed.
  // Array.isArray also safely rejects undefined/null/non-array values.
  return Array.isArray(obj.attributes);
}

View File

@@ -0,0 +1,38 @@
/**
* Sync Policy - Determines how objects are handled during sync
*/
export enum SyncPolicy {
  /** Full sync: fetch every object of the type and cache all attributes. */
  ENABLED = 'enabled',
  /** Cache only minimal metadata, for objects referenced by enabled types. */
  REFERENCE_ONLY = 'reference_only',
  /** Do not sync the object type at all. */
  SKIP = 'skip',
}

/**
 * Resolve the sync policy for a single object type.
 *
 * @param typeName     name of the object type being considered
 * @param enabledTypes set of type names enabled in the schema configuration
 * @returns ENABLED for configured types, REFERENCE_ONLY otherwise. Disabled
 *   types are still cached minimally so references from enabled types can be
 *   resolved without a full sync — this function never returns SKIP.
 */
export function getSyncPolicy(
  typeName: string,
  enabledTypes: Set<string>
): SyncPolicy {
  return enabledTypes.has(typeName)
    ? SyncPolicy.ENABLED
    : SyncPolicy.REFERENCE_ONLY;
}

View File

@@ -8,18 +8,12 @@
-- =============================================================================
-- Core Tables
-- =============================================================================
-- Cached CMDB objects (all types stored in single table with JSON data)
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSONB NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
--
-- NOTE: This schema is LEGACY and deprecated.
-- The current system uses the normalized schema defined in
-- backend/src/services/database/normalized-schema.ts
--
-- This file is kept for reference and migration purposes only.
-- Object relations (references between objects)
CREATE TABLE IF NOT EXISTS object_relations (
@@ -43,12 +37,6 @@ CREATE TABLE IF NOT EXISTS sync_metadata (
-- Indices for Performance
-- =============================================================================
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
CREATE INDEX IF NOT EXISTS idx_objects_data_gin ON cached_objects USING GIN (data);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);

View File

@@ -7,18 +7,12 @@
-- =============================================================================
-- Core Tables
-- =============================================================================
-- Cached CMDB objects (all types stored in single table with JSON data)
CREATE TABLE IF NOT EXISTS cached_objects (
id TEXT PRIMARY KEY,
object_key TEXT NOT NULL UNIQUE,
object_type TEXT NOT NULL,
label TEXT NOT NULL,
data JSON NOT NULL,
jira_updated_at TEXT,
jira_created_at TEXT,
cached_at TEXT NOT NULL
);
--
-- NOTE: This schema is LEGACY and deprecated.
-- The current system uses the normalized schema defined in
-- backend/src/services/database/normalized-schema.ts
--
-- This file is kept for reference and migration purposes only.
-- Object relations (references between objects)
CREATE TABLE IF NOT EXISTS object_relations (
@@ -42,11 +36,6 @@ CREATE TABLE IF NOT EXISTS sync_metadata (
-- Indices for Performance
-- =============================================================================
CREATE INDEX IF NOT EXISTS idx_objects_type ON cached_objects(object_type);
CREATE INDEX IF NOT EXISTS idx_objects_key ON cached_objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_updated ON cached_objects(jira_updated_at);
CREATE INDEX IF NOT EXISTS idx_objects_label ON cached_objects(label);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_type);

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -6,7 +6,6 @@ import cookieParser from 'cookie-parser';
import { config, validateConfig } from './config/env.js';
import { logger } from './services/logger.js';
import { dataService } from './services/dataService.js';
import { syncEngine } from './services/syncEngine.js';
import { cmdbService } from './services/cmdbService.js';
import applicationsRouter from './routes/applications.js';
import classificationsRouter from './routes/classifications.js';
@@ -22,6 +21,8 @@ import searchRouter from './routes/search.js';
import cacheRouter from './routes/cache.js';
import objectsRouter from './routes/objects.js';
import schemaRouter from './routes/schema.js';
import dataValidationRouter from './routes/dataValidation.js';
import schemaConfigurationRouter from './routes/schemaConfiguration.js';
import { runMigrations } from './services/database/migrations.js';
// Validate configuration
@@ -63,8 +64,10 @@ app.use(authMiddleware);
// Set user token and settings on services for each request
app.use(async (req, res, next) => {
// Set user's OAuth token if available (for OAuth sessions)
let userToken: string | null = null;
if (req.accessToken) {
cmdbService.setUserToken(req.accessToken);
userToken = req.accessToken;
}
// Set user's Jira PAT and AI keys if user is authenticated and has local account
@@ -75,15 +78,12 @@ app.use(async (req, res, next) => {
if (settings?.jira_pat) {
// Use user's Jira PAT from profile settings (preferred for writes)
cmdbService.setUserToken(settings.jira_pat);
userToken = settings.jira_pat;
} else if (config.jiraServiceAccountToken) {
// Fallback to service account token if user doesn't have PAT configured
// This allows writes to work when JIRA_SERVICE_ACCOUNT_TOKEN is set in .env
cmdbService.setUserToken(config.jiraServiceAccountToken);
userToken = config.jiraServiceAccountToken;
logger.debug('Using service account token as fallback (user PAT not configured)');
} else {
// No token available - clear token
cmdbService.setUserToken(null);
}
// Store user settings in request for services to access
@@ -92,18 +92,35 @@ app.use(async (req, res, next) => {
// If user settings can't be loaded, try service account token as fallback
logger.debug('Failed to load user settings:', error);
if (config.jiraServiceAccountToken) {
cmdbService.setUserToken(config.jiraServiceAccountToken);
userToken = config.jiraServiceAccountToken;
logger.debug('Using service account token as fallback (user settings load failed)');
} else {
cmdbService.setUserToken(null);
}
}
}
// Set token on old services (for backward compatibility)
if (userToken) {
cmdbService.setUserToken(userToken);
} else {
// No user authenticated - clear token
cmdbService.setUserToken(null);
}
// Set token on new V2 infrastructure client (if feature flag enabled)
if (process.env.USE_V2_API === 'true') {
try {
const { jiraAssetsClient } = await import('./infrastructure/jira/JiraAssetsClient.js');
jiraAssetsClient.setRequestToken(userToken);
// Clear token after response
res.on('finish', () => {
jiraAssetsClient.clearRequestToken();
});
} catch (error) {
// V2 API not loaded - ignore
}
}
// Clear token after response is sent
// Clear token after response is sent (for old services)
res.on('finish', () => {
cmdbService.clearUserToken();
});
@@ -119,8 +136,8 @@ app.get('/health', async (req, res) => {
res.json({
status: 'ok',
timestamp: new Date().toISOString(),
dataSource: dataService.isUsingJiraAssets() ? 'jira-assets-cached' : 'mock-data',
jiraConnected: dataService.isUsingJiraAssets() ? jiraConnected : null,
dataSource: 'jira-assets-cached', // Always uses Jira Assets (mock data removed)
jiraConnected: jiraConnected,
aiConfigured: true, // AI is configured per-user in profile settings
cache: {
isWarm: cacheStatus.isWarm,
@@ -152,6 +169,38 @@ app.use('/api/search', searchRouter);
app.use('/api/cache', cacheRouter);
app.use('/api/objects', objectsRouter);
app.use('/api/schema', schemaRouter);
app.use('/api/data-validation', dataValidationRouter);
app.use('/api/schema-configuration', schemaConfigurationRouter);
// V2 API routes (new refactored architecture) - Feature flag: USE_V2_API
const useV2Api = process.env.USE_V2_API === 'true';
const useV2ApiEnv = process.env.USE_V2_API || 'not set';
logger.info(`V2 API feature flag: USE_V2_API=${useV2ApiEnv} (enabled: ${useV2Api})`);
if (useV2Api) {
try {
logger.debug('Loading V2 API routes from ./api/routes/v2.js...');
const v2Router = (await import('./api/routes/v2.js')).default;
if (!v2Router) {
logger.error('❌ V2 API router is undefined - route file did not export default router');
} else {
app.use('/api/v2', v2Router);
logger.info('✅ V2 API routes enabled and mounted at /api/v2');
logger.debug('V2 API router type:', typeof v2Router, 'is function:', typeof v2Router === 'function');
}
} catch (error) {
logger.error('❌ Failed to load V2 API routes', error);
if (error instanceof Error) {
logger.error('Error details:', {
message: error.message,
stack: error.stack,
name: error.name,
});
}
}
} else {
logger.info(` V2 API routes disabled (USE_V2_API=${useV2ApiEnv}, set USE_V2_API=true to enable)`);
}
// Error handling
app.use((err: Error, req: express.Request, res: express.Response, next: express.NextFunction) => {
@@ -164,7 +213,20 @@ app.use((err: Error, req: express.Request, res: express.Response, next: express.
// 404 handler
app.use((req, res) => {
res.status(404).json({ error: 'Not found' });
// Provide helpful error messages for V2 API routes
if (req.path.startsWith('/api/v2/')) {
const useV2Api = process.env.USE_V2_API === 'true';
if (!useV2Api) {
res.status(404).json({
error: 'V2 API routes are not enabled',
message: 'Please set USE_V2_API=true in environment variables and restart the server to use V2 API endpoints.',
path: req.path,
});
return;
}
}
res.status(404).json({ error: 'Not found', path: req.path });
});
// Start server
@@ -173,26 +235,51 @@ app.listen(PORT, async () => {
logger.info(`Server running on http://localhost:${PORT}`);
logger.info(`Environment: ${config.nodeEnv}`);
logger.info(`AI Classification: Configured per-user in profile settings`);
logger.info(`Jira Assets: ${config.jiraSchemaId ? 'Schema configured - users configure PAT in profile' : 'Schema not configured'}`);
// Run database migrations
// Log V2 API feature flag status
const useV2ApiEnv = process.env.USE_V2_API || 'not set';
const useV2ApiEnabled = process.env.USE_V2_API === 'true';
logger.info(`V2 API Feature Flag: USE_V2_API=${useV2ApiEnv} (${useV2ApiEnabled ? '✅ ENABLED' : '❌ DISABLED'})`);
// Check if schemas exist in database
// Note: Schemas table may not exist yet if schema hasn't been initialized
let hasSchemas = false;
try {
await runMigrations();
logger.info('Database migrations completed');
const { normalizedCacheStore } = await import('./services/normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (db) {
await db.ensureInitialized?.();
try {
const schemaRow = await db.queryOne<{ count: number }>(
`SELECT COUNT(*) as count FROM schemas`
);
hasSchemas = (schemaRow?.count || 0) > 0;
} catch (tableError: any) {
// If schemas table doesn't exist yet, that's okay - schema hasn't been initialized
if (tableError?.message?.includes('does not exist') ||
tableError?.message?.includes('relation') ||
tableError?.code === '42P01') { // PostgreSQL: undefined table
logger.debug('Schemas table does not exist yet (will be created by migrations)');
hasSchemas = false;
} else {
throw tableError; // Re-throw other errors
}
}
}
} catch (error) {
logger.error('Failed to run database migrations', error);
logger.debug('Failed to check if schemas exist in database (table may not exist yet)', error);
}
// Initialize sync engine if Jira schema is configured
// Note: Sync engine will only sync when users with configured Jira PATs make requests
// This prevents unauthorized Jira API calls
if (config.jiraSchemaId) {
try {
await syncEngine.initialize();
logger.info('Sync Engine: Initialized (sync on-demand per user request)');
} catch (error) {
logger.error('Failed to initialize sync engine', error);
}
logger.info(`Jira Assets: ${hasSchemas ? 'Schemas configured in database - users configure PAT in profile' : 'No schemas configured - use Schema Configuration page to discover schemas'}`);
logger.info('Sync: All syncs must be triggered manually from the GUI (no auto-start)');
logger.info('Data: All data comes from Jira Assets API (mock data removed)');
// Run database migrations FIRST to create schemas table before other services try to use it
try {
logger.info('Running database migrations...');
await runMigrations();
logger.info('✅ Database migrations completed');
} catch (error) {
logger.error('❌ Failed to run database migrations', error);
}
});
@@ -200,8 +287,7 @@ app.listen(PORT, async () => {
const shutdown = () => {
logger.info('Shutdown signal received: stopping services...');
// Stop sync engine
syncEngine.stop();
// Note: No sync engine to stop - syncs are only triggered from GUI
logger.info('Services stopped, exiting');
process.exit(0);

View File

@@ -0,0 +1,330 @@
/**
* JiraAssetsClient - Pure HTTP API client
*
* NO business logic, NO parsing, NO caching.
* Only HTTP requests to Jira Assets API.
*/
import { config } from '../../config/env.js';
import { logger } from '../../services/logger.js';
import type { AssetsPayload, ObjectEntry } from '../../domain/jiraAssetsPayload.js';
/**
 * Body of a PUT /object/{id} request to the Jira Assets (Insight) API.
 */
export interface JiraUpdatePayload {
  // Optional in the update payload.
  objectTypeId?: number;
  // One entry per attribute to update, addressed by its attribute-definition id.
  attributes: Array<{
    objectTypeAttributeId: number;
    objectAttributeValues: Array<{ value?: string }>;
  }>;
}
export class JiraAssetsClient {
  /** Base URL for the Insight REST API, derived from the configured Jira host. */
  private baseUrl: string;
  /** Shared service-account token from env config (preferred for reads). */
  private serviceAccountToken: string | null = null;
  /** Per-request user token (PAT/OAuth); required for write operations. */
  private requestToken: string | null = null;

  constructor() {
    this.baseUrl = `${config.jiraHost}/rest/insight/1.0`;
    this.serviceAccountToken = config.jiraServiceAccountToken || null;
  }

  /** Attach the current user's token for the duration of one HTTP request. */
  setRequestToken(token: string | null): void {
    this.requestToken = token;
  }

  /** Drop the per-request token (called when the response finishes). */
  clearRequestToken(): void {
    this.requestToken = null;
  }

  /** True when any token (service account or per-request) is available. */
  hasToken(): boolean {
    return !!(this.serviceAccountToken || this.requestToken);
  }

  /** True only when a per-request user token is set (writes require this). */
  hasUserToken(): boolean {
    return !!this.requestToken;
  }

  /**
   * Build Bearer-auth headers.
   *
   * Writes require the user's personal token; reads prefer the service
   * account token and fall back to the user token.
   *
   * @throws Error when no suitable token is configured.
   */
  private getHeaders(forWrite: boolean = false): Record<string, string> {
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
      'Accept': 'application/json',
    };
    if (forWrite) {
      if (!this.requestToken) {
        throw new Error('Jira Personal Access Token not configured. Please configure it in your user settings to enable saving changes to Jira.');
      }
      headers['Authorization'] = `Bearer ${this.requestToken}`;
    } else {
      const token = this.serviceAccountToken || this.requestToken;
      if (!token) {
        throw new Error('Jira token not configured. Please configure JIRA_SERVICE_ACCOUNT_TOKEN in .env or a Personal Access Token in your user settings.');
      }
      headers['Authorization'] = `Bearer ${token}`;
    }
    return headers;
  }

  /**
   * Get a single object by ID (attributes included, depth 2).
   *
   * @returns the object, or null when Jira responds 404.
   * @throws Error for any other non-OK response.
   */
  async getObject(objectId: string): Promise<ObjectEntry | null> {
    try {
      const url = `/object/${objectId}?includeAttributes=true&includeAttributesDeep=2`;
      const response = await fetch(`${this.baseUrl}${url}`, {
        headers: this.getHeaders(false),
      });
      if (!response.ok) {
        if (response.status === 404) {
          return null;
        }
        const text = await response.text();
        throw new Error(`Jira API error ${response.status}: ${text}`);
      }
      return await response.json() as ObjectEntry;
    } catch (error) {
      // Log with context, then rethrow so callers can decide how to recover.
      logger.error(`JiraAssetsClient: Failed to get object ${objectId}`, error);
      throw error;
    }
  }

  /**
   * Search objects using IQL/AQL.
   *
   * Data Center hosts (anything not *.atlassian.net) try the AQL GET endpoint
   * first and fall back to the IQL endpoint; Cloud hosts POST to AQL.
   *
   * @param iql      IQL/AQL query string
   * @param schemaId Jira object schema id (required, non-empty)
   * @param options  1-based page and page size (default 1 / 50)
   * @throws Error when schemaId is missing/empty or Jira returns an error.
   */
  async searchObjects(
    iql: string,
    schemaId: string,
    options: {
      page?: number;
      pageSize?: number;
    } = {}
  ): Promise<{ objectEntries: ObjectEntry[]; totalCount: number; hasMore: boolean }> {
    // Validate schemaId is provided and not empty
    if (!schemaId || schemaId.trim() === '') {
      throw new Error('Schema ID is required and cannot be empty. This usually means the object type is not properly associated with a schema. Please run schema sync first.');
    }
    const { page = 1, pageSize = 50 } = options;
    // Detect API type (Data Center vs Cloud) based on host
    const isDataCenter = !config.jiraHost.includes('atlassian.net');
    let response: { objectEntries?: ObjectEntry[]; totalCount?: number; totalFilterCount?: number };
    if (isDataCenter) {
      // Data Center: Try AQL first, fallback to IQL
      try {
        const params = new URLSearchParams({
          qlQuery: iql,
          page: page.toString(),
          resultPerPage: pageSize.toString(),
          includeAttributes: 'true',
          includeAttributesDeep: '2',
          objectSchemaId: schemaId,
        });
        const url = `${this.baseUrl}/aql/objects?${params.toString()}`;
        const httpResponse = await fetch(url, {
          headers: this.getHeaders(false),
        });
        if (!httpResponse.ok) {
          const errorText = await httpResponse.text();
          const errorMessage = errorText || `AQL failed: ${httpResponse.status}`;
          logger.warn(`JiraAssetsClient: AQL query failed (${httpResponse.status}): ${errorMessage}. Query: ${iql}`);
          throw new Error(errorMessage);
        }
        response = await httpResponse.json() as { objectEntries?: ObjectEntry[]; totalCount?: number; totalFilterCount?: number };
      } catch (error) {
        const errorMessage = error instanceof Error ? error.message : String(error);
        logger.warn(`JiraAssetsClient: AQL endpoint failed, falling back to IQL. Error: ${errorMessage}`, error);
        const params = new URLSearchParams({
          iql,
          page: page.toString(),
          resultPerPage: pageSize.toString(),
          includeAttributes: 'true',
          includeAttributesDeep: '2',
          objectSchemaId: schemaId,
        });
        const url = `${this.baseUrl}/iql/objects?${params.toString()}`;
        const httpResponse = await fetch(url, {
          headers: this.getHeaders(false),
        });
        if (!httpResponse.ok) {
          const text = await httpResponse.text();
          throw new Error(`Jira API error ${httpResponse.status}: ${text}`);
        }
        response = await httpResponse.json() as { objectEntries?: ObjectEntry[]; totalCount?: number; totalFilterCount?: number };
      }
    } else {
      // Jira Cloud: POST to AQL endpoint
      const url = `${this.baseUrl}/aql/objects`;
      const requestBody = {
        qlQuery: iql,
        page,
        resultPerPage: pageSize,
        includeAttributes: true,
        includeAttributesDeep: 2,
        objectSchemaId: schemaId,
      };
      const httpResponse = await fetch(url, {
        method: 'POST',
        headers: this.getHeaders(false),
        body: JSON.stringify(requestBody),
      });
      if (!httpResponse.ok) {
        const text = await httpResponse.text();
        const errorMessage = text || `Jira API error ${httpResponse.status}`;
        logger.warn(`JiraAssetsClient: AQL query failed (${httpResponse.status}): ${errorMessage}. Query: ${iql}`);
        throw new Error(errorMessage);
      }
      response = await httpResponse.json() as { objectEntries?: ObjectEntry[]; totalCount?: number; totalFilterCount?: number };
    }
    // BUG FIX: the API may omit objectEntries entirely — guard before reading
    // .length (previously this line dereferenced response.objectEntries
    // unconditionally and could throw a TypeError).
    const entries = response.objectEntries ?? [];
    const totalCount = response.totalFilterCount || response.totalCount || 0;
    // Heuristic: a full page plus a running offset below the reported total
    // means more pages exist.
    const hasMore = entries.length === pageSize && page * pageSize < totalCount;
    return {
      objectEntries: entries,
      totalCount,
      hasMore,
    };
  }

  /**
   * Update an object (PUT). Requires the user's personal token.
   *
   * @throws Error when no user token is set or Jira rejects the update.
   */
  async updateObject(objectId: string, payload: JiraUpdatePayload): Promise<void> {
    if (!this.hasUserToken()) {
      throw new Error('Jira Personal Access Token not configured. Please configure it in your user settings to enable saving changes to Jira.');
    }
    const url = `${this.baseUrl}/object/${objectId}`;
    const response = await fetch(url, {
      method: 'PUT',
      headers: this.getHeaders(true),
      body: JSON.stringify(payload),
    });
    if (!response.ok) {
      const text = await response.text();
      throw new Error(`Jira API error ${response.status}: ${text}`);
    }
  }

  /**
   * Get all object schemas visible to the current token.
   */
  async getSchemas(): Promise<Array<{ id: string; name: string; description?: string }>> {
    const url = `${this.baseUrl}/objectschema/list`;
    const response = await fetch(url, {
      headers: this.getHeaders(false),
    });
    if (!response.ok) {
      const text = await response.text();
      throw new Error(`Jira API error ${response.status}: ${text}`);
    }
    return await response.json() as Array<{ id: string; name: string; description?: string }>;
  }

  /**
   * Get object types for a schema.
   *
   * Tries the /flat endpoint first, falls back to the nested endpoint, and
   * normalizes both response shapes (bare array or `{ objectTypes: [...] }`).
   */
  async getObjectTypes(schemaId: string): Promise<Array<{
    id: number;
    name: string;
    description?: string;
    objectCount?: number;
    parentObjectTypeId?: number;
    abstractObjectType?: boolean;
  }>> {
    // Try flat endpoint first
    let url = `${this.baseUrl}/objectschema/${schemaId}/objecttypes/flat`;
    let response = await fetch(url, {
      headers: this.getHeaders(false),
    });
    if (!response.ok) {
      // Fallback to regular endpoint
      url = `${this.baseUrl}/objectschema/${schemaId}/objecttypes`;
      response = await fetch(url, {
        headers: this.getHeaders(false),
      });
    }
    if (!response.ok) {
      const text = await response.text();
      throw new Error(`Jira API error ${response.status}: ${text}`);
    }
    const result = await response.json() as unknown;
    if (Array.isArray(result)) {
      return result as Array<{
        id: number;
        name: string;
        description?: string;
        objectCount?: number;
        parentObjectTypeId?: number;
        abstractObjectType?: boolean;
      }>;
    } else if (result && typeof result === 'object' && 'objectTypes' in result) {
      return (result as { objectTypes: Array<{
        id: number;
        name: string;
        description?: string;
        objectCount?: number;
        parentObjectTypeId?: number;
        abstractObjectType?: boolean;
      }> }).objectTypes;
    }
    return [];
  }

  /**
   * Get attribute definitions for an object type.
   *
   * Deliberately best-effort: a failed fetch is logged and yields an empty
   * array instead of throwing, so schema discovery can continue.
   */
  async getAttributes(typeId: number): Promise<Array<{
    id: number;
    name: string;
    type: number;
    typeValue?: string;
    referenceObjectTypeId?: number;
    referenceObjectType?: { id: number; name: string };
    minimumCardinality?: number;
    maximumCardinality?: number;
    editable?: boolean;
    hidden?: boolean;
    system?: boolean;
    description?: string;
  }>> {
    const url = `${this.baseUrl}/objecttype/${typeId}/attributes`;
    const response = await fetch(url, {
      headers: this.getHeaders(false),
    });
    if (!response.ok) {
      logger.warn(`JiraAssetsClient: Failed to fetch attributes for type ${typeId}: ${response.status}`);
      return [];
    }
    return await response.json() as Array<{
      id: number;
      name: string;
      type: number;
      typeValue?: string;
      referenceObjectTypeId?: number;
      referenceObjectType?: { id: number; name: string };
      minimumCardinality?: number;
      maximumCardinality?: number;
      editable?: boolean;
      hidden?: boolean;
      system?: boolean;
      description?: string;
    }>;
  }
}
// Export singleton instance
export const jiraAssetsClient = new JiraAssetsClient();

View File

@@ -0,0 +1,308 @@
/**
* ObjectCacheRepository - Data access for cached objects (EAV pattern)
*/
import type { DatabaseAdapter } from '../services/database/interface.js';
import { logger } from '../services/logger.js';
/** Row shape of the `objects` table: minimal per-object metadata (EAV head). */
export interface ObjectRecord {
  id: string;
  objectKey: string;
  objectTypeName: string;
  label: string;
  jiraUpdatedAt: string | null;
  jiraCreatedAt: string | null;
  /** ISO timestamp of when this row was last written by the cache. */
  cachedAt: string;
}

/**
 * Row shape of the `attribute_values` table (EAV values).
 * Columns not matching the attribute's type are stored as null;
 * `arrayIndex` orders the values of multi-value attributes.
 */
export interface AttributeValueRecord {
  objectId: string;
  attributeId: number;
  textValue: string | null;
  numberValue: number | null;
  booleanValue: boolean | null;
  dateValue: string | null;
  datetimeValue: string | null;
  referenceObjectId: string | null;
  referenceObjectKey: string | null;
  referenceObjectLabel: string | null;
  arrayIndex: number;
}

/** Row shape of the `object_relations` table: one reference edge per row. */
export interface ObjectRelationRecord {
  sourceId: string;
  targetId: string;
  /** Attribute through which the source references the target. */
  attributeId: number;
  sourceType: string;
  targetType: string;
}
export class ObjectCacheRepository {
  public db: DatabaseAdapter;

  constructor(db: DatabaseAdapter) {
    this.db = db;
  }

  /**
   * Insert or update an object record (minimal metadata).
   * Note: on conflict, `object_type_name` and `jira_created_at` keep their
   * original values — only key, label, update-timestamp and cache-timestamp
   * are refreshed (matches the SQL below).
   */
  async upsertObject(object: {
    id: string;
    objectKey: string;
    objectTypeName: string;
    label: string;
    jiraUpdatedAt?: string;
    jiraCreatedAt?: string;
  }): Promise<void> {
    const cachedAt = new Date().toISOString();
    await this.db.execute(
      `INSERT INTO objects (id, object_key, object_type_name, label, jira_updated_at, jira_created_at, cached_at)
       VALUES (?, ?, ?, ?, ?, ?, ?)
       ON CONFLICT(id) DO UPDATE SET
         object_key = excluded.object_key,
         label = excluded.label,
         jira_updated_at = excluded.jira_updated_at,
         cached_at = excluded.cached_at`,
      [
        object.id,
        object.objectKey,
        object.objectTypeName,
        object.label,
        // ?? (not ||) so an empty string is stored as-is, not coerced to null
        object.jiraUpdatedAt ?? null,
        object.jiraCreatedAt ?? null,
        cachedAt,
      ]
    );
  }

  /**
   * Get an object record by ID.
   */
  async getObject(objectId: string): Promise<ObjectRecord | null> {
    return await this.db.queryOne<ObjectRecord>(
      `SELECT id, object_key as objectKey, object_type_name as objectTypeName, label,
              jira_updated_at as jiraUpdatedAt, jira_created_at as jiraCreatedAt, cached_at as cachedAt
       FROM objects
       WHERE id = ?`,
      [objectId]
    );
  }

  /**
   * Get an object record by its object key.
   */
  async getObjectByKey(objectKey: string): Promise<ObjectRecord | null> {
    return await this.db.queryOne<ObjectRecord>(
      `SELECT id, object_key as objectKey, object_type_name as objectTypeName, label,
              jira_updated_at as jiraUpdatedAt, jira_created_at as jiraCreatedAt, cached_at as cachedAt
       FROM objects
       WHERE object_key = ?`,
      [objectKey]
    );
  }

  /**
   * Delete all attribute values for an object.
   * Used when refreshing an object — all attributes are replaced.
   */
  async deleteAttributeValues(objectId: string): Promise<void> {
    await this.db.execute(
      `DELETE FROM attribute_values WHERE object_id = ?`,
      [objectId]
    );
  }

  /**
   * Insert or update a single attribute value.
   *
   * BUG FIX: uses `?? null` instead of `|| null` for the value columns —
   * the old `||` silently turned legitimate falsy values (0, false, '')
   * into NULL, losing data for boolean/numeric/empty-text attributes.
   */
  async upsertAttributeValue(value: {
    objectId: string;
    attributeId: number;
    textValue?: string | null;
    numberValue?: number | null;
    booleanValue?: boolean | null;
    dateValue?: string | null;
    datetimeValue?: string | null;
    referenceObjectId?: string | null;
    referenceObjectKey?: string | null;
    referenceObjectLabel?: string | null;
    arrayIndex: number;
  }): Promise<void> {
    await this.db.execute(
      `INSERT INTO attribute_values
       (object_id, attribute_id, text_value, number_value, boolean_value, date_value, datetime_value,
        reference_object_id, reference_object_key, reference_object_label, array_index)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
       ON CONFLICT(object_id, attribute_id, array_index) DO UPDATE SET
         text_value = excluded.text_value,
         number_value = excluded.number_value,
         boolean_value = excluded.boolean_value,
         date_value = excluded.date_value,
         datetime_value = excluded.datetime_value,
         reference_object_id = excluded.reference_object_id,
         reference_object_key = excluded.reference_object_key,
         reference_object_label = excluded.reference_object_label`,
      [
        value.objectId,
        value.attributeId,
        value.textValue ?? null,
        value.numberValue ?? null,
        value.booleanValue ?? null,
        value.dateValue ?? null,
        value.datetimeValue ?? null,
        value.referenceObjectId ?? null,
        value.referenceObjectKey ?? null,
        value.referenceObjectLabel ?? null,
        value.arrayIndex,
      ]
    );
  }

  /**
   * Batch upsert attribute values inside a single transaction (much faster
   * than one statement per value).
   *
   * Same `?? null` fix as upsertAttributeValue: 0, false and '' are
   * preserved instead of being coerced to NULL.
   */
  async batchUpsertAttributeValues(values: Array<{
    objectId: string;
    attributeId: number;
    textValue?: string | null;
    numberValue?: number | null;
    booleanValue?: boolean | null;
    dateValue?: string | null;
    datetimeValue?: string | null;
    referenceObjectId?: string | null;
    referenceObjectKey?: string | null;
    referenceObjectLabel?: string | null;
    arrayIndex: number;
  }>): Promise<void> {
    if (values.length === 0) return;
    await this.db.transaction(async (db) => {
      for (const value of values) {
        await db.execute(
          `INSERT INTO attribute_values
           (object_id, attribute_id, text_value, number_value, boolean_value, date_value, datetime_value,
            reference_object_id, reference_object_key, reference_object_label, array_index)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
           ON CONFLICT(object_id, attribute_id, array_index) DO UPDATE SET
             text_value = excluded.text_value,
             number_value = excluded.number_value,
             boolean_value = excluded.boolean_value,
             date_value = excluded.date_value,
             datetime_value = excluded.datetime_value,
             reference_object_id = excluded.reference_object_id,
             reference_object_key = excluded.reference_object_key,
             reference_object_label = excluded.reference_object_label`,
          [
            value.objectId,
            value.attributeId,
            value.textValue ?? null,
            value.numberValue ?? null,
            value.booleanValue ?? null,
            value.dateValue ?? null,
            value.datetimeValue ?? null,
            value.referenceObjectId ?? null,
            value.referenceObjectKey ?? null,
            value.referenceObjectLabel ?? null,
            value.arrayIndex,
          ]
        );
      }
    });
  }

  /**
   * Get all attribute values for an object, ordered by attribute then
   * array index (so multi-value attributes come back in insertion order).
   */
  async getAttributeValues(objectId: string): Promise<AttributeValueRecord[]> {
    return await this.db.query<AttributeValueRecord>(
      `SELECT object_id as objectId, attribute_id as attributeId, text_value as textValue,
              number_value as numberValue, boolean_value as booleanValue,
              date_value as dateValue, datetime_value as datetimeValue,
              reference_object_id as referenceObjectId, reference_object_key as referenceObjectKey,
              reference_object_label as referenceObjectLabel, array_index as arrayIndex
       FROM attribute_values
       WHERE object_id = ?
       ORDER BY attribute_id, array_index`,
      [objectId]
    );
  }

  /**
   * Insert an object relation; duplicates are ignored (DO NOTHING).
   */
  async upsertRelation(relation: {
    sourceId: string;
    targetId: string;
    attributeId: number;
    sourceType: string;
    targetType: string;
  }): Promise<void> {
    await this.db.execute(
      `INSERT INTO object_relations (source_id, target_id, attribute_id, source_type, target_type)
       VALUES (?, ?, ?, ?, ?)
       ON CONFLICT(source_id, target_id, attribute_id) DO NOTHING`,
      [
        relation.sourceId,
        relation.targetId,
        relation.attributeId,
        relation.sourceType,
        relation.targetType,
      ]
    );
  }

  /**
   * Delete all outgoing relations for an object (used when refreshing).
   */
  async deleteRelations(objectId: string): Promise<void> {
    await this.db.execute(
      `DELETE FROM object_relations WHERE source_id = ?`,
      [objectId]
    );
  }

  /**
   * Get objects of a specific type, ordered by label, with pagination.
   */
  async getObjectsByType(
    objectTypeName: string,
    options: {
      limit?: number;
      offset?: number;
    } = {}
  ): Promise<ObjectRecord[]> {
    const { limit = 1000, offset = 0 } = options;
    return await this.db.query<ObjectRecord>(
      `SELECT id, object_key as objectKey, object_type_name as objectTypeName, label,
              jira_updated_at as jiraUpdatedAt, jira_created_at as jiraCreatedAt, cached_at as cachedAt
       FROM objects
       WHERE object_type_name = ?
       ORDER BY label
       LIMIT ? OFFSET ?`,
      [objectTypeName, limit, offset]
    );
  }

  /**
   * Count objects of a type.
   * Normalizes the driver's COUNT(*) result, which some adapters return
   * as a string.
   */
  async countObjectsByType(objectTypeName: string): Promise<number> {
    const result = await this.db.queryOne<{ count: number | string }>(
      `SELECT COUNT(*) as count FROM objects WHERE object_type_name = ?`,
      [objectTypeName]
    );
    if (!result?.count) return 0;
    return typeof result.count === 'string' ? parseInt(result.count, 10) : Number(result.count);
  }

  /**
   * Delete an object (cascades to attribute_values and relations via FK).
   */
  async deleteObject(objectId: string): Promise<void> {
    await this.db.execute(
      `DELETE FROM objects WHERE id = ?`,
      [objectId]
    );
  }
}

View File

@@ -0,0 +1,485 @@
/**
* SchemaRepository - Data access for schema metadata
*/
import type { DatabaseAdapter } from '../services/database/interface.js';
import { logger } from '../services/logger.js';
import { toPascalCase } from '../services/schemaUtils.js';
export interface SchemaRecord {
id: number;
jiraSchemaId: string;
name: string;
description: string | null;
discoveredAt: string;
updatedAt: string;
}
export interface ObjectTypeRecord {
id: number;
schemaId: number;
jiraTypeId: number;
typeName: string;
displayName: string;
description: string | null;
syncPriority: number;
objectCount: number;
enabled: boolean;
discoveredAt: string;
updatedAt: string;
}
export interface AttributeRecord {
id: number;
jiraAttrId: number;
objectTypeName: string;
attrName: string;
fieldName: string;
attrType: string;
isMultiple: boolean;
isEditable: boolean;
isRequired: boolean;
isSystem: boolean;
referenceTypeName: string | null;
description: string | null;
discoveredAt: string;
}
export class SchemaRepository {
  // db abstracts over the two supported backends (SQLite and PostgreSQL);
  // dialect differences (e.g. boolean storage) are handled per-query below.
  constructor(private db: DatabaseAdapter) {}
  /**
   * Upsert a schema, keyed by its Jira schema id.
   *
   * Implemented as check-then-act (SELECT, then UPDATE or INSERT) rather than
   * an atomic upsert. NOTE(review): concurrent callers could race here —
   * presumably discovery runs are serialized; confirm before relying on it.
   *
   * @returns The local `schemas.id` row id (0 if the post-insert lookup
   *          unexpectedly returns nothing).
   */
  async upsertSchema(schema: {
    jiraSchemaId: string;
    name: string;
    description?: string;
  }): Promise<number> {
    const now = new Date().toISOString();
    // Check if exists
    const existing = await this.db.queryOne<{ id: number }>(
      `SELECT id FROM schemas WHERE jira_schema_id = ?`,
      [schema.jiraSchemaId]
    );
    if (existing) {
      await this.db.execute(
        `UPDATE schemas SET name = ?, description = ?, updated_at = ? WHERE id = ?`,
        [schema.name, schema.description || null, now, existing.id]
      );
      return existing.id;
    } else {
      await this.db.execute(
        `INSERT INTO schemas (jira_schema_id, name, description, discovered_at, updated_at)
         VALUES (?, ?, ?, ?, ?)`,
        [schema.jiraSchemaId, schema.name, schema.description || null, now, now]
      );
      // Re-select to learn the generated id (adapter does not expose lastInsertId).
      const result = await this.db.queryOne<{ id: number }>(
        `SELECT id FROM schemas WHERE jira_schema_id = ?`,
        [schema.jiraSchemaId]
      );
      return result?.id || 0;
    }
  }
  /**
   * Get all schemas, ordered by their Jira schema id.
   */
  async getAllSchemas(): Promise<SchemaRecord[]> {
    return await this.db.query<SchemaRecord>(
      `SELECT id, jira_schema_id as jiraSchemaId, name, description, discovered_at as discoveredAt, updated_at as updatedAt
       FROM schemas
       ORDER BY jira_schema_id`
    );
  }
  /**
   * Upsert an object type within a schema, keyed by (schema_id, jira_type_id).
   *
   * On update, `type_name` is never overwritten with NULL/empty: the new value
   * is preferred, else the existing stored value is kept, else one is
   * generated from the display name via toPascalCase (logged as a warning).
   * Newly inserted types start disabled.
   *
   * @returns The local `object_types.id` row id (0 if the post-insert lookup fails).
   */
  async upsertObjectType(
    schemaId: number,
    objectType: {
      jiraTypeId: number;
      typeName: string;
      displayName: string;
      description?: string;
      syncPriority?: number;
      objectCount?: number;
    }
  ): Promise<number> {
    const now = new Date().toISOString();
    const existing = await this.db.queryOne<{ id: number }>(
      `SELECT id FROM object_types WHERE schema_id = ? AND jira_type_id = ?`,
      [schemaId, objectType.jiraTypeId]
    );
    if (existing) {
      // Update existing record - ensure type_name is set if missing
      // First check if type_name is NULL
      const currentRecord = await this.db.queryOne<{ type_name: string | null }>(
        `SELECT type_name FROM object_types WHERE id = ?`,
        [existing.id]
      );
      // Determine what type_name value to use (priority: provided > stored > generated)
      let typeNameToUse: string | null = null;
      if (objectType.typeName && objectType.typeName.trim() !== '') {
        // Use provided typeName if available
        typeNameToUse = objectType.typeName;
      } else if (currentRecord?.type_name && currentRecord.type_name.trim() !== '') {
        // Keep existing type_name if it exists and no new one provided
        typeNameToUse = currentRecord.type_name;
      } else {
        // Generate type_name from display_name if missing
        typeNameToUse = toPascalCase(objectType.displayName);
        logger.warn(`SchemaRepository.upsertObjectType: Generated missing type_name "${typeNameToUse}" from display_name "${objectType.displayName}" for id=${existing.id}`);
      }
      // Only update type_name if we have a valid value (never set to NULL)
      if (typeNameToUse && typeNameToUse.trim() !== '') {
        await this.db.execute(
          `UPDATE object_types
           SET display_name = ?, description = ?, sync_priority = ?, object_count = ?,
               type_name = ?, updated_at = ?
           WHERE id = ?`,
          [
            objectType.displayName,
            objectType.description || null,
            objectType.syncPriority || 0,
            objectType.objectCount || 0,
            typeNameToUse,
            now,
            existing.id,
          ]
        );
      } else {
        // Shouldn't happen, but log if it does
        logger.error(`SchemaRepository.upsertObjectType: Cannot update type_name - all sources are empty for id=${existing.id}`);
        // Still update other fields, but don't touch type_name
        await this.db.execute(
          `UPDATE object_types
           SET display_name = ?, description = ?, sync_priority = ?, object_count = ?,
               updated_at = ?
           WHERE id = ?`,
          [
            objectType.displayName,
            objectType.description || null,
            objectType.syncPriority || 0,
            objectType.objectCount || 0,
            now,
            existing.id,
          ]
        );
      }
      return existing.id;
    } else {
      await this.db.execute(
        `INSERT INTO object_types (schema_id, jira_type_id, type_name, display_name, description, sync_priority, object_count, enabled, discovered_at, updated_at)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          schemaId,
          objectType.jiraTypeId,
          objectType.typeName,
          objectType.displayName,
          objectType.description || null,
          objectType.syncPriority || 0,
          objectType.objectCount || 0,
          false, // Default: disabled — an operator must enable types for sync
          now,
          now,
        ]
      );
      // Re-select to learn the generated id.
      const result = await this.db.queryOne<{ id: number }>(
        `SELECT id FROM object_types WHERE schema_id = ? AND jira_type_id = ?`,
        [schemaId, objectType.jiraTypeId]
      );
      return result?.id || 0;
    }
  }
  /**
   * Get enabled object types, ordered by sync priority then type name.
   *
   * The `enabled` column is BOOLEAN on PostgreSQL but INTEGER (0/1) on
   * SQLite, so the WHERE condition is chosen per dialect, and the returned
   * rows are mapped by hand (snake_case -> camelCase) instead of SQL aliases.
   * Rows whose `type_name` is NULL/empty are filtered out of the result.
   *
   * The extensive debug/error logging exists to diagnose missing-type_name
   * and enabled-flag issues seen in the field; it re-queries the DB directly
   * for any offending rows.
   */
  async getEnabledObjectTypes(): Promise<ObjectTypeRecord[]> {
    // Handle both PostgreSQL (boolean) and SQLite (integer) for enabled column
    const isPostgres = this.db.isPostgres === true;
    // For PostgreSQL: enabled is BOOLEAN, so 'enabled = true' works
    // For SQLite: enabled is INTEGER (0/1), so 'enabled = 1' works
    // However, some adapters might return booleans as 1/0 in both cases
    // So we check for both boolean true and integer 1
    const enabledCondition = isPostgres
      ? 'enabled IS true' // PostgreSQL: IS true is more explicit than = true
      : 'enabled = 1'; // SQLite: explicit integer comparison
    // Query without aliases first to ensure we get the raw values
    const rawResults = await this.db.query<{
      id: number;
      schema_id: number;
      jira_type_id: number;
      type_name: string | null;
      display_name: string;
      description: string | null;
      sync_priority: number;
      object_count: number;
      enabled: boolean | number;
      discovered_at: string;
      updated_at: string;
    }>(
      `SELECT id, schema_id, jira_type_id, type_name, display_name, description,
       sync_priority, object_count, enabled, discovered_at, updated_at
       FROM object_types
       WHERE ${enabledCondition}
       ORDER BY sync_priority, type_name`
    );
    logger.debug(`SchemaRepository.getEnabledObjectTypes: Raw query found ${rawResults.length} enabled types. Raw type_name values: ${JSON.stringify(rawResults.map(r => ({ id: r.id, type_name: r.type_name, type_name_type: typeof r.type_name, display_name: r.display_name })))}`);
    // Map to ObjectTypeRecord format manually to ensure proper mapping
    const results: ObjectTypeRecord[] = rawResults.map(r => ({
      id: r.id,
      schemaId: r.schema_id,
      jiraTypeId: r.jira_type_id,
      typeName: r.type_name || '', // Convert null to empty string if needed
      displayName: r.display_name,
      description: r.description,
      syncPriority: r.sync_priority,
      objectCount: r.object_count,
      enabled: r.enabled === true || r.enabled === 1, // normalize dialect representations
      discoveredAt: r.discovered_at,
      updatedAt: r.updated_at,
    }));
    // Debug: Log what we found
    logger.debug(`SchemaRepository.getEnabledObjectTypes: Found ${results.length} enabled types (isPostgres: ${isPostgres}, condition: ${enabledCondition})`);
    if (results.length > 0) {
      // Log raw results to see what we're actually getting
      logger.debug(`SchemaRepository.getEnabledObjectTypes: Raw results: ${JSON.stringify(results.map(r => ({
        id: r.id,
        typeName: r.typeName,
        typeNameType: typeof r.typeName,
        typeNameLength: r.typeName?.length,
        displayName: r.displayName,
        enabled: r.enabled
      })))}`);
      // Check for missing typeName
      const missingTypeName = results.filter(r => !r.typeName || r.typeName.trim() === '');
      if (missingTypeName.length > 0) {
        logger.error(`SchemaRepository.getEnabledObjectTypes: Found ${missingTypeName.length} enabled types with missing typeName: ${JSON.stringify(missingTypeName.map(r => ({
          id: r.id,
          jiraTypeId: r.jiraTypeId,
          displayName: r.displayName,
          typeName: r.typeName,
          typeNameType: typeof r.typeName,
          rawTypeName: JSON.stringify(r.typeName)
        })))}`);
        // Try to query directly to see what the DB actually has
        for (const missing of missingTypeName) {
          const directCheck = await this.db.queryOne<{ type_name: string | null }>(
            `SELECT type_name FROM object_types WHERE id = ?`,
            [missing.id]
          );
          logger.error(`SchemaRepository.getEnabledObjectTypes: Direct query for id=${missing.id} returned type_name: ${JSON.stringify(directCheck?.type_name)}`);
        }
      }
      logger.debug(`SchemaRepository.getEnabledObjectTypes: Type names: ${results.map(r => `${r.typeName || 'NULL'}(enabled:${r.enabled}, type:${typeof r.enabled})`).join(', ')}`);
      // Also check what gets filtered out
      const filteredResults = results.filter(r => r.typeName && r.typeName.trim() !== '');
      if (filteredResults.length < results.length) {
        logger.warn(`SchemaRepository.getEnabledObjectTypes: Filtered out ${results.length - filteredResults.length} results with missing typeName`);
      }
    } else {
      // Debug: Check if there are any enabled types at all (check the actual query)
      const enabledCheck = await this.db.query<{ count: number }>(
        isPostgres
          ? `SELECT COUNT(*) as count FROM object_types WHERE enabled IS true`
          : `SELECT COUNT(*) as count FROM object_types WHERE enabled = 1`
      );
      logger.warn(`SchemaRepository.getEnabledObjectTypes: No enabled types found with query. Query found ${enabledCheck[0]?.count || 0} enabled types.`);
      // Also check what types are actually in the DB
      const allTypes = await this.db.query<{ typeName: string; enabled: boolean | number; id: number }>(
        `SELECT id, type_name as typeName, enabled FROM object_types WHERE enabled IS NOT NULL ORDER BY enabled DESC LIMIT 10`
      );
      logger.warn(`SchemaRepository.getEnabledObjectTypes: Sample types from DB: ${allTypes.map(t => `id=${t.id}, ${t.typeName || 'NULL'}=enabled:${t.enabled}(${typeof t.enabled})`).join(', ')}`);
    }
    // Filter out results with missing typeName
    return results.filter(r => r.typeName && r.typeName.trim() !== '');
  }
  /**
   * Get object type by internal type name.
   *
   * @returns The matching record, or null when the type is unknown.
   */
  async getObjectTypeByTypeName(typeName: string): Promise<ObjectTypeRecord | null> {
    return await this.db.queryOne<ObjectTypeRecord>(
      `SELECT id, schema_id as schemaId, jira_type_id as jiraTypeId, type_name as typeName,
       display_name as displayName, description, sync_priority as syncPriority,
       object_count as objectCount, enabled, discovered_at as discoveredAt, updated_at as updatedAt
       FROM object_types
       WHERE type_name = ?`,
      [typeName]
    );
  }
  /**
   * Get object type by Jira type ID.
   * Note: Jira type IDs are global across schemas, but we store them per schema.
   * This method returns the first matching type found (any schema).
   *
   * On a miss, a diagnostic query checks whether the id exists under any
   * schema at all (logging only — the method still returns null).
   */
  async getObjectTypeByJiraId(jiraTypeId: number): Promise<ObjectTypeRecord | null> {
    const result = await this.db.queryOne<ObjectTypeRecord>(
      `SELECT id, schema_id as schemaId, jira_type_id as jiraTypeId, type_name as typeName,
       display_name as displayName, description, sync_priority as syncPriority,
       object_count as objectCount, enabled, discovered_at as discoveredAt, updated_at as updatedAt
       FROM object_types
       WHERE jira_type_id = ?
       LIMIT 1`,
      [jiraTypeId]
    );
    if (!result) {
      // Diagnostic: Check if this type ID exists in any schema
      const db = this.db;
      try {
        const allSchemasWithType = await db.query<{ schema_id: number; jira_schema_id: string; schema_name: string; count: number }>(
          `SELECT ot.schema_id, s.jira_schema_id, s.name as schema_name, COUNT(*) as count
           FROM object_types ot
           JOIN schemas s ON ot.schema_id = s.id
           WHERE ot.jira_type_id = ?
           GROUP BY ot.schema_id, s.jira_schema_id, s.name`,
          [jiraTypeId]
        );
        if (allSchemasWithType.length === 0) {
          logger.debug(`SchemaRepository: Jira type ID ${jiraTypeId} not found in any schema. This object type needs to be discovered via schema discovery.`);
        } else {
          logger.debug(`SchemaRepository: Jira type ID ${jiraTypeId} exists in ${allSchemasWithType.length} schema(s): ${allSchemasWithType.map(s => `${s.schema_name} (ID: ${s.jira_schema_id})`).join(', ')}`);
        }
      } catch (error) {
        // Diagnostics must never mask the original null result.
        logger.debug(`SchemaRepository: Failed to check schema existence for type ID ${jiraTypeId}`, error);
      }
    }
    return result;
  }
  /**
   * Upsert an attribute, keyed by (jira_attr_id, object_type_name).
   *
   * Same check-then-act pattern as the other upserts; `discovered_at` is set
   * only on first insert and left untouched on update.
   *
   * @returns The local `attributes.id` row id (0 if the post-insert lookup fails).
   */
  async upsertAttribute(attribute: {
    jiraAttrId: number;
    objectTypeName: string;
    attrName: string;
    fieldName: string;
    attrType: string;
    isMultiple: boolean;
    isEditable: boolean;
    isRequired: boolean;
    isSystem: boolean;
    referenceTypeName?: string;
    description?: string;
  }): Promise<number> {
    const now = new Date().toISOString();
    const existing = await this.db.queryOne<{ id: number }>(
      `SELECT id FROM attributes WHERE jira_attr_id = ? AND object_type_name = ?`,
      [attribute.jiraAttrId, attribute.objectTypeName]
    );
    if (existing) {
      await this.db.execute(
        `UPDATE attributes
         SET attr_name = ?, field_name = ?, attr_type = ?, is_multiple = ?, is_editable = ?,
             is_required = ?, is_system = ?, reference_type_name = ?, description = ?
         WHERE id = ?`,
        [
          attribute.attrName,
          attribute.fieldName,
          attribute.attrType,
          attribute.isMultiple,
          attribute.isEditable,
          attribute.isRequired,
          attribute.isSystem,
          attribute.referenceTypeName || null,
          attribute.description || null,
          existing.id,
        ]
      );
      return existing.id;
    } else {
      await this.db.execute(
        `INSERT INTO attributes (jira_attr_id, object_type_name, attr_name, field_name, attr_type,
         is_multiple, is_editable, is_required, is_system, reference_type_name, description, discovered_at)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          attribute.jiraAttrId,
          attribute.objectTypeName,
          attribute.attrName,
          attribute.fieldName,
          attribute.attrType,
          attribute.isMultiple,
          attribute.isEditable,
          attribute.isRequired,
          attribute.isSystem,
          attribute.referenceTypeName || null,
          attribute.description || null,
          now,
        ]
      );
      // Re-select to learn the generated id.
      const result = await this.db.queryOne<{ id: number }>(
        `SELECT id FROM attributes WHERE jira_attr_id = ? AND object_type_name = ?`,
        [attribute.jiraAttrId, attribute.objectTypeName]
      );
      return result?.id || 0;
    }
  }
  /**
   * Get attributes for an object type, ordered by Jira attribute id.
   */
  async getAttributesForType(objectTypeName: string): Promise<AttributeRecord[]> {
    return await this.db.query<AttributeRecord>(
      `SELECT id, jira_attr_id as jiraAttrId, object_type_name as objectTypeName, attr_name as attrName,
       field_name as fieldName, attr_type as attrType, is_multiple as isMultiple,
       is_editable as isEditable, is_required as isRequired, is_system as isSystem,
       reference_type_name as referenceTypeName, description, discovered_at as discoveredAt
       FROM attributes
       WHERE object_type_name = ?
       ORDER BY jira_attr_id`,
      [objectTypeName]
    );
  }
  /**
   * Get a single attribute by object type and (camelCase) field name.
   *
   * @returns The matching record, or null when not found.
   */
  async getAttributeByFieldName(objectTypeName: string, fieldName: string): Promise<AttributeRecord | null> {
    return await this.db.queryOne<AttributeRecord>(
      `SELECT id, jira_attr_id as jiraAttrId, object_type_name as objectTypeName, attr_name as attrName,
       field_name as fieldName, attr_type as attrType, is_multiple as isMultiple,
       is_editable as isEditable, is_required as isRequired, is_system as isSystem,
       reference_type_name as referenceTypeName, description, discovered_at as discoveredAt
       FROM attributes
       WHERE object_type_name = ? AND field_name = ?`,
      [objectTypeName, fieldName]
    );
  }
  /**
   * Resolve the local attribute row id for an object type + Jira attribute id.
   *
   * @returns The local id, or null when the attribute is unknown.
   */
  async getAttributeId(objectTypeName: string, jiraAttrId: number): Promise<number | null> {
    const result = await this.db.queryOne<{ id: number }>(
      `SELECT id FROM attributes WHERE object_type_name = ? AND jira_attr_id = ?`,
      [objectTypeName, jiraAttrId]
    );
    return result?.id || null;
  }
}

View File

@@ -326,8 +326,9 @@ router.get('/bia-comparison', async (req: Request, res: Response) => {
// Query params:
// - mode=edit: Force refresh from Jira for editing (includes _jiraUpdatedAt for conflict detection)
router.get('/:id', async (req: Request, res: Response) => {
const id = getParamString(req, 'id');
try {
const id = getParamString(req, 'id');
const mode = getQueryString(req, 'mode');
// Don't treat special routes as application IDs
@@ -342,7 +343,7 @@ router.get('/:id', async (req: Request, res: Response) => {
: await dataService.getApplicationById(id);
if (!application) {
res.status(404).json({ error: 'Application not found' });
res.status(404).json({ error: 'Application not found', id });
return;
}
@@ -355,8 +356,15 @@ router.get('/:id', async (req: Request, res: Response) => {
res.json(applicationWithCompleteness);
} catch (error) {
logger.error('Failed to get application', error);
res.status(500).json({ error: 'Failed to get application' });
logger.error(`Failed to get application ${id}`, error);
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
const errorDetails = error instanceof Error && error.stack ? error.stack : String(error);
logger.debug(`Error details for application ${id}:`, errorDetails);
res.status(500).json({
error: 'Failed to get application',
details: errorMessage,
id: id,
});
}
});
@@ -625,34 +633,101 @@ router.get('/:id/related/:objectType', async (req: Request, res: Response) => {
type RelatedObjectType = Server | Flows | Certificate | Domain | AzureSubscription;
let relatedObjects: RelatedObjectType[] = [];
// Get requested attributes from query string (needed for fallback)
const attributesParam = getQueryString(req, 'attributes');
const requestedAttrs = attributesParam
? attributesParam.split(',').map(a => a.trim())
: [];
logger.debug(`Getting related objects for application ${id}, objectType: ${objectType}, typeName: ${typeName}, requestedAttrs: ${requestedAttrs.join(',') || 'none'}`);
// First try to get from cache
switch (typeName) {
case 'Server':
relatedObjects = await cmdbService.getReferencingObjects<Server>(id, 'Server');
logger.debug(`Found ${relatedObjects.length} Servers referencing application ${id} in cache`);
break;
case 'Flows': {
// Flows reference ApplicationComponents via Source and Target attributes
// We need to find Flows where this ApplicationComponent is the target of the reference
relatedObjects = await cmdbService.getReferencingObjects<Flows>(id, 'Flows');
logger.debug(`Found ${relatedObjects.length} Flows referencing application ${id} in cache`);
break;
}
case 'Certificate':
relatedObjects = await cmdbService.getReferencingObjects<Certificate>(id, 'Certificate');
logger.debug(`Found ${relatedObjects.length} Certificates referencing application ${id} in cache`);
break;
case 'Domain':
relatedObjects = await cmdbService.getReferencingObjects<Domain>(id, 'Domain');
logger.debug(`Found ${relatedObjects.length} Domains referencing application ${id} in cache`);
break;
case 'AzureSubscription':
relatedObjects = await cmdbService.getReferencingObjects<AzureSubscription>(id, 'AzureSubscription');
logger.debug(`Found ${relatedObjects.length} AzureSubscriptions referencing application ${id} in cache`);
break;
default:
relatedObjects = [];
logger.warn(`Unknown object type for related objects: ${typeName}`);
}
// If no objects found in cache, try to fetch from Jira directly as fallback
// This helps when relations haven't been synced yet
if (relatedObjects.length === 0) {
try {
// Get application to get its objectKey
const app = await cmdbService.getObject('ApplicationComponent', id);
if (!app) {
logger.warn(`Application ${id} not found in cache, cannot fetch related objects from Jira`);
} else if (!app.objectKey) {
logger.warn(`Application ${id} has no objectKey, cannot fetch related objects from Jira`);
} else {
logger.info(`No related ${typeName} objects found in cache for application ${id} (${app.objectKey}), trying Jira directly...`);
const { jiraAssetsService } = await import('../services/jiraAssets.js');
// Use the Jira object type name from schema (not our internal typeName)
const { OBJECT_TYPES } = await import('../generated/jira-schema.js');
const jiraTypeDef = OBJECT_TYPES[typeName];
const jiraObjectTypeName = jiraTypeDef?.name || objectType;
logger.debug(`Using Jira object type name: "${jiraObjectTypeName}" for internal type "${typeName}"`);
const jiraResult = await jiraAssetsService.getRelatedObjects(app.objectKey, jiraObjectTypeName, requestedAttrs);
logger.debug(`Jira query returned ${jiraResult?.objects?.length || 0} objects`);
if (jiraResult && jiraResult.objects && jiraResult.objects.length > 0) {
logger.info(`Found ${jiraResult.objects.length} related ${typeName} objects from Jira, caching them...`);
// Batch fetch and cache all objects at once (much more efficient)
const objectIds = jiraResult.objects.map(obj => obj.id.toString());
const cachedObjects = await cmdbService.batchFetchAndCacheObjects(typeName as CMDBObjectTypeName, objectIds);
logger.info(`Successfully batch cached ${cachedObjects.length} of ${jiraResult.objects.length} related ${typeName} objects`);
// Use cached objects, fallback to minimal objects from Jira result if not found
const cachedById = new Map(cachedObjects.map(obj => [obj.id, obj]));
relatedObjects = jiraResult.objects.map((jiraObj) => {
const cached = cachedById.get(jiraObj.id.toString());
if (cached) {
return cached as RelatedObjectType;
}
// Fallback: create minimal object from Jira result
logger.debug(`Creating minimal object for ${jiraObj.id} (${jiraObj.key}) as cache lookup failed`);
return {
id: jiraObj.id.toString(),
objectKey: jiraObj.key,
label: jiraObj.label,
_objectType: typeName,
} as RelatedObjectType;
});
logger.info(`Loaded ${relatedObjects.length} related ${typeName} objects (${relatedObjects.filter(o => o).length} valid)`);
} else {
logger.info(`No related ${typeName} objects found in Jira for application ${app.objectKey}`);
}
}
} catch (error) {
logger.error(`Failed to fetch related ${typeName} objects from Jira as fallback for application ${id}:`, error);
}
}
// Get requested attributes from query string
const attributesParam = getQueryString(req, 'attributes');
const requestedAttrs = attributesParam
? attributesParam.split(',').map(a => a.trim())
: [];
// Format response - must match RelatedObjectsResponse type expected by frontend
const objects = relatedObjects.map(obj => {

View File

@@ -84,7 +84,11 @@ router.get('/me', async (req: Request, res: Response) => {
// The sessionId should already be set by authMiddleware from cookies
const sessionId = req.sessionId || req.headers['x-session-id'] as string || req.cookies?.sessionId;
logger.debug(`[GET /me] SessionId: ${sessionId ? sessionId.substring(0, 8) + '...' : 'none'}, Cookies: ${JSON.stringify(req.cookies)}`);
// Only log relevant cookies to avoid noise from other applications
const relevantCookies = req.cookies ? {
sessionId: req.cookies.sessionId ? req.cookies.sessionId.substring(0, 8) + '...' : undefined,
} : {};
logger.debug(`[GET /me] SessionId: ${sessionId ? sessionId.substring(0, 8) + '...' : 'none'}, Relevant cookies: ${JSON.stringify(relevantCookies)}`);
// Service accounts are NOT used for application authentication
// They are only used for Jira API access (configured in .env as JIRA_SERVICE_ACCOUNT_TOKEN)
@@ -456,9 +460,11 @@ router.post('/accept-invitation', async (req: Request, res: Response) => {
export async function authMiddleware(req: Request, res: Response, next: NextFunction) {
const sessionId = req.headers['x-session-id'] as string || req.cookies?.sessionId;
// Debug logging for cookie issues
// Debug logging for cookie issues (only log relevant cookies to avoid noise)
if (req.path === '/api/auth/me') {
logger.debug(`[authMiddleware] Path: ${req.path}, Cookies: ${JSON.stringify(req.cookies)}, SessionId from cookie: ${req.cookies?.sessionId}, SessionId from header: ${req.headers['x-session-id']}`);
const sessionIdFromCookie = req.cookies?.sessionId ? req.cookies.sessionId.substring(0, 8) + '...' : 'none';
const sessionIdFromHeader = req.headers['x-session-id'] ? String(req.headers['x-session-id']).substring(0, 8) + '...' : 'none';
logger.debug(`[authMiddleware] Path: ${req.path}, SessionId from cookie: ${sessionIdFromCookie}, SessionId from header: ${sessionIdFromHeader}`);
}
if (sessionId) {

View File

@@ -5,7 +5,7 @@
*/
import { Router, Request, Response } from 'express';
import { cacheStore } from '../services/cacheStore.js';
import { normalizedCacheStore as cacheStore } from '../services/normalizedCacheStore.js';
import { syncEngine } from '../services/syncEngine.js';
import { logger } from '../services/logger.js';
import { requireAuth, requirePermission } from '../middleware/authorization.js';
@@ -30,17 +30,24 @@ router.get('/status', async (req: Request, res: Response) => {
if (cacheStats.objectsByType['ApplicationComponent'] !== undefined) {
try {
const { jiraAssetsClient } = await import('../services/jiraAssetsClient.js');
const { schemaMappingService } = await import('../services/schemaMappingService.js');
const { OBJECT_TYPES } = await import('../generated/jira-schema.js');
const typeDef = OBJECT_TYPES['ApplicationComponent'];
if (typeDef) {
const searchResult = await jiraAssetsClient.searchObjects(`objectType = "${typeDef.name}"`, 1, 1);
const jiraCount = searchResult.totalCount;
const cacheCount = cacheStats.objectsByType['ApplicationComponent'] || 0;
jiraComparison = {
jiraCount,
cacheCount,
difference: jiraCount - cacheCount,
};
// Get schema ID for ApplicationComponent
const schemaId = await schemaMappingService.getSchemaId('ApplicationComponent');
// Skip if no schema ID is available
if (schemaId && schemaId.trim() !== '') {
const searchResult = await jiraAssetsClient.searchObjects(`objectType = "${typeDef.name}"`, 1, 1, schemaId);
const jiraCount = searchResult.totalCount;
const cacheCount = cacheStats.objectsByType['ApplicationComponent'] || 0;
jiraComparison = {
jiraCount,
cacheCount,
difference: jiraCount - cacheCount,
};
}
}
} catch (err) {
logger.debug('Could not fetch Jira count for comparison', err);
@@ -64,6 +71,17 @@ router.post('/sync', async (req: Request, res: Response) => {
try {
logger.info('Manual full sync triggered');
// Check if configuration is complete
const { schemaConfigurationService } = await import('../services/schemaConfigurationService.js');
const isConfigured = await schemaConfigurationService.isConfigurationComplete();
if (!isConfigured) {
res.status(400).json({
error: 'Schema configuration not complete',
message: 'Please configure at least one object type to be synced in the settings page before starting sync.',
});
return;
}
// Don't wait for completion - return immediately
syncEngine.fullSync().catch(err => {
logger.error('Full sync failed', err);
@@ -75,7 +93,11 @@ router.post('/sync', async (req: Request, res: Response) => {
});
} catch (error) {
logger.error('Failed to trigger full sync', error);
res.status(500).json({ error: 'Failed to trigger sync' });
const errorMessage = error instanceof Error ? error.message : 'Failed to trigger sync';
res.status(500).json({
error: errorMessage,
details: error instanceof Error ? error.stack : undefined
});
}
});
@@ -116,6 +138,39 @@ router.post('/sync/:objectType', async (req: Request, res: Response) => {
}
});
// Refresh a specific application (force re-sync from Jira)
router.post('/refresh-application/:id', async (req: Request, res: Response) => {
try {
const id = getParamString(req, 'id');
const { cmdbService } = await import('../services/cmdbService.js');
logger.info(`Manual refresh triggered for application ${id}`);
// Force refresh from Jira
const app = await cmdbService.getObject('ApplicationComponent', id, { forceRefresh: true });
if (!app) {
res.status(404).json({ error: `Application ${id} not found in Jira` });
return;
}
res.json({
status: 'refreshed',
applicationId: id,
applicationKey: app.objectKey,
message: 'Application refreshed from Jira and cached with updated schema',
});
} catch (error) {
const id = getParamString(req, 'id');
const errorMessage = error instanceof Error ? error.message : 'Failed to refresh application';
logger.error(`Failed to refresh application ${id}`, error);
res.status(500).json({
error: errorMessage,
applicationId: id,
});
}
});
// Clear cache for a specific type
router.delete('/clear/:objectType', async (req: Request, res: Response) => {
try {

View File

@@ -0,0 +1,488 @@
/**
* Data Validation routes
*
* Provides endpoints for validating and inspecting data in the cache/database.
*/
import { Router, Request, Response } from 'express';
import { normalizedCacheStore as cacheStore } from '../services/normalizedCacheStore.js';
import { logger } from '../services/logger.js';
import { requireAuth, requirePermission } from '../middleware/authorization.js';
import { getQueryString, getParamString } from '../utils/queryHelpers.js';
import { schemaCacheService } from '../services/schemaCacheService.js';
import { jiraAssetsClient } from '../services/jiraAssetsClient.js';
import { dataIntegrityService } from '../services/dataIntegrityService.js';
import { schemaMappingService } from '../services/schemaMappingService.js';
import { getDatabaseAdapter } from '../services/database/singleton.js';
import type { CMDBObjectTypeName } from '../generated/jira-types.js';
const router = Router();
// All routes require authentication and manage_settings permission
router.use(requireAuth);
router.use(requirePermission('manage_settings'));
/**
 * GET /api/data-validation/stats
 * Get comprehensive data validation statistics.
 *
 * Combines three sources:
 *  - local cache counts (cacheStore),
 *  - schema / object-type metadata (schemaCacheService plus the
 *    object_types / schemas tables),
 *  - live object counts fetched from Jira Assets (jiraAssetsClient),
 * and returns a per-type sync comparison plus basic integrity metrics.
 * Per-type Jira failures are downgraded to a count of 0 so one bad type
 * cannot fail the whole endpoint.
 */
router.get('/stats', async (req: Request, res: Response) => {
  try {
    const db = getDatabaseAdapter();
    const cacheStats = await cacheStore.getStats();
    // Get object counts by type from cache
    const objectsByType = cacheStats.objectsByType;
    // Get schema from database (via cache)
    const schema = await schemaCacheService.getSchema();
    const objectTypes = schema.objectTypes;
    const typeNames = Object.keys(objectTypes);
    // Get schema information for each object type (join with schemas table).
    // Failure here is non-fatal: the map simply stays (partially) empty and
    // the schemaMappingService fallback below is used instead.
    const schemaInfoMap = new Map<string, { schemaId: string; schemaName: string }>();
    try {
      const schemaInfoRows = await db.query<{
        type_name: string;
        jira_schema_id: string;
        schema_name: string;
      }>(`
        SELECT ot.type_name, s.jira_schema_id, s.name as schema_name
        FROM object_types ot
        JOIN schemas s ON ot.schema_id = s.id
        WHERE ot.type_name IN (${typeNames.map(() => '?').join(',')})
      `, typeNames);
      for (const row of schemaInfoRows) {
        schemaInfoMap.set(row.type_name, {
          schemaId: row.jira_schema_id,
          schemaName: row.schema_name,
        });
      }
    } catch (error) {
      logger.debug('Failed to fetch schema information', error);
    }
    // Get Jira counts for comparison
    const jiraCounts: Record<string, number> = {};
    // Fetch counts from Jira in parallel, using schema IDs from the database.
    // Each promise both mutates jiraCounts and returns its result; errors per
    // type are swallowed into a 0 count (logged at debug level).
    const countPromises = typeNames.map(async (typeName) => {
      try {
        // Get schema ID from the database (already fetched above)
        const schemaInfo = schemaInfoMap.get(typeName);
        // If no schema info from database, try schemaMappingService as fallback
        let schemaId: string | undefined = schemaInfo?.schemaId;
        if (!schemaId || schemaId.trim() === '') {
          schemaId = await schemaMappingService.getSchemaId(typeName);
        }
        // Skip if no schema ID is available (object type not configured)
        if (!schemaId || schemaId.trim() === '') {
          logger.debug(`No schema ID configured for ${typeName}, skipping Jira count`);
          jiraCounts[typeName] = 0;
          return { typeName, count: 0 };
        }
        const count = await jiraAssetsClient.getObjectCount(typeName, schemaId);
        jiraCounts[typeName] = count;
        return { typeName, count };
      } catch (error) {
        logger.debug(`Failed to get Jira count for ${typeName}`, error);
        jiraCounts[typeName] = 0;
        return { typeName, count: 0 };
      }
    });
    await Promise.all(countPromises);
    // Calculate per-type differences between Jira and the local cache.
    const typeComparisons: Array<{
      typeName: string;
      typeDisplayName: string;
      schemaId?: string;
      schemaName?: string;
      cacheCount: number;
      jiraCount: number;
      difference: number;
      syncStatus: 'synced' | 'outdated' | 'missing';
    }> = [];
    for (const [typeName, typeDef] of Object.entries(objectTypes)) {
      const cacheCount = objectsByType[typeName] || 0;
      const jiraCount = jiraCounts[typeName] || 0;
      const difference = jiraCount - cacheCount;
      let syncStatus: 'synced' | 'outdated' | 'missing';
      // 'missing': nothing cached although Jira has objects;
      // 'outdated': Jira has more objects than the cache.
      // NOTE(review): a negative difference (cache holds MORE than Jira,
      // i.e. stale extras) is reported as 'synced' — confirm this is intended.
      if (cacheCount === 0 && jiraCount > 0) {
        syncStatus = 'missing';
      } else if (difference > 0) {
        syncStatus = 'outdated';
      } else {
        syncStatus = 'synced';
      }
      const schemaInfo = schemaInfoMap.get(typeName);
      typeComparisons.push({
        typeName,
        typeDisplayName: typeDef.name,
        schemaId: schemaInfo?.schemaId,
        schemaName: schemaInfo?.schemaName,
        cacheCount,
        jiraCount,
        difference,
        syncStatus,
      });
    }
    // Sort by difference (most outdated first)
    typeComparisons.sort((a, b) => b.difference - a.difference);
    // Get relation statistics
    const relationStats = {
      total: cacheStats.totalRelations,
      // Could add more detailed relation stats here
    };
    // Check for broken references (references to objects that don't exist);
    // best-effort, defaults to 0 when the check itself fails.
    let brokenReferences = 0;
    try {
      brokenReferences = await cacheStore.getBrokenReferencesCount();
    } catch (error) {
      logger.debug('Could not check for broken references', error);
    }
    // Get objects with missing required attributes
    // This would require schema information, so we'll skip for now
    res.json({
      cache: {
        totalObjects: cacheStats.totalObjects,
        totalRelations: cacheStats.totalRelations,
        objectsByType,
        isWarm: cacheStats.isWarm,
        dbSizeBytes: cacheStats.dbSizeBytes,
        lastFullSync: cacheStats.lastFullSync,
        lastIncrementalSync: cacheStats.lastIncrementalSync,
      },
      jira: {
        counts: jiraCounts,
      },
      comparison: {
        typeComparisons,
        totalOutdated: typeComparisons.filter(t => t.syncStatus === 'outdated').length,
        totalMissing: typeComparisons.filter(t => t.syncStatus === 'missing').length,
        totalSynced: typeComparisons.filter(t => t.syncStatus === 'synced').length,
      },
      validation: {
        brokenReferences,
        // Add more validation metrics here
      },
      relations: relationStats,
    });
  } catch (error) {
    logger.error('Failed to get data validation stats', error);
    res.status(500).json({ error: 'Failed to get data validation stats' });
  }
});
/**
 * GET /api/data-validation/objects/:typeName
 * Get sample objects of a specific type for inspection.
 *
 * Query params:
 *  - limit: page size (default 10)
 *  - offset: start position (default 0)
 */
router.get('/objects/:typeName', async (req: Request, res: Response) => {
  try {
    const typeName = getParamString(req, 'typeName');
    // Guard pagination inputs: non-numeric or negative values previously
    // flowed through as NaN/negatives straight into the cache query.
    const parsedLimit = parseInt(getQueryString(req, 'limit') || '10', 10);
    const parsedOffset = parseInt(getQueryString(req, 'offset') || '0', 10);
    const limit = Number.isFinite(parsedLimit) && parsedLimit > 0 ? parsedLimit : 10;
    const offset = Number.isFinite(parsedOffset) && parsedOffset >= 0 ? parsedOffset : 0;
    // Get schema from database (via cache) to validate the type name.
    const schema = await schemaCacheService.getSchema();
    const objectTypes = schema.objectTypes;
    if (!objectTypes[typeName]) {
      res.status(400).json({
        error: `Unknown object type: ${typeName}`,
        supportedTypes: Object.keys(objectTypes),
      });
      return;
    }
    const objects = await cacheStore.getObjects(typeName as CMDBObjectTypeName, { limit, offset });
    const total = await cacheStore.countObjects(typeName as CMDBObjectTypeName);
    res.json({
      typeName,
      typeDisplayName: objectTypes[typeName].name,
      objects,
      pagination: {
        limit,
        offset,
        total,
        hasMore: offset + limit < total,
      },
    });
  } catch (error) {
    const typeName = getParamString(req, 'typeName');
    logger.error(`Failed to get objects for type ${typeName}`, error);
    res.status(500).json({ error: 'Failed to get objects' });
  }
});
/**
* GET /api/data-validation/object/:id
* Get a specific object by ID for inspection
*/
router.get('/object/:id', async (req: Request, res: Response) => {
try {
const id = getParamString(req, 'id');
// Try to find the object in any type
// First, get the object's metadata
const objRow = await cacheStore.getObjectMetadata(id);
if (!objRow) {
res.status(404).json({ error: `Object ${id} not found in cache` });
return;
}
// Get schema from database (via cache)
const schema = await schemaCacheService.getSchema();
const objectTypes = schema.objectTypes;
const object = await cacheStore.getObject(objRow.object_type_name as any, id);
if (!object) {
res.status(404).json({ error: `Object ${id} could not be reconstructed` });
return;
}
res.json({
object,
metadata: {
typeName: objRow.object_type_name,
typeDisplayName: objectTypes[objRow.object_type_name]?.name || objRow.object_type_name,
objectKey: objRow.object_key,
label: objRow.label,
},
});
} catch (error) {
const id = getParamString(req, 'id');
logger.error(`Failed to get object ${id}`, error);
res.status(500).json({ error: 'Failed to get object' });
}
});
/**
 * GET /api/data-validation/broken-references
 * List references that point at objects absent from the cache.
 *
 * Query params: limit (default 50), offset (default 0).
 */
router.get('/broken-references', async (req: Request, res: Response) => {
  try {
    // Pagination inputs; parseInt keeps the original defaulting semantics.
    const pageLimit = parseInt(getQueryString(req, 'limit') || '50', 10);
    const pageOffset = parseInt(getQueryString(req, 'offset') || '0', 10);

    // Fetch the requested page and the overall count.
    const rows = await cacheStore.getBrokenReferences(pageLimit, pageOffset);
    const totalCount = await cacheStore.getBrokenReferencesCount();

    res.json({
      brokenReferences: rows,
      pagination: {
        limit: pageLimit,
        offset: pageOffset,
        total: totalCount,
        hasMore: pageOffset + pageLimit < totalCount,
      },
    });
  } catch (error) {
    logger.error('Failed to get broken references', error);
    res.status(500).json({ error: 'Failed to get broken references' });
  }
});
/**
 * POST /api/data-validation/repair-broken-references
 * Repair broken references.
 *
 * Query params:
 *  - mode: 'delete' | 'fetch' | 'dry-run' (default: 'fetch')
 *  - batchSize: number (default: 100)
 *  - maxRepairs: number (default: 0 = unlimited)
 */
router.post('/repair-broken-references', async (req: Request, res: Response) => {
  try {
    const mode = (getQueryString(req, 'mode') || 'fetch') as 'delete' | 'fetch' | 'dry-run';
    // Guard numeric params: non-numeric or out-of-range input previously
    // flowed through as NaN/negative values into the repair service.
    const parsedBatch = parseInt(getQueryString(req, 'batchSize') || '100', 10);
    const parsedMax = parseInt(getQueryString(req, 'maxRepairs') || '0', 10);
    const batchSize = Number.isFinite(parsedBatch) && parsedBatch > 0 ? parsedBatch : 100;
    const maxRepairs = Number.isFinite(parsedMax) && parsedMax >= 0 ? parsedMax : 0;
    if (!['delete', 'fetch', 'dry-run'].includes(mode)) {
      res.status(400).json({ error: 'Invalid mode. Must be: delete, fetch, or dry-run' });
      return;
    }
    logger.info(`DataValidation: Starting repair broken references (mode: ${mode}, batchSize: ${batchSize}, maxRepairs: ${maxRepairs})`);
    const result = await dataIntegrityService.repairBrokenReferences(mode, batchSize, maxRepairs);
    res.json({
      status: 'completed',
      mode,
      result,
    });
  } catch (error) {
    logger.error('Failed to repair broken references', error);
    res.status(500).json({ error: 'Failed to repair broken references' });
  }
});
/**
 * POST /api/data-validation/full-integrity-check
 * Run a full integrity check and optionally repair what it finds.
 *
 * Query params:
 *  - repair: boolean (default: false)
 */
router.post('/full-integrity-check', async (req: Request, res: Response) => {
  try {
    // Only the literal string 'true' enables repair mode.
    const shouldRepair = getQueryString(req, 'repair') === 'true';
    logger.info(`DataValidation: Starting full integrity check (repair: ${shouldRepair})`);
    const checkResult = await dataIntegrityService.fullIntegrityCheck(shouldRepair);
    res.json({ status: 'completed', result: checkResult });
  } catch (error) {
    logger.error('Failed to run full integrity check', error);
    res.status(500).json({ error: 'Failed to run full integrity check' });
  }
});
/**
 * GET /api/data-validation/validation-status
 * Report the current reference-validation status.
 */
router.get('/validation-status', async (_req: Request, res: Response) => {
  try {
    const validationResult = await dataIntegrityService.validateReferences();
    res.json(validationResult);
  } catch (error) {
    logger.error('Failed to get validation status', error);
    res.status(500).json({ error: 'Failed to get validation status' });
  }
});
/**
 * GET /api/data-validation/schema-mappings
 * List every configured object-type -> schema mapping.
 */
router.get('/schema-mappings', async (_req: Request, res: Response) => {
  try {
    const allMappings = await schemaMappingService.getAllMappings();
    res.json({ mappings: allMappings });
  } catch (error) {
    logger.error('Failed to get schema mappings', error);
    res.status(500).json({ error: 'Failed to get schema mappings' });
  }
});
/**
 * POST /api/data-validation/schema-mappings
 * Create or update a schema mapping.
 *
 * Body: { objectTypeName: string, schemaId: string, enabled?: boolean }
 */
router.post('/schema-mappings', async (req: Request, res: Response) => {
  try {
    const { objectTypeName, schemaId, enabled = true } = req.body;
    // Both identifiers must be non-empty strings; the previous truthiness
    // check let non-string bodies through. Keep the original error message
    // so existing clients see a stable response.
    if (typeof objectTypeName !== 'string' || objectTypeName.trim() === '' ||
        typeof schemaId !== 'string' || schemaId.trim() === '') {
      res.status(400).json({ error: 'objectTypeName and schemaId are required' });
      return;
    }
    if (typeof enabled !== 'boolean') {
      res.status(400).json({ error: 'enabled must be a boolean' });
      return;
    }
    await schemaMappingService.setMapping(objectTypeName, schemaId, enabled);
    schemaMappingService.clearCache(); // Clear cache so subsequent reads reload
    res.json({
      status: 'success',
      message: `Schema mapping updated for ${objectTypeName}`,
    });
  } catch (error) {
    logger.error('Failed to set schema mapping', error);
    res.status(500).json({ error: 'Failed to set schema mapping' });
  }
});
/**
 * DELETE /api/data-validation/schema-mappings/:objectTypeName
 * Remove a schema mapping; the type falls back to the default schema.
 */
router.delete('/schema-mappings/:objectTypeName', async (req: Request, res: Response) => {
  try {
    const typeName = getParamString(req, 'objectTypeName');
    await schemaMappingService.deleteMapping(typeName);
    // Invalidate the in-memory mapping cache so the deletion is seen at once.
    schemaMappingService.clearCache();
    res.json({
      status: 'success',
      message: `Schema mapping deleted for ${typeName}`,
    });
  } catch (error) {
    logger.error('Failed to delete schema mapping', error);
    res.status(500).json({ error: 'Failed to delete schema mapping' });
  }
});
/**
 * GET /api/data-validation/object-types
 * List all object types together with their sync configuration.
 */
router.get('/object-types', async (_req: Request, res: Response) => {
  try {
    logger.debug('GET /api/data-validation/object-types - Fetching object types...');
    const typeList = await schemaMappingService.getAllObjectTypesWithConfig();
    logger.info(`GET /api/data-validation/object-types - Returning ${typeList.length} object types`);
    res.json({ objectTypes: typeList });
  } catch (error) {
    logger.error('Failed to get object types', error);
    res.status(500).json({
      error: 'Failed to get object types',
      details: error instanceof Error ? error.message : String(error)
    });
  }
});
/**
 * PATCH /api/data-validation/object-types/:objectTypeName/enabled
 * Toggle whether an object type participates in syncing.
 *
 * Body: { enabled: boolean }
 */
router.patch('/object-types/:objectTypeName/enabled', async (req: Request, res: Response) => {
  try {
    const typeName = getParamString(req, 'objectTypeName');
    const { enabled } = req.body;
    if (typeof enabled !== 'boolean') {
      res.status(400).json({ error: 'enabled must be a boolean' });
      return;
    }
    await schemaMappingService.setTypeEnabled(typeName, enabled);
    // Drop the in-memory mapping cache so the change is visible immediately.
    schemaMappingService.clearCache();
    res.json({
      status: 'success',
      message: `${typeName} ${enabled ? 'enabled' : 'disabled'} for syncing`,
    });
  } catch (error) {
    logger.error('Failed to update object type enabled status', error);
    res.status(500).json({ error: 'Failed to update object type enabled status' });
  }
});
export default router;

View File

@@ -1,11 +1,10 @@
import { Router } from 'express';
import { OBJECT_TYPES, SCHEMA_GENERATED_AT, SCHEMA_OBJECT_TYPE_COUNT, SCHEMA_TOTAL_ATTRIBUTES } from '../generated/jira-schema.js';
import type { ObjectTypeDefinition, AttributeDefinition } from '../generated/jira-schema.js';
import { dataService } from '../services/dataService.js';
import { schemaCacheService } from '../services/schemaCacheService.js';
import { schemaSyncService } from '../services/SchemaSyncService.js';
import { schemaMappingService } from '../services/schemaMappingService.js';
import { logger } from '../services/logger.js';
import { jiraAssetsClient } from '../services/jiraAssetsClient.js';
import { requireAuth, requirePermission } from '../middleware/authorization.js';
import type { CMDBObjectTypeName } from '../generated/jira-types.js';
const router = Router();
@@ -13,125 +12,53 @@ const router = Router();
router.use(requireAuth);
router.use(requirePermission('search'));
// Extended types for API response.
// Object type augmented with computed reference-link metadata for clients.
interface ObjectTypeWithLinks extends ObjectTypeDefinition {
  // Reference attributes declared on OTHER types that point at this type.
  incomingLinks: Array<{
    fromType: string;       // internal type name of the referencing type
    fromTypeName: string;   // display name of the referencing type
    attributeName: string;  // the reference attribute's name
    isMultiple: boolean;    // whether the attribute holds multiple values
  }>;
  // Reference attributes declared on THIS type that point at other types.
  outgoingLinks: Array<{
    toType: string;         // internal type name of the referenced type
    toTypeName: string;     // display name of the referenced type
    attributeName: string;  // the reference attribute's name
    isMultiple: boolean;    // whether the attribute holds multiple values
  }>;
}
// Shape of the GET /api/schema response body.
interface SchemaResponse {
  metadata: {
    generatedAt: string;      // timestamp the schema snapshot was generated
    objectTypeCount: number;  // number of object types in the schema
    totalAttributes: number;  // total attribute count across all types
  };
  objectTypes: Record<string, ObjectTypeWithLinks>;
  cacheCounts?: Record<string, number>; // Cache counts by type name (from objectsByType)
  jiraCounts?: Record<string, number>; // Actual counts from Jira Assets API
}
/**
* GET /api/schema
* Returns the complete Jira Assets schema with object types, attributes, and links
* Data is fetched from database (via cache service)
*/
router.get('/', async (req, res) => {
try {
// Build links between object types
const objectTypesWithLinks: Record<string, ObjectTypeWithLinks> = {};
// Get schema from cache (which fetches from database)
const schema = await schemaCacheService.getSchema();
// First pass: convert all object types
for (const [typeName, typeDef] of Object.entries(OBJECT_TYPES)) {
objectTypesWithLinks[typeName] = {
...typeDef,
incomingLinks: [],
outgoingLinks: [],
};
}
// Optionally fetch Jira counts for comparison (can be slow, so make it optional)
let jiraCounts: Record<string, number> | undefined;
const includeJiraCounts = req.query.includeJiraCounts === 'true';
// Second pass: build link relationships
for (const [typeName, typeDef] of Object.entries(OBJECT_TYPES)) {
for (const attr of typeDef.attributes) {
if (attr.type === 'reference' && attr.referenceTypeName) {
// Add outgoing link from this type
objectTypesWithLinks[typeName].outgoingLinks.push({
toType: attr.referenceTypeName,
toTypeName: OBJECT_TYPES[attr.referenceTypeName]?.name || attr.referenceTypeName,
attributeName: attr.name,
isMultiple: attr.isMultiple,
});
// Add incoming link to the referenced type
if (objectTypesWithLinks[attr.referenceTypeName]) {
objectTypesWithLinks[attr.referenceTypeName].incomingLinks.push({
fromType: typeName,
fromTypeName: typeDef.name,
attributeName: attr.name,
isMultiple: attr.isMultiple,
});
}
if (includeJiraCounts) {
const typeNames = Object.keys(schema.objectTypes);
logger.info(`Schema: Fetching object counts from Jira Assets for ${typeNames.length} object types...`);
jiraCounts = {};
// Fetch counts in parallel for better performance, using schema mappings
const countPromises = typeNames.map(async (typeName) => {
try {
// Get schema ID for this type
const schemaId = await schemaMappingService.getSchemaId(typeName);
const count = await jiraAssetsClient.getObjectCount(typeName, schemaId);
jiraCounts![typeName] = count;
return { typeName, count };
} catch (error) {
logger.warn(`Schema: Failed to get count for ${typeName}`, error);
// Use 0 as fallback if API call fails
jiraCounts![typeName] = 0;
return { typeName, count: 0 };
}
}
});
await Promise.all(countPromises);
logger.info(`Schema: Fetched counts for ${Object.keys(jiraCounts).length} object types from Jira Assets`);
}
// Get cache counts (objectsByType) if available
let cacheCounts: Record<string, number> | undefined;
try {
const cacheStatus = await dataService.getCacheStatus();
cacheCounts = cacheStatus.objectsByType;
} catch (err) {
logger.debug('Could not fetch cache counts for schema response', err);
// Continue without cache counts - not critical
}
// Fetch actual counts from Jira Assets for all object types
// This ensures the counts match exactly what's in Jira Assets
const jiraCounts: Record<string, number> = {};
const typeNames = Object.keys(OBJECT_TYPES) as CMDBObjectTypeName[];
logger.info(`Schema: Fetching object counts from Jira Assets for ${typeNames.length} object types...`);
// Fetch counts in parallel for better performance
const countPromises = typeNames.map(async (typeName) => {
try {
const count = await jiraAssetsClient.getObjectCount(typeName);
jiraCounts[typeName] = count;
return { typeName, count };
} catch (error) {
logger.warn(`Schema: Failed to get count for ${typeName}`, error);
// Use 0 as fallback if API call fails
jiraCounts[typeName] = 0;
return { typeName, count: 0 };
}
});
await Promise.all(countPromises);
logger.info(`Schema: Fetched counts for ${Object.keys(jiraCounts).length} object types from Jira Assets`);
const response: SchemaResponse = {
metadata: {
generatedAt: SCHEMA_GENERATED_AT,
objectTypeCount: SCHEMA_OBJECT_TYPE_COUNT,
totalAttributes: SCHEMA_TOTAL_ATTRIBUTES,
},
objectTypes: objectTypesWithLinks,
cacheCounts,
const response = {
...schema,
jiraCounts,
};
res.json(response);
} catch (error) {
console.error('Failed to get schema:', error);
logger.error('Failed to get schema:', error);
res.status(500).json({ error: 'Failed to get schema' });
}
});
@@ -140,60 +67,62 @@ router.get('/', async (req, res) => {
* GET /api/schema/object-type/:typeName
* Returns details for a specific object type
*/
router.get('/object-type/:typeName', (req, res) => {
const { typeName } = req.params;
const typeDef = OBJECT_TYPES[typeName];
if (!typeDef) {
return res.status(404).json({ error: `Object type '${typeName}' not found` });
}
// Build links for this specific type
const incomingLinks: Array<{
fromType: string;
fromTypeName: string;
attributeName: string;
isMultiple: boolean;
}> = [];
const outgoingLinks: Array<{
toType: string;
toTypeName: string;
attributeName: string;
isMultiple: boolean;
}> = [];
// Outgoing links from this type
for (const attr of typeDef.attributes) {
if (attr.type === 'reference' && attr.referenceTypeName) {
outgoingLinks.push({
toType: attr.referenceTypeName,
toTypeName: OBJECT_TYPES[attr.referenceTypeName]?.name || attr.referenceTypeName,
attributeName: attr.name,
isMultiple: attr.isMultiple,
});
router.get('/object-type/:typeName', async (req, res) => {
try {
const { typeName } = req.params;
// Get schema from cache
const schema = await schemaCacheService.getSchema();
const typeDef = schema.objectTypes[typeName];
if (!typeDef) {
return res.status(404).json({ error: `Object type '${typeName}' not found` });
}
res.json(typeDef);
} catch (error) {
logger.error('Failed to get object type:', error);
res.status(500).json({ error: 'Failed to get object type' });
}
// Incoming links from other types
for (const [otherTypeName, otherTypeDef] of Object.entries(OBJECT_TYPES)) {
for (const attr of otherTypeDef.attributes) {
if (attr.type === 'reference' && attr.referenceTypeName === typeName) {
incomingLinks.push({
fromType: otherTypeName,
fromTypeName: otherTypeDef.name,
attributeName: attr.name,
isMultiple: attr.isMultiple,
});
}
}
});
/**
 * POST /api/schema/discover
 * Manually trigger schema synchronization from the Jira API.
 * Requires manage_settings permission.
 */
router.post('/discover', requirePermission('manage_settings'), async (req, res) => {
  try {
    logger.info('Schema: Manual schema sync triggered');
    const syncResult = await schemaSyncService.syncAll();
    // Drop the cached schema so the next read reflects the fresh sync.
    schemaCacheService.invalidate();
    res.json({
      success: syncResult.success,
      message: 'Schema synchronization completed',
      ...syncResult,
    });
  } catch (error) {
    logger.error('Failed to sync schema:', error);
    res.status(500).json({
      error: 'Failed to sync schema',
      details: error instanceof Error ? error.message : String(error),
    });
  }
});
/**
* GET /api/schema/sync-progress
* Get current sync progress
*/
router.get('/sync-progress', requirePermission('manage_settings'), async (req, res) => {
try {
const progress = schemaSyncService.getProgress();
res.json(progress);
} catch (error) {
logger.error('Failed to get sync progress:', error);
res.status(500).json({ error: 'Failed to get sync progress' });
}
res.json({
...typeDef,
incomingLinks,
outgoingLinks,
});
});
export default router;

View File

@@ -0,0 +1,202 @@
/**
* Schema Configuration routes
*
* Provides endpoints for configuring which object types should be synced.
*/
import { Router, Request, Response } from 'express';
import { logger } from '../services/logger.js';
import { requireAuth, requirePermission } from '../middleware/authorization.js';
import { schemaConfigurationService } from '../services/schemaConfigurationService.js';
import { schemaSyncService } from '../services/SchemaSyncService.js';
const router = Router();

// Every schema-configuration route requires an authenticated session that
// holds the 'manage_settings' permission; both middlewares run first.
router.use(requireAuth);
router.use(requirePermission('manage_settings'));
/**
 * GET /api/schema-configuration/stats
 * Report configuration statistics.
 */
router.get('/stats', async (_req: Request, res: Response) => {
  try {
    const configStats = await schemaConfigurationService.getConfigurationStats();
    res.json(configStats);
  } catch (error) {
    logger.error('Failed to get configuration stats', error);
    res.status(500).json({ error: 'Failed to get configuration stats' });
  }
});
/**
 * POST /api/schema-configuration/discover
 * Discover and store all schemas, object types, and attributes from Jira
 * Assets via the unified SchemaSyncService.
 */
router.post('/discover', async (_req: Request, res: Response) => {
  try {
    logger.info('Schema configuration: Manual schema sync triggered');
    const syncResult = await schemaSyncService.syncAll();
    // Zero schemas almost always means a token/connectivity/endpoint problem,
    // so surface it as a client-visible 400 with troubleshooting hints.
    if (syncResult.schemasProcessed === 0) {
      logger.warn('Schema configuration: Sync returned 0 schemas - this might indicate an API issue');
      res.status(400).json({
        success: false,
        message: 'No schemas found. Please check: 1) JIRA_SERVICE_ACCOUNT_TOKEN is configured correctly, 2) Jira Assets API is accessible, 3) API endpoint /rest/assets/1.0/objectschema/list is available',
        ...syncResult,
      });
      return;
    }
    res.json({
      success: syncResult.success,
      message: 'Schema synchronization completed successfully',
      schemasDiscovered: syncResult.schemasProcessed,
      objectTypesDiscovered: syncResult.objectTypesProcessed,
      attributesDiscovered: syncResult.attributesProcessed,
      ...syncResult,
    });
  } catch (error) {
    logger.error('Failed to sync schemas and object types', error);
    res.status(500).json({
      error: 'Failed to sync schemas and object types',
      details: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined
    });
  }
});
/**
 * GET /api/schema-configuration/object-types
 * Get all configured object types grouped by schema.
 */
router.get('/object-types', async (_req: Request, res: Response) => {
  try {
    const groupedSchemas = await schemaConfigurationService.getConfiguredObjectTypes();
    res.json({ schemas: groupedSchemas });
  } catch (error) {
    logger.error('Failed to get configured object types', error);
    res.status(500).json({ error: 'Failed to get configured object types' });
  }
});
/**
 * PATCH /api/schema-configuration/object-types/:id/enabled
 * Enable or disable a single object type.
 *
 * Body: { enabled: boolean }
 */
router.patch('/object-types/:id/enabled', async (req: Request, res: Response) => {
  try {
    const typeId = req.params.id;
    const { enabled } = req.body;
    if (typeof enabled !== 'boolean') {
      res.status(400).json({ error: 'enabled must be a boolean' });
      return;
    }
    await schemaConfigurationService.setObjectTypeEnabled(typeId, enabled);
    res.json({
      status: 'success',
      message: `Object type ${typeId} ${enabled ? 'enabled' : 'disabled'}`,
    });
  } catch (error) {
    logger.error('Failed to update object type enabled status', error);
    res.status(500).json({ error: 'Failed to update object type enabled status' });
  }
});
/**
 * PATCH /api/schema-configuration/object-types/bulk-enabled
 * Bulk update the enabled status for multiple object types.
 *
 * Body: { updates: Array<{ id: string, enabled: boolean }> }
 */
router.patch('/object-types/bulk-enabled', async (req: Request, res: Response) => {
  try {
    const { updates } = req.body;
    if (!Array.isArray(updates)) {
      res.status(400).json({ error: 'updates must be an array' });
      return;
    }
    // Validate each entry strictly: id must be a non-empty string and enabled
    // a boolean — matching the error message's stated contract. The previous
    // truthiness check on id let non-string values slip through.
    for (const update of updates) {
      if (typeof update?.id !== 'string' || update.id === '' || typeof update.enabled !== 'boolean') {
        res.status(400).json({ error: 'Each update must have id (string) and enabled (boolean)' });
        return;
      }
    }
    await schemaConfigurationService.bulkSetObjectTypesEnabled(updates);
    res.json({
      status: 'success',
      message: `Updated ${updates.length} object types`,
    });
  } catch (error) {
    logger.error('Failed to bulk update object types', error);
    res.status(500).json({ error: 'Failed to bulk update object types' });
  }
});
/**
 * GET /api/schema-configuration/check
 * Report whether configuration is complete (at least one object type
 * enabled), together with the current configuration statistics.
 */
router.get('/check', async (_req: Request, res: Response) => {
  try {
    const isComplete = await schemaConfigurationService.isConfigurationComplete();
    const configStats = await schemaConfigurationService.getConfigurationStats();
    res.json({
      isConfigured: isComplete,
      stats: configStats,
    });
  } catch (error) {
    logger.error('Failed to check configuration', error);
    res.status(500).json({ error: 'Failed to check configuration' });
  }
});
/**
 * GET /api/schema-configuration/schemas
 * List all schemas with their search-enabled status.
 */
router.get('/schemas', async (_req: Request, res: Response) => {
  try {
    const allSchemas = await schemaConfigurationService.getSchemas();
    res.json({ schemas: allSchemas });
  } catch (error) {
    logger.error('Failed to get schemas', error);
    res.status(500).json({ error: 'Failed to get schemas' });
  }
});
/**
 * PATCH /api/schema-configuration/schemas/:schemaId/search-enabled
 * Enable or disable search for a single schema.
 *
 * Body: { searchEnabled: boolean }
 */
router.patch('/schemas/:schemaId/search-enabled', async (req: Request, res: Response) => {
  try {
    const targetSchemaId = req.params.schemaId;
    const { searchEnabled } = req.body;
    if (typeof searchEnabled !== 'boolean') {
      res.status(400).json({ error: 'searchEnabled must be a boolean' });
      return;
    }
    await schemaConfigurationService.setSchemaSearchEnabled(targetSchemaId, searchEnabled);
    res.json({
      status: 'success',
      message: `Schema ${targetSchemaId} search ${searchEnabled ? 'enabled' : 'disabled'}`,
    });
  } catch (error) {
    logger.error('Failed to update schema search enabled status', error);
    res.status(500).json({ error: 'Failed to update schema search enabled status' });
  }
});
export default router;

View File

@@ -0,0 +1,395 @@
/**
* ObjectSyncService - Synchronizes objects from Jira Assets API
*
* Handles:
* - Full sync for enabled types
* - Incremental sync via jira_updated_at
* - Recursive reference processing
* - Reference-only caching for disabled types
*/
import { logger } from './logger.js';
import { jiraAssetsClient } from '../infrastructure/jira/JiraAssetsClient.js';
import { PayloadProcessor, type ProcessedObject } from './PayloadProcessor.js';
import { SchemaRepository } from '../repositories/SchemaRepository.js';
import { ObjectCacheRepository } from '../repositories/ObjectCacheRepository.js';
import type { ObjectEntry } from '../domain/jiraAssetsPayload.js';
import { SyncPolicy } from '../domain/syncPolicy.js';
/** Aggregated outcome of a sync run; sync methods report failures here
 *  instead of throwing, so partial progress is preserved. */
export interface SyncResult {
  objectsProcessed: number;   // root objects fetched from Jira
  objectsCached: number;      // objects written to the local cache
  // NOTE(review): incremented per attribute entry in syncObjectType —
  // verify it actually reflects extracted relations.
  relationsExtracted: number;
  errors: Array<{ objectId: string; error: string }>; // per-object failures
}
export class ObjectSyncService {
  // Payload processor shared by all sync operations; built once per instance.
  private processor: PayloadProcessor;

  /**
   * @param schemaRepo Repository providing schema/object-type configuration.
   * @param cacheRepo  Repository backing the local object cache.
   */
  constructor(
    private schemaRepo: SchemaRepository,
    private cacheRepo: ObjectCacheRepository
  ) {
    this.processor = new PayloadProcessor(schemaRepo, cacheRepo);
  }
  /**
   * Sync all objects of an enabled type.
   *
   * Pages through Jira Assets search results for the type, recursively
   * processes each payload (root objects plus any referenced objects the
   * processor pulls in), and caches every processed object. Failures are
   * reported in the returned SyncResult rather than thrown, so callers
   * always get partial-progress counters.
   *
   * @param schemaId    Jira schema the type belongs to; must be non-empty.
   * @param typeId      Numeric Jira object-type id (not used in this method).
   * @param typeName    Internal type name, used in logs and error entries.
   * @param displayName Human-readable name; also interpolated into the IQL.
   * @returns Counters plus per-object errors accumulated during the run.
   */
  async syncObjectType(
    schemaId: string,
    typeId: number,
    typeName: string,
    displayName: string
  ): Promise<SyncResult> {
    // Validate schemaId before proceeding: an empty id means the type was
    // never associated with a schema, so we fail fast with a helpful message.
    if (!schemaId || schemaId.trim() === '') {
      const errorMessage = `Schema ID is missing or empty for object type "${displayName}" (${typeName}). Please run schema sync to ensure all object types are properly associated with their schemas.`;
      logger.error(`ObjectSyncService: ${errorMessage}`);
      return {
        objectsProcessed: 0,
        objectsCached: 0,
        relationsExtracted: 0,
        errors: [{
          objectId: typeName,
          error: errorMessage,
        }],
      };
    }
    logger.info(`ObjectSyncService: Starting sync for ${displayName} (${typeName}) from schema ${schemaId}`);
    const result: SyncResult = {
      objectsProcessed: 0,
      objectsCached: 0,
      relationsExtracted: 0,
      errors: [],
    };
    try {
      // Enabled types feed the sync policy used when caching referenced objects.
      const enabledTypes = await this.schemaRepo.getEnabledObjectTypes();
      const enabledTypeSet = new Set(enabledTypes.map(t => t.typeName));
      // Fetch all objects of this type, page by page.
      const iql = `objectType = "${displayName}"`;
      let page = 1;
      let hasMore = true;
      const pageSize = 40;
      while (hasMore) {
        let searchResult;
        try {
          searchResult = await jiraAssetsClient.searchObjects(iql, schemaId, {
            page,
            pageSize,
          });
        } catch (error) {
          // Log detailed error information
          const errorMessage = error instanceof Error ? error.message : 'Unknown error';
          const errorDetails = error instanceof Error && error.cause ? String(error.cause) : undefined;
          logger.error(`ObjectSyncService: Failed to search objects for ${typeName}`, {
            error: errorMessage,
            details: errorDetails,
            iql,
            schemaId,
            page,
          });
          // Add error to result and return early
          result.errors.push({
            objectId: typeName,
            error: `Failed to fetch objects from Jira: ${errorMessage}. This could be due to network issues, incorrect Jira host URL, or authentication problems. Check backend logs for details.`,
          });
          // Return result with error instead of throwing (allows partial results to be returned)
          return result;
        }
        if (searchResult.objectEntries.length === 0) {
          break;
        }
        // Process payload recursively (extracts all referenced objects)
        const processed = await this.processor.processPayload(
          searchResult.objectEntries,
          enabledTypeSet
        );
        // Cache all processed objects (root objects + referenced objects).
        const processedEntries = Array.from(processed.entries());
        let cachedCount = 0;
        let skippedCount = 0;
        logger.info(`ObjectSyncService: Processing ${processedEntries.length} objects from payload (includes root + referenced objects). Root objects: ${searchResult.objectEntries.length}`);
        // Group by type purely for the log line below.
        const objectsByType = new Map<string, number>();
        for (const [objectId, processedObj] of processedEntries) {
          const objType = processedObj.typeName || processedObj.objectEntry.objectType?.name || 'Unknown';
          objectsByType.set(objType, (objectsByType.get(objType) || 0) + 1);
        }
        logger.info(`ObjectSyncService: Objects by type: ${Array.from(objectsByType.entries()).map(([type, count]) => `${type}: ${count}`).join(', ')}`);
        for (const [objectId, processedObj] of processedEntries) {
          try {
            // Cache the object (will use fallback type name if needed)
            // cacheProcessedObject should always succeed now due to the generic fallback fix
            await this.cacheProcessedObject(processedObj, enabledTypeSet);
            // Count all cached objects - cacheProcessedObject should always succeed now
            // (it uses a generic fallback type name if no type name is available)
            cachedCount++;
            // NOTE(review): this adds the attribute count, not a count of
            // extracted relations — confirm the metric's intent.
            result.relationsExtracted += processedObj.objectEntry.attributes?.length || 0;
            logger.debug(`ObjectSyncService: Successfully cached object ${processedObj.objectEntry.objectKey} (ID: ${objectId}, type: ${processedObj.typeName || processedObj.objectEntry.objectType?.name || 'fallback'})`);
          } catch (error) {
            logger.error(`ObjectSyncService: Failed to cache object ${objectId} (${processedObj.objectEntry.objectKey})`, error);
            result.errors.push({
              objectId,
              error: error instanceof Error ? error.message : 'Unknown error',
            });
            skippedCount++;
          }
        }
        result.objectsCached = cachedCount;
        if (skippedCount > 0) {
          logger.warn(`ObjectSyncService: Skipped ${skippedCount} objects (no type name available or cache error) out of ${processedEntries.length} processed objects`);
        }
        logger.info(`ObjectSyncService: Cached ${cachedCount} objects, skipped ${skippedCount} objects out of ${processedEntries.length} total processed objects`);
        result.objectsProcessed += searchResult.objectEntries.length;
        hasMore = searchResult.hasMore;
        page++;
      }
      logger.info(
        `ObjectSyncService: Sync complete for ${displayName} - ${result.objectsProcessed} objects processed, ${result.objectsCached} cached, ${result.errors.length} errors`
      );
    } catch (error) {
      logger.error(`ObjectSyncService: Failed to sync ${displayName}`, error);
      result.errors.push({
        objectId: typeName,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
    return result;
  }
/**
* Sync incremental updates (objects updated since timestamp)
* Note: This may not be supported on Jira Data Center
*/
async syncIncremental(
schemaId: string,
since: Date,
enabledTypes: Set<string>
): Promise<SyncResult> {
logger.info(`ObjectSyncService: Starting incremental sync since ${since.toISOString()}`);
const result: SyncResult = {
objectsProcessed: 0,
objectsCached: 0,
relationsExtracted: 0,
errors: [],
};
try {
// IQL for updated objects (may not work on Data Center)
const iql = `updated >= "${since.toISOString()}"`;
const searchResult = await jiraAssetsClient.searchObjects(iql, schemaId, {
page: 1,
pageSize: 100,
});
// Process all entries
const processed = await this.processor.processPayload(searchResult.objectEntries, enabledTypes);
// Cache all processed objects
for (const [objectId, processedObj] of processed.entries()) {
try {
await this.cacheProcessedObject(processedObj, enabledTypes);
result.objectsCached++;
} catch (error) {
logger.error(`ObjectSyncService: Failed to cache object ${objectId} in incremental sync`, error);
result.errors.push({
objectId,
error: error instanceof Error ? error.message : 'Unknown error',
});
}
}
result.objectsProcessed = searchResult.objectEntries.length;
} catch (error) {
logger.error('ObjectSyncService: Incremental sync failed', error);
result.errors.push({
objectId: 'incremental',
error: error instanceof Error ? error.message : 'Unknown error',
});
}
return result;
}
/**
* Sync a single object (for refresh operations)
*/
async syncSingleObject(
objectId: string,
enabledTypes: Set<string>
): Promise<{ cached: boolean; error?: string }> {
try {
// Fetch object from Jira
const entry = await jiraAssetsClient.getObject(objectId);
if (!entry) {
return { cached: false, error: 'Object not found in Jira' };
}
// Process recursively
const processed = await this.processor.processPayload([entry], enabledTypes);
const processedObj = processed.get(String(entry.id));
if (!processedObj) {
return { cached: false, error: 'Failed to process object' };
}
// Cache object
await this.cacheProcessedObject(processedObj, enabledTypes);
return { cached: true };
} catch (error) {
logger.error(`ObjectSyncService: Failed to sync single object ${objectId}`, error);
return {
cached: false,
error: error instanceof Error ? error.message : 'Unknown error',
};
}
}
  /**
   * Cache a processed object to the database.
   *
   * Writes the object record, its attribute values, and its outgoing
   * relations inside a single transaction. When the type was not resolved
   * (typeName is null), a fallback type name is derived from the Jira type
   * so the object is still cached as metadata-only; attribute caching is
   * deferred until schema discovery registers the type.
   *
   * @param processed - Object entry plus resolved type / sync-policy metadata
   * @param enabledTypes - Type names enabled for full sync.
   *   NOTE(review): not referenced in this body — the policy decision is
   *   already carried in processed.syncPolicy; confirm the parameter is
   *   intentionally kept for interface symmetry.
   */
  private async cacheProcessedObject(
    processed: ProcessedObject,
    enabledTypes: Set<string>
  ): Promise<void> {
    const { objectEntry, typeName, syncPolicy, shouldCacheAttributes } = processed;
    // If typeName is not resolved, try to use Jira type name as fallback
    // This allows referenced objects to be cached even if their type hasn't been discovered yet
    let effectiveTypeName = typeName;
    let isFallbackTypeName = false;
    if (!effectiveTypeName) {
      const jiraTypeId = objectEntry.objectType?.id;
      const jiraTypeName = objectEntry.objectType?.name;
      if (jiraTypeName) {
        // Use Jira type name as fallback (will be stored in object_type_name)
        // Generate a PascalCase type name from Jira display name
        const { toPascalCase } = await import('./schemaUtils.js');
        effectiveTypeName = toPascalCase(jiraTypeName) || jiraTypeName.replace(/[^a-zA-Z0-9]/g, '');
        isFallbackTypeName = true;
        logger.debug(`ObjectSyncService: Using fallback type name "${effectiveTypeName}" for object ${objectEntry.objectKey} (Jira type ID: ${jiraTypeId}, Jira name: "${jiraTypeName}"). This type needs to be discovered via schema discovery for proper attribute caching.`, {
          objectKey: objectEntry.objectKey,
          objectId: objectEntry.id,
          jiraTypeId,
          jiraTypeName,
          fallbackTypeName: effectiveTypeName,
        });
      } else {
        // No type name available at all - try to use a generic fallback
        // This ensures referenced objects are always cached and queryable
        const jiraTypeIdStr = jiraTypeId ? String(jiraTypeId) : 'unknown';
        effectiveTypeName = `UnknownType_${jiraTypeIdStr}`;
        isFallbackTypeName = true;
        logger.warn(`ObjectSyncService: Using generic fallback type name "${effectiveTypeName}" for object ${objectEntry.objectKey} (ID: ${objectEntry.id}, Jira type ID: ${jiraTypeId || 'unknown'}). This object will be cached but may need schema discovery for proper attribute caching.`, {
          objectKey: objectEntry.objectKey,
          objectId: objectEntry.id,
          jiraTypeId,
          fallbackTypeName: effectiveTypeName,
          hint: 'Run schema discovery to include all object types that are referenced by your synced objects.',
        });
      }
    }
    // Use effectiveTypeName for the rest of the function
    // (non-null assertion is safe: both branches above assign a value)
    const typeNameToUse = effectiveTypeName!;
    // Normalize object (update processed with effective type name if needed,
    // since normalizeObject throws when typeName is null)
    let processedForNormalization = processed;
    if (isFallbackTypeName) {
      processedForNormalization = {
        ...processed,
        typeName: typeNameToUse,
      };
    }
    const normalized = await this.processor.normalizeObject(processedForNormalization);
    // Access the database adapter to use transactions
    const db = this.cacheRepo.db;
    logger.debug(`ObjectSyncService: About to cache object ${objectEntry.objectKey} (ID: ${objectEntry.id}) with type "${typeNameToUse}" (fallback: ${isFallbackTypeName})`);
    await db.transaction(async (txDb) => {
      // A transaction-scoped repository ensures all writes share txDb
      const txCacheRepo = new ObjectCacheRepository(txDb);
      // Upsert object record (with effective type name)
      logger.debug(`ObjectSyncService: Upserting object ${objectEntry.objectKey} (ID: ${objectEntry.id}) with type "${typeNameToUse}" (fallback: ${isFallbackTypeName})`);
      await txCacheRepo.upsertObject({
        ...normalized.objectRecord,
        objectTypeName: typeNameToUse,
      });
      // Handle attributes based on sync policy
      // CRITICAL: Only replace attributes if attributes[] was present in API response
      // For fallback type names, skip attribute caching (we don't have attribute definitions)
      if (!isFallbackTypeName && (syncPolicy === SyncPolicy.ENABLED || syncPolicy === SyncPolicy.REFERENCE_ONLY) && shouldCacheAttributes) {
        // Delete existing attributes (full replace)
        await txCacheRepo.deleteAttributeValues(normalized.objectRecord.id);
        // Insert new attributes
        if (normalized.attributeValues.length > 0) {
          await txCacheRepo.batchUpsertAttributeValues(
            normalized.attributeValues.map(v => ({
              ...v,
              objectId: normalized.objectRecord.id,
            }))
          );
        }
        // If attributes[] not present on shallow object, keep existing attributes (don't delete)
      } else if (!isFallbackTypeName && (syncPolicy === SyncPolicy.ENABLED || syncPolicy === SyncPolicy.REFERENCE_ONLY)) {
        // Cache object metadata even without attributes (reference-only)
        // This allows basic object lookups for references
        // (intentionally empty: the upsert above already did the work)
      } else if (isFallbackTypeName) {
        // For fallback type names, only cache object metadata (no attributes)
        // Attributes will be cached once the type is properly discovered
        logger.debug(`ObjectSyncService: Skipping attribute caching for object ${objectEntry.objectKey} with fallback type name "${typeNameToUse}". Attributes will be cached after schema discovery.`);
      }
      // Upsert relations (full replace: delete then insert)
      await txCacheRepo.deleteRelations(normalized.objectRecord.id);
      for (const relation of normalized.relations) {
        // Resolve target type name from the cache when available; fall back
        // to the (Jira display) type name carried on the relation itself
        const targetObj = await txCacheRepo.getObject(relation.targetId);
        const targetType = targetObj?.objectTypeName || relation.targetType;
        await txCacheRepo.upsertRelation({
          sourceId: normalized.objectRecord.id,
          targetId: relation.targetId,
          attributeId: relation.attributeId,
          sourceType: normalized.objectRecord.objectTypeName,
          targetType,
        });
      }
    });
  }
}

View File

@@ -0,0 +1,369 @@
/**
* PayloadProcessor - Recursive processing of Jira Assets API payloads
*
* Handles:
* - Recursive reference expansion (level2, level3, etc.)
* - Cycle detection with visited sets
* - Attribute replacement only when attributes[] present
* - Reference-only caching for disabled types
*/
import { logger } from './logger.js';
import type { ObjectEntry, ReferencedObject, ObjectAttribute, ObjectAttributeValue, ConfluenceValue } from '../domain/jiraAssetsPayload.js';
import { isReferenceValue, isSimpleValue, hasAttributes } from '../domain/jiraAssetsPayload.js';
import type { SyncPolicy } from '../domain/syncPolicy.js';
import { SyncPolicy as SyncPolicyEnum } from '../domain/syncPolicy.js';
import type { SchemaRepository } from '../repositories/SchemaRepository.js';
import type { ObjectCacheRepository } from '../repositories/ObjectCacheRepository.js';
import type { AttributeRecord } from '../repositories/SchemaRepository.js';
/**
 * An object extracted from an API payload, enriched with the metadata the
 * sync layer needs to decide how (and whether) to cache it.
 */
export interface ProcessedObject {
  objectEntry: ObjectEntry;
  typeName: string | null; // Resolved from objectType.id; null when the type is not yet discovered
  syncPolicy: SyncPolicy;
  shouldCacheAttributes: boolean; // true if attributes[] present in the API payload
}
/**
 * Recursively expands Jira Assets payloads into a flat map of processed
 * objects, then normalizes them into EAV (entity-attribute-value) records
 * suitable for the cache repository.
 */
export class PayloadProcessor {
  // schemaRepo: resolves Jira type/attribute IDs to local definitions
  // cacheRepo: injected for parity with other services (not used directly here)
  constructor(
    private schemaRepo: SchemaRepository,
    private cacheRepo: ObjectCacheRepository
  ) {}
  /**
   * Process a payload recursively, extracting all objects
   *
   * @param objectEntries - Root objects from API
   * @param enabledTypes - Set of enabled type names for full sync
   * @returns Map of objectId -> ProcessedObject (includes recursive references)
   */
  async processPayload(
    objectEntries: ObjectEntry[],
    enabledTypes: Set<string>
  ): Promise<Map<string, ProcessedObject>> {
    const processed = new Map<string, ProcessedObject>();
    const visited = new Set<string>(); // objectId/objectKey for cycle detection
    // Process root entries
    for (const entry of objectEntries) {
      await this.processEntryRecursive(entry, enabledTypes, processed, visited);
    }
    return processed;
  }
  /**
   * Process a single entry recursively.
   *
   * The visited set tracks only the CURRENT recursion path (entries are
   * removed on the way back up), so the same object may legitimately be
   * reached via different reference chains.
   */
  private async processEntryRecursive(
    entry: ObjectEntry | ReferencedObject,
    enabledTypes: Set<string>,
    processed: Map<string, ProcessedObject>,
    visited: Set<string>
  ): Promise<void> {
    // Extract ID and key for cycle detection
    const objectId = String(entry.id);
    const objectKey = entry.objectKey;
    // Check for cycles (use both ID and key as visited can have either)
    const visitedKey = `${objectId}:${objectKey}`;
    if (visited.has(visitedKey)) {
      logger.debug(`PayloadProcessor: Cycle detected for ${objectKey} (${objectId}), skipping`);
      return;
    }
    visited.add(visitedKey);
    // Resolve type name from Jira type ID
    // NOTE(review): objectType is dereferenced without ?. here but with ?.
    // elsewhere in this class — confirm it is always present on entries.
    const typeName = await this.resolveTypeName(entry.objectType.id);
    const syncPolicy = this.getSyncPolicy(typeName, enabledTypes);
    // Determine if we should cache attributes
    // CRITICAL: Only replace attributes if attributes[] array is present
    const shouldCacheAttributes = hasAttributes(entry);
    // Store processed object (always update if already exists to ensure latest data)
    // Convert ReferencedObject to ObjectEntry format for storage
    const objectEntry: ObjectEntry = {
      id: entry.id,
      objectKey: entry.objectKey,
      label: entry.label,
      objectType: entry.objectType,
      created: entry.created,
      updated: entry.updated,
      hasAvatar: entry.hasAvatar,
      timestamp: entry.timestamp,
      attributes: hasAttributes(entry) ? entry.attributes : undefined,
    };
    processed.set(objectId, {
      objectEntry,
      typeName,
      syncPolicy,
      shouldCacheAttributes,
    });
    logger.debug(`PayloadProcessor: Added object ${objectEntry.objectKey} (ID: ${objectId}, Jira type: ${entry.objectType?.name}, resolved type: ${typeName || 'null'}) to processed map. Total processed: ${processed.size}`);
    // Process recursive references if attributes are present
    if (hasAttributes(entry)) {
      logger.debug(`PayloadProcessor: Processing ${entry.attributes!.length} attributes for recursive references in object ${objectEntry.objectKey} (ID: ${objectId})`);
      await this.processRecursiveReferences(
        entry.attributes!,
        enabledTypes,
        processed,
        visited
      );
    } else {
      logger.debug(`PayloadProcessor: Object ${objectEntry.objectKey} (ID: ${objectId}) has no attributes array, skipping recursive processing`);
    }
    // Remove from visited set when done (allows same object in different contexts)
    visited.delete(visitedKey);
  }
  /**
   * Process recursive references from attributes.
   * Recurses into every referenced object; this is what implements
   * level2/level3/... expansion.
   */
  private async processRecursiveReferences(
    attributes: ObjectAttribute[],
    enabledTypes: Set<string>,
    processed: Map<string, ProcessedObject>,
    visited: Set<string>
  ): Promise<void> {
    for (const attr of attributes) {
      for (const value of attr.objectAttributeValues) {
        if (isReferenceValue(value)) {
          const refObj = value.referencedObject;
          // Process referenced object recursively
          // This handles level2, level3, etc. expansion
          await this.processEntryRecursive(refObj, enabledTypes, processed, visited);
        }
      }
    }
  }
  /**
   * Resolve type name from Jira type ID.
   * Returns null when the type has not been stored by schema discovery yet.
   */
  private async resolveTypeName(jiraTypeId: number): Promise<string | null> {
    const objectType = await this.schemaRepo.getObjectTypeByJiraId(jiraTypeId);
    if (!objectType) {
      // Track missing type IDs for diagnostics
      logger.debug(`PayloadProcessor: Jira type ID ${jiraTypeId} not found in object_types table. This type needs to be discovered via schema sync.`);
      return null;
    }
    return objectType.typeName || null;
  }
  /**
   * Get sync policy for a type:
   * unknown -> SKIP, enabled -> ENABLED, otherwise REFERENCE_ONLY.
   */
  private getSyncPolicy(typeName: string | null, enabledTypes: Set<string>): SyncPolicy {
    if (!typeName) {
      return SyncPolicyEnum.SKIP; // Unknown type - skip
    }
    if (enabledTypes.has(typeName)) {
      return SyncPolicyEnum.ENABLED;
    }
    // Reference-only: cache minimal metadata for references
    return SyncPolicyEnum.REFERENCE_ONLY;
  }
  /**
   * Normalize an object entry to database format.
   * This converts ObjectEntry to EAV format.
   *
   * @param processed - Processed object (typeName must be resolved)
   * @returns Object record, per-attribute value rows, and outgoing relations
   * @throws Error when typeName is null (caller must supply a fallback first)
   */
  async normalizeObject(
    processed: ProcessedObject
  ): Promise<{
    objectRecord: {
      id: string;
      objectKey: string;
      objectTypeName: string;
      label: string;
      jiraUpdatedAt: string;
      jiraCreatedAt: string;
    };
    attributeValues: Array<{
      attributeId: number;
      textValue?: string | null;
      numberValue?: number | null;
      booleanValue?: boolean | null;
      dateValue?: string | null;
      datetimeValue?: string | null;
      referenceObjectId?: string | null;
      referenceObjectKey?: string | null;
      referenceObjectLabel?: string | null;
      arrayIndex: number;
    }>;
    relations: Array<{
      targetId: string;
      attributeId: number;
      targetType: string;
    }>;
  }> {
    const { objectEntry, typeName } = processed;
    if (!typeName) {
      throw new Error(`Cannot normalize object ${objectEntry.objectKey}: type name not resolved`);
    }
    // Get attributes for this type
    const attributeDefs = await this.schemaRepo.getAttributesForType(typeName);
    const attrMap = new Map(attributeDefs.map(a => [a.jiraAttrId, a]));
    // Extract object record
    const objectRecord = {
      id: String(objectEntry.id),
      objectKey: objectEntry.objectKey,
      objectTypeName: typeName,
      label: objectEntry.label,
      jiraUpdatedAt: objectEntry.updated,
      jiraCreatedAt: objectEntry.created,
    };
    // Normalize attributes
    const attributeValues: Array<{
      attributeId: number;
      textValue?: string | null;
      numberValue?: number | null;
      booleanValue?: boolean | null;
      dateValue?: string | null;
      datetimeValue?: string | null;
      referenceObjectId?: string | null;
      referenceObjectKey?: string | null;
      referenceObjectLabel?: string | null;
      arrayIndex: number;
    }> = [];
    const relations: Array<{
      targetId: string;
      attributeId: number;
      targetType: string;
    }> = [];
    // Process attributes if present
    if (hasAttributes(objectEntry) && objectEntry.attributes) {
      for (const attr of objectEntry.attributes) {
        const attrDef = attrMap.get(attr.objectTypeAttributeId);
        if (!attrDef) {
          // Attribute not known for this type; skip the value entirely
          logger.warn(`PayloadProcessor: Unknown attribute ID ${attr.objectTypeAttributeId} for type ${typeName}`);
          continue;
        }
        // Process attribute values
        for (let arrayIndex = 0; arrayIndex < attr.objectAttributeValues.length; arrayIndex++) {
          const value = attr.objectAttributeValues[arrayIndex];
          // Normalize based on value type
          const normalizedValue = this.normalizeAttributeValue(value, attrDef, objectRecord.id, relations);
          attributeValues.push({
            attributeId: attrDef.id,
            ...normalizedValue,
            // Single-valued attributes collapse to index 0
            arrayIndex: attrDef.isMultiple ? arrayIndex : 0,
          });
        }
      }
    }
    return {
      objectRecord,
      attributeValues,
      relations,
    };
  }
  /**
   * Normalize a single attribute value into typed columns, appending to
   * `relations` as a side effect for reference values.
   *
   * NOTE(review): sourceObjectId is currently unused in this body — confirm
   * it is intentionally kept for future use.
   */
  private normalizeAttributeValue(
    value: ObjectAttributeValue,
    attrDef: AttributeRecord,
    sourceObjectId: string,
    relations: Array<{ targetId: string; attributeId: number; targetType: string }>
  ): {
    textValue?: string | null;
    numberValue?: number | null;
    booleanValue?: boolean | null;
    dateValue?: string | null;
    datetimeValue?: string | null;
    referenceObjectId?: string | null;
    referenceObjectKey?: string | null;
    referenceObjectLabel?: string | null;
  } {
    // Handle reference values
    if (isReferenceValue(value)) {
      const ref = value.referencedObject;
      const refId = String(ref.id);
      // Extract relation
      // Note: targetType will be resolved later from ref.objectType.id
      relations.push({
        targetId: refId,
        attributeId: attrDef.id,
        targetType: ref.objectType.name, // Will be resolved to typeName during store
      });
      return {
        referenceObjectId: refId,
        referenceObjectKey: ref.objectKey,
        referenceObjectLabel: ref.label,
      };
    }
    // Handle simple values
    if (isSimpleValue(value)) {
      const val = value.value;
      switch (attrDef.attrType) {
        case 'text':
        case 'textarea':
        case 'url':
        case 'email':
        case 'select':
        case 'user':
        case 'status':
          return { textValue: String(val) };
        case 'integer':
          // NOTE(review): parseInt may yield NaN for non-numeric input — confirm upstream data is numeric
          return { numberValue: typeof val === 'number' ? val : parseInt(String(val), 10) };
        case 'float':
          return { numberValue: typeof val === 'number' ? val : parseFloat(String(val)) };
        case 'boolean':
          // NOTE(review): Boolean(val) is true for the non-empty string "false" —
          // confirm Jira delivers real booleans (not strings) for this attr type
          return { booleanValue: Boolean(val) };
        case 'date':
          return { dateValue: String(val) };
        case 'datetime':
          return { datetimeValue: String(val) };
        default:
          return { textValue: String(val) };
      }
    }
    // Handle status values
    if ('status' in value && value.status) {
      return { textValue: value.status.name };
    }
    // Handle Confluence values
    if ('confluencePage' in value && value.confluencePage) {
      const confluenceVal = value as ConfluenceValue;
      return { textValue: confluenceVal.confluencePage.url || confluenceVal.displayValue };
    }
    // Handle user values
    if ('user' in value && value.user) {
      return { textValue: value.user.displayName || value.user.name || value.displayValue };
    }
    // Fallback to displayValue
    return { textValue: value.displayValue || null };
  }
}

View File

@@ -0,0 +1,240 @@
/**
* QueryService - Universal query builder (DB → TypeScript)
*
* Reconstructs TypeScript objects from normalized EAV database.
*/
import { logger } from './logger.js';
import { SchemaRepository } from '../repositories/SchemaRepository.js';
import { ObjectCacheRepository } from '../repositories/ObjectCacheRepository.js';
import type { CMDBObject, CMDBObjectTypeName } from '../generated/jira-types.js';
import type { AttributeRecord } from '../repositories/SchemaRepository.js';
/**
 * Options for list/search queries.
 * NOTE(review): getObjects currently applies only limit/offset; orderBy,
 * orderDir and searchTerm are accepted but ignored there (see getObjects).
 */
export interface QueryOptions {
  limit?: number;
  offset?: number;
  orderBy?: string;
  orderDir?: 'ASC' | 'DESC';
  searchTerm?: string;
}
/**
 * Reconstructs typed CMDB objects from the normalized EAV cache tables.
 */
export class QueryService {
  // schemaRepo: attribute definitions per type; cacheRepo: cached objects/values
  constructor(
    private schemaRepo: SchemaRepository,
    private cacheRepo: ObjectCacheRepository
  ) {}
  /**
   * Get a single object by ID.
   * Returns null when the object is missing OR its cached type does not
   * match the requested typeName.
   */
  async getObject<T extends CMDBObject>(
    typeName: CMDBObjectTypeName,
    id: string
  ): Promise<T | null> {
    // Get object record
    const objRecord = await this.cacheRepo.getObject(id);
    if (!objRecord || objRecord.objectTypeName !== typeName) {
      return null;
    }
    // Reconstruct object from EAV data
    return await this.reconstructObject<T>(objRecord);
  }
  /**
   * Get objects of a type with filters.
   * NOTE(review): only limit/offset from QueryOptions are applied here;
   * orderBy/orderDir/searchTerm are ignored — confirm this is intended.
   */
  async getObjects<T extends CMDBObject>(
    typeName: CMDBObjectTypeName,
    options: QueryOptions = {}
  ): Promise<T[]> {
    const { limit = 1000, offset = 0 } = options;
    logger.debug(`QueryService.getObjects: Querying for typeName="${typeName}" with limit=${limit}, offset=${offset}`);
    // Get object records
    const objRecords = await this.cacheRepo.getObjectsByType(typeName, { limit, offset });
    logger.debug(`QueryService.getObjects: Found ${objRecords.length} object records for typeName="${typeName}"`);
    // Check if no records found - might be a type name mismatch
    if (objRecords.length === 0) {
      // Diagnostic: Check what object_type_name values actually exist in the database
      const db = this.cacheRepo.db;
      try {
        const allTypeNames = await db.query<{ object_type_name: string; count: number }>(
          `SELECT object_type_name, COUNT(*) as count
           FROM objects
           GROUP BY object_type_name
           ORDER BY count DESC
           LIMIT 20`
        );
        logger.warn(`QueryService.getObjects: No objects found for typeName="${typeName}". Available object_type_name values in database:`, {
          requestedType: typeName,
          availableTypes: allTypeNames.map(t => ({ typeName: t.object_type_name, count: t.count })),
          totalTypes: allTypeNames.length,
          hint: 'The typeName might not match the object_type_name stored in the database. Check for case sensitivity or naming differences.',
        });
      } catch (error) {
        // Diagnostics only; never let them fail the query
        logger.debug('QueryService.getObjects: Failed to query available type names', error);
      }
    }
    // Reconstruct all objects
    const objects = await Promise.all(
      objRecords.map(record => this.reconstructObject<T>(record))
    );
    // Filter out nulls and type assert
    const validObjects = objects.filter(obj => obj !== null && obj !== undefined);
    logger.debug(`QueryService.getObjects: Successfully reconstructed ${validObjects.length}/${objRecords.length} objects for typeName="${typeName}"`);
    return validObjects as T[];
  }
  /**
   * Count objects of a type.
   */
  async countObjects(typeName: CMDBObjectTypeName): Promise<number> {
    return await this.cacheRepo.countObjectsByType(typeName);
  }
  /**
   * Search objects by label (case-insensitive substring match).
   * NOTE(review): uses `?` placeholders — assumes the DB adapter translates
   * these for PostgreSQL; confirm against the adapter implementation.
   */
  async searchByLabel<T extends CMDBObject>(
    typeName: CMDBObjectTypeName,
    searchTerm: string,
    options: QueryOptions = {}
  ): Promise<T[]> {
    const { limit = 100, offset = 0 } = options;
    // Get object records with label filter
    const objRecords = await this.cacheRepo.db.query<{
      id: string;
      objectKey: string;
      objectTypeName: string;
      label: string;
      jiraUpdatedAt: string | null;
      jiraCreatedAt: string | null;
      cachedAt: string;
    }>(
      `SELECT id, object_key as objectKey, object_type_name as objectTypeName, label,
              jira_updated_at as jiraUpdatedAt, jira_created_at as jiraCreatedAt, cached_at as cachedAt
       FROM objects
       WHERE object_type_name = ? AND LOWER(label) LIKE LOWER(?)
       ORDER BY label ASC
       LIMIT ? OFFSET ?`,
      [typeName, `%${searchTerm}%`, limit, offset]
    );
    // Reconstruct objects
    const objects = await Promise.all(
      objRecords.map(record => this.reconstructObject<T>(record))
    );
    // Filter out nulls and type assert
    const validObjects = objects.filter(obj => obj !== null && obj !== undefined);
    return validObjects as T[];
  }
  /**
   * Reconstruct a TypeScript object from database records.
   *
   * Maps each stored attribute value back onto its field name (per the
   * type's attribute definitions); multi-valued attributes accumulate into
   * arrays in row order.
   */
  private async reconstructObject<T extends CMDBObject>(
    objRecord: {
      id: string;
      objectKey: string;
      objectTypeName: string;
      label: string;
      jiraUpdatedAt: string | null;
      jiraCreatedAt: string | null;
    }
  ): Promise<T | null> {
    // Get attribute definitions for this type
    const attributeDefs = await this.schemaRepo.getAttributesForType(objRecord.objectTypeName);
    const attrMap = new Map(attributeDefs.map(a => [a.id, a]));
    // Get attribute values
    const attributeValues = await this.cacheRepo.getAttributeValues(objRecord.id);
    // Build attribute map: fieldName -> value(s)
    const attributes: Record<string, unknown> = {};
    for (const attrValue of attributeValues) {
      const attrDef = attrMap.get(attrValue.attributeId);
      if (!attrDef) {
        // Value row references an attribute no longer in the schema; skip it
        logger.warn(`QueryService: Unknown attribute ID ${attrValue.attributeId} for object ${objRecord.id}`);
        continue;
      }
      // Extract value based on type
      let value: unknown = null;
      switch (attrDef.attrType) {
        case 'reference':
          if (attrValue.referenceObjectId) {
            value = {
              objectId: attrValue.referenceObjectId,
              objectKey: attrValue.referenceObjectKey || '',
              label: attrValue.referenceObjectLabel || '',
            };
          }
          break;
        case 'text':
        case 'textarea':
        case 'url':
        case 'email':
        case 'select':
        case 'user':
        case 'status':
          value = attrValue.textValue;
          break;
        case 'integer':
        case 'float':
          value = attrValue.numberValue;
          break;
        case 'boolean':
          value = attrValue.booleanValue;
          break;
        case 'date':
          value = attrValue.dateValue;
          break;
        case 'datetime':
          value = attrValue.datetimeValue;
          break;
        default:
          // Unknown attr type: fall back to the text column
          value = attrValue.textValue;
      }
      // Handle arrays vs single values
      if (attrDef.isMultiple) {
        if (!attributes[attrDef.fieldName]) {
          attributes[attrDef.fieldName] = [];
        }
        (attributes[attrDef.fieldName] as unknown[]).push(value);
      } else {
        attributes[attrDef.fieldName] = value;
      }
    }
    // Build CMDBObject
    // NOTE(review): missing jira timestamps fall back to "now", which is a
    // synthetic value — confirm consumers tolerate this.
    const result: Record<string, unknown> = {
      id: objRecord.id,
      objectKey: objRecord.objectKey,
      label: objRecord.label,
      _objectType: objRecord.objectTypeName,
      _jiraUpdatedAt: objRecord.jiraUpdatedAt || new Date().toISOString(),
      _jiraCreatedAt: objRecord.jiraCreatedAt || new Date().toISOString(),
      ...attributes,
    };
    return result as T;
  }
}

View File

@@ -0,0 +1,75 @@
/**
* RefreshService - Handles force-refresh-on-read with deduping/locks
*
* Prevents duplicate refresh operations for the same object.
*/
import { logger } from './logger.js';
import { jiraAssetsClient } from '../infrastructure/jira/JiraAssetsClient.js';
import { ObjectSyncService } from './ObjectSyncService.js';
import { SchemaRepository } from '../repositories/SchemaRepository.js';
/**
 * Deduplicates refresh operations: at most one in-flight refresh per object
 * ID; concurrent callers await the same underlying sync.
 */
export class RefreshService {
  /** In-flight refresh promise per object ID, used for deduplication. */
  private refreshLocks: Map<string, Promise<void>> = new Map();
  /** Upper bound on how long a caller waits for a refresh to finish. */
  private readonly LOCK_TIMEOUT_MS = 30000; // 30 seconds
  constructor(private syncService: ObjectSyncService) {}
  /**
   * Refresh a single object with deduplication.
   * If another refresh is already in progress for the same object, wait for
   * it instead of starting a duplicate.
   *
   * @param objectId - Jira object ID to refresh
   * @param enabledTypes - Type names enabled for full attribute sync
   * @returns success flag plus an error message on failure
   */
  async refreshObject(
    objectId: string,
    enabledTypes: Set<string>
  ): Promise<{ success: boolean; error?: string }> {
    // Check if refresh already in progress
    const existingLock = this.refreshLocks.get(objectId);
    if (existingLock) {
      logger.debug(`RefreshService: Refresh already in progress for ${objectId}, waiting...`);
      try {
        await existingLock;
        return { success: true }; // Previous refresh succeeded
      } catch (error) {
        logger.warn(`RefreshService: Previous refresh failed for ${objectId}, retrying...`, error);
        // Fall through and start a fresh refresh
      }
    }
    // Create new refresh promise and register it for deduplication
    const refreshPromise = this.doRefresh(objectId, enabledTypes);
    this.refreshLocks.set(objectId, refreshPromise);
    // Keep the timer handle so the timeout can be cancelled once the race
    // settles. Bug fix: previously the 30s timer was never cleared, leaking
    // a live timer per refresh and keeping the event loop alive.
    let timeoutHandle: ReturnType<typeof setTimeout> | undefined;
    try {
      // Add timeout to prevent callers from hanging forever
      const timeoutPromise = new Promise<void>((_, reject) => {
        timeoutHandle = setTimeout(() => reject(new Error('Refresh timeout')), this.LOCK_TIMEOUT_MS);
      });
      await Promise.race([refreshPromise, timeoutPromise]);
      return { success: true };
    } catch (error) {
      logger.error(`RefreshService: Failed to refresh object ${objectId}`, error);
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      };
    } finally {
      // Cancel the pending timeout (harmless if it already fired)
      if (timeoutHandle !== undefined) {
        clearTimeout(timeoutHandle);
      }
      // Clean up lock after a delay (allow concurrent reads to dedupe briefly)
      setTimeout(() => {
        this.refreshLocks.delete(objectId);
      }, 1000);
    }
  }
  /**
   * Perform the actual refresh via ObjectSyncService.
   * @throws Error when the object could not be cached
   */
  private async doRefresh(objectId: string, enabledTypes: Set<string>): Promise<void> {
    const result = await this.syncService.syncSingleObject(objectId, enabledTypes);
    if (!result.cached) {
      throw new Error(result.error || 'Failed to cache object');
    }
  }
}

View File

@@ -0,0 +1,817 @@
/**
* Schema Sync Service
*
* Unified service for synchronizing Jira Assets schema configuration to local database.
* Implements the complete sync flow as specified in the refactor plan.
*/
import { logger } from './logger.js';
import { getDatabaseAdapter } from './database/singleton.js';
import type { DatabaseAdapter } from './database/interface.js';
import { config } from '../config/env.js';
import { toCamelCase, toPascalCase, mapJiraType, determineSyncPriority } from './schemaUtils.js';
// =============================================================================
// Types
// =============================================================================
/** Schema summary as returned by the Assets objectschema endpoints. */
interface JiraSchema {
  id: number;
  name: string;
  objectSchemaKey?: string;
  status?: string;
  description?: string;
  created?: string;
  updated?: string;
  objectCount?: number;
  objectTypeCount?: number;
}
/** Object type as returned by the Assets objecttype endpoints. */
interface JiraObjectType {
  id: number;
  name: string;
  type?: number;
  description?: string;
  icon?: {
    id: number;
    name: string;
    url16?: string;
    url48?: string;
  };
  position?: number;
  created?: string;
  updated?: string;
  objectCount?: number;
  parentObjectTypeId?: number | null;
  objectSchemaId: number;
  inherited?: boolean;
  abstractObjectType?: boolean;
}
/** Attribute definition as returned by /objecttype/{id}/attributes. */
interface JiraAttribute {
  id: number;
  objectType?: {
    id: number;
    name: string;
  };
  name: string;
  label?: boolean;
  type: number;
  description?: string;
  defaultType?: {
    id: number;
    name: string;
  } | null;
  typeValue?: string | null;
  typeValueMulti?: string[];
  additionalValue?: string | null;
  referenceType?: {
    id: number;
    name: string;
    description?: string;
    color?: string;
    url16?: string | null;
    removable?: boolean;
    objectSchemaId?: number;
  } | null;
  referenceObjectTypeId?: number | null;
  referenceObjectType?: {
    id: number;
    name: string;
    objectSchemaId?: number;
  } | null;
  editable?: boolean;
  system?: boolean;
  sortable?: boolean;
  summable?: boolean;
  indexed?: boolean;
  minimumCardinality?: number;
  maximumCardinality?: number;
  suffix?: string;
  removable?: boolean;
  hidden?: boolean;
  includeChildObjectTypes?: boolean;
  uniqueAttribute?: boolean;
  regexValidation?: string | null;
  iql?: string | null;
  options?: string;
  position?: number;
}
/** Aggregate outcome of one full schema synchronization run. */
export interface SyncResult {
  success: boolean;
  schemasProcessed: number;
  objectTypesProcessed: number;
  attributesProcessed: number;
  schemasDeleted: number;
  objectTypesDeleted: number;
  attributesDeleted: number;
  errors: SyncError[];
  duration: number; // milliseconds
}
/** A single failure recorded during sync, tagged by the entity kind. */
export interface SyncError {
  type: 'schema' | 'objectType' | 'attribute';
  id: string | number;
  message: string;
}
/** Live progress snapshot exposed while a sync is running. */
export interface SyncProgress {
  status: 'idle' | 'running' | 'completed' | 'failed';
  currentSchema?: string;
  currentObjectType?: string;
  schemasTotal: number;
  schemasCompleted: number;
  objectTypesTotal: number;
  objectTypesCompleted: number;
  startedAt?: Date;
  estimatedCompletion?: Date;
}
// =============================================================================
// SchemaSyncService Implementation
// =============================================================================
class SchemaSyncService {
  // Database adapter (set from the process-wide singleton in the constructor)
  private db: DatabaseAdapter;
  // True when the adapter reports a PostgreSQL backend
  private isPostgres: boolean;
  // Base URL for the Jira Assets REST API (derived from config.jiraHost)
  private baseUrl: string;
  // Live progress snapshot; reset/updated as a sync run advances
  private progress: SyncProgress = {
    status: 'idle',
    schemasTotal: 0,
    schemasCompleted: 0,
    objectTypesTotal: 0,
    objectTypesCompleted: 0,
  };
  // Rate limiting configuration
  private readonly RATE_LIMIT_DELAY_MS = 150; // 150ms between requests
  private readonly MAX_RETRIES = 3;
  private readonly RETRY_DELAY_MS = 1000;
  /** Wires up the database adapter and computes the Assets REST base URL. */
  constructor() {
    this.db = getDatabaseAdapter();
    this.isPostgres = (this.db.isPostgres === true);
    this.baseUrl = `${config.jiraHost}/rest/assets/1.0`;
  }
/**
* Get authentication headers for API requests
*/
private getHeaders(): Record<string, string> {
const token = config.jiraServiceAccountToken;
if (!token) {
throw new Error('JIRA_SERVICE_ACCOUNT_TOKEN not configured. Schema sync requires a service account token.');
}
return {
'Authorization': `Bearer ${token}`,
'Content-Type': 'application/json',
'Accept': 'application/json',
};
}
/**
* Rate limiting delay
*/
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Fetch with rate limiting and retry logic
*/
private async fetchWithRateLimit<T>(
url: string,
retries: number = this.MAX_RETRIES
): Promise<T> {
await this.delay(this.RATE_LIMIT_DELAY_MS);
try {
const response = await fetch(url, {
headers: this.getHeaders(),
});
// Handle rate limiting (429)
if (response.status === 429) {
const retryAfter = parseInt(response.headers.get('Retry-After') || '5', 10);
logger.warn(`SchemaSync: Rate limited, waiting ${retryAfter}s before retry`);
await this.delay(retryAfter * 1000);
return this.fetchWithRateLimit<T>(url, retries);
}
// Handle server errors with retry
if (response.status >= 500 && retries > 0) {
logger.warn(`SchemaSync: Server error ${response.status}, retrying (${retries} attempts left)`);
await this.delay(this.RETRY_DELAY_MS);
return this.fetchWithRateLimit<T>(url, retries - 1);
}
if (!response.ok) {
const text = await response.text();
throw new Error(`HTTP ${response.status}: ${text}`);
}
return await response.json() as T;
} catch (error) {
if (retries > 0 && error instanceof Error && !error.message.includes('HTTP')) {
logger.warn(`SchemaSync: Network error, retrying (${retries} attempts left)`, error);
await this.delay(this.RETRY_DELAY_MS);
return this.fetchWithRateLimit<T>(url, retries - 1);
}
throw error;
}
}
/**
* Fetch all schemas from Jira
*/
private async fetchSchemas(): Promise<JiraSchema[]> {
  const listUrl = `${this.baseUrl}/objectschema/list`;
  logger.debug(`SchemaSync: Fetching schemas from ${listUrl}`);
  const response = await this.fetchWithRateLimit<{ objectschemas?: JiraSchema[] } | JiraSchema[]>(listUrl);
  // Jira returns either a bare array or a wrapper object, depending on version.
  if (Array.isArray(response)) {
    return response;
  }
  if (response && typeof response === 'object' && 'objectschemas' in response) {
    return response.objectschemas || [];
  }
  logger.warn('SchemaSync: Unexpected schema list response format', response);
  return [];
}
/**
* Fetch schema details
*/
private async fetchSchemaDetails(schemaId: number): Promise<JiraSchema> {
  // Single-schema detail lookup; rate limiting is handled by the shared fetcher.
  logger.debug(`SchemaSync: Fetching schema details for ${schemaId}`);
  const detailUrl = `${this.baseUrl}/objectschema/${schemaId}`;
  return this.fetchWithRateLimit<JiraSchema>(detailUrl);
}
/**
* Fetch all object types for a schema (flat list)
*/
private async fetchObjectTypes(schemaId: number): Promise<JiraObjectType[]> {
  logger.debug(`SchemaSync: Fetching object types for schema ${schemaId}`);
  try {
    // Preferred: the /flat endpoint returns the full hierarchy as a flat list.
    const flat = await this.fetchWithRateLimit<JiraObjectType[]>(
      `${this.baseUrl}/objectschema/${schemaId}/objecttypes/flat`
    );
    return Array.isArray(flat) ? flat : [];
  } catch (error) {
    // Fallback to regular endpoint if flat endpoint fails
    logger.warn(`SchemaSync: Flat endpoint failed, trying regular endpoint`, error);
    const regular = await this.fetchWithRateLimit<{ objectTypes?: JiraObjectType[] } | JiraObjectType[]>(
      `${this.baseUrl}/objectschema/${schemaId}/objecttypes`
    );
    if (Array.isArray(regular)) {
      return regular;
    }
    if (regular && typeof regular === 'object' && 'objectTypes' in regular) {
      return regular.objectTypes || [];
    }
    return [];
  }
}
/**
* Fetch object type details
*/
private async fetchObjectTypeDetails(typeId: number): Promise<JiraObjectType> {
  // Detail lookup for a single object type; shares rate limiting with all fetches.
  logger.debug(`SchemaSync: Fetching object type details for ${typeId}`);
  const detailUrl = `${this.baseUrl}/objecttype/${typeId}`;
  return this.fetchWithRateLimit<JiraObjectType>(detailUrl);
}
/**
* Fetch attributes for an object type
*/
private async fetchAttributes(typeId: number): Promise<JiraAttribute[]> {
  logger.debug(`SchemaSync: Fetching attributes for object type ${typeId}`);
  try {
    const attrs = await this.fetchWithRateLimit<JiraAttribute[]>(
      `${this.baseUrl}/objecttype/${typeId}/attributes`
    );
    return Array.isArray(attrs) ? attrs : [];
  } catch (error) {
    // Attribute failures are non-fatal: the type is synced without attributes.
    logger.warn(`SchemaSync: Failed to fetch attributes for type ${typeId}`, error);
    return [];
  }
}
/**
* Parse Jira attribute to database format
*/
private parseAttribute(
  attr: JiraAttribute,
  allTypeConfigs: Map<number, { name: string; typeName: string }>
): {
  jiraId: number;
  name: string;
  fieldName: string;
  type: string;
  isMultiple: boolean;
  isEditable: boolean;
  isRequired: boolean;
  isSystem: boolean;
  referenceTypeName?: string;
  description?: string;
  // Additional fields from plan
  label?: boolean;
  sortable?: boolean;
  summable?: boolean;
  indexed?: boolean;
  suffix?: string;
  removable?: boolean;
  hidden?: boolean;
  includeChildObjectTypes?: boolean;
  uniqueAttribute?: boolean;
  regexValidation?: string | null;
  iql?: string | null;
  options?: string;
  position?: number;
} {
  const rawTypeId = attr.type || attr.defaultType?.id || 0;
  let resolvedType = mapJiraType(rawTypeId);
  // CRITICAL: Jira sometimes reports type=1 (integer) for reference attributes.
  // A present reference-target id is the authoritative signal of a reference.
  const referencedTypeId = attr.referenceObjectTypeId || attr.referenceObject?.id || attr.referenceType?.id;
  if (referencedTypeId) {
    resolvedType = 'reference';
  }
  const parsed: ReturnType<typeof this.parseAttribute> = {
    jiraId: attr.id,
    name: attr.name,
    fieldName: toCamelCase(attr.name),
    type: resolvedType,
    // maximumCardinality of -1 means "unlimited", which also counts as multiple.
    isMultiple: (attr.maximumCardinality ?? 1) > 1 || attr.maximumCardinality === -1,
    isEditable: attr.editable !== false && !attr.hidden,
    isRequired: (attr.minimumCardinality ?? 0) > 0,
    isSystem: attr.system === true,
    description: attr.description,
    label: attr.label,
    sortable: attr.sortable,
    summable: attr.summable,
    indexed: attr.indexed,
    suffix: attr.suffix,
    removable: attr.removable,
    hidden: attr.hidden,
    includeChildObjectTypes: attr.includeChildObjectTypes,
    uniqueAttribute: attr.uniqueAttribute,
    regexValidation: attr.regexValidation,
    iql: attr.iql,
    options: attr.options,
    position: attr.position,
  };
  // Resolve the referenced type's generated name: prefer this sync run's type
  // map, then Jira's own metadata, then a synthetic placeholder.
  if (resolvedType === 'reference' && referencedTypeId) {
    const target = allTypeConfigs.get(referencedTypeId);
    parsed.referenceTypeName = target?.typeName ||
      attr.referenceObjectType?.name ||
      attr.referenceType?.name ||
      `Type${referencedTypeId}`;
  }
  return parsed;
}
/**
* Sync all schemas and their complete structure
*/
async syncAll(): Promise<SyncResult> {
const startTime = Date.now();
const errors: SyncError[] = [];
// Reset progress so status polls observe a fresh run.
this.progress = {
status: 'running',
schemasTotal: 0,
schemasCompleted: 0,
objectTypesTotal: 0,
objectTypesCompleted: 0,
startedAt: new Date(),
};
try {
logger.info('SchemaSync: Starting full schema synchronization...');
// Step 1: Fetch all schemas
const schemas = await this.fetchSchemas();
this.progress.schemasTotal = schemas.length;
logger.info(`SchemaSync: Found ${schemas.length} schemas to sync`);
if (schemas.length === 0) {
throw new Error('No schemas found in Jira Assets');
}
// Track Jira IDs for cleanup
const jiraSchemaIds = new Set<string>();
const jiraObjectTypeIds = new Map<string, Set<number>>(); // schemaId -> Set<typeId>
const jiraAttributeIds = new Map<string, Set<number>>(); // typeName -> Set<attrId>
let schemasProcessed = 0;
let objectTypesProcessed = 0;
let attributesProcessed = 0;
let schemasDeleted = 0;
let objectTypesDeleted = 0;
let attributesDeleted = 0;
// All upserts and the orphan cleanup run inside one transaction, so a failed
// sync leaves the previously stored snapshot intact.
await this.db.transaction(async (txDb) => {
// Step 2: Process each schema
for (const schema of schemas) {
try {
this.progress.currentSchema = schema.name;
const schemaIdStr = schema.id.toString();
jiraSchemaIds.add(schemaIdStr);
// Fetch schema details
let schemaDetails: JiraSchema;
try {
schemaDetails = await this.fetchSchemaDetails(schema.id);
} catch (error) {
// Detail fetch is best-effort; the list entry already has the core fields.
logger.warn(`SchemaSync: Failed to fetch details for schema ${schema.id}, using list data`, error);
schemaDetails = schema;
}
const now = new Date().toISOString();
const objectSchemaKey = schemaDetails.objectSchemaKey || schemaDetails.name || schemaIdStr;
// Upsert schema
// NOTE(review): the Postgres and SQLite branches below are currently
// identical for this upsert; kept separate to allow dialect-specific SQL.
if (txDb.isPostgres) {
await txDb.execute(`
INSERT INTO schemas (jira_schema_id, name, object_schema_key, status, description, discovered_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(jira_schema_id) DO UPDATE SET
name = excluded.name,
object_schema_key = excluded.object_schema_key,
status = excluded.status,
description = excluded.description,
updated_at = excluded.updated_at
`, [
schemaIdStr,
schemaDetails.name,
objectSchemaKey,
schemaDetails.status || null,
schemaDetails.description || null,
now,
now,
]);
} else {
await txDb.execute(`
INSERT INTO schemas (jira_schema_id, name, object_schema_key, status, description, discovered_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(jira_schema_id) DO UPDATE SET
name = excluded.name,
object_schema_key = excluded.object_schema_key,
status = excluded.status,
description = excluded.description,
updated_at = excluded.updated_at
`, [
schemaIdStr,
schemaDetails.name,
objectSchemaKey,
schemaDetails.status || null,
schemaDetails.description || null,
now,
now,
]);
}
// Get schema FK
const schemaRow = await txDb.queryOne<{ id: number }>(
`SELECT id FROM schemas WHERE jira_schema_id = ?`,
[schemaIdStr]
);
if (!schemaRow) {
throw new Error(`Failed to get schema FK for ${schemaIdStr}`);
}
const schemaIdFk = schemaRow.id;
// Step 3: Fetch all object types for this schema
const objectTypes = await this.fetchObjectTypes(schema.id);
logger.info(`SchemaSync: Found ${objectTypes.length} object types in schema ${schema.name}`);
const typeConfigs = new Map<number, { name: string; typeName: string }>();
jiraObjectTypeIds.set(schemaIdStr, new Set());
// Build type name mapping
// (built up-front so reference attributes can resolve target type names)
for (const objType of objectTypes) {
const typeName = toPascalCase(objType.name);
typeConfigs.set(objType.id, {
name: objType.name,
typeName,
});
jiraObjectTypeIds.get(schemaIdStr)!.add(objType.id);
}
// Step 4: Store object types
for (const objType of objectTypes) {
try {
this.progress.currentObjectType = objType.name;
const typeName = toPascalCase(objType.name);
const objectCount = objType.objectCount || 0;
const syncPriority = determineSyncPriority(objType.name, objectCount);
// Upsert object type
// Note: `enabled` is NOT updated on conflict, so a manually enabled type
// stays enabled across re-syncs; new types start disabled.
if (txDb.isPostgres) {
await txDb.execute(`
INSERT INTO object_types (
schema_id, jira_type_id, type_name, display_name, description,
sync_priority, object_count, enabled, discovered_at, updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(schema_id, jira_type_id) DO UPDATE SET
display_name = excluded.display_name,
description = excluded.description,
sync_priority = excluded.sync_priority,
object_count = excluded.object_count,
updated_at = excluded.updated_at
`, [
schemaIdFk,
objType.id,
typeName,
objType.name,
objType.description || null,
syncPriority,
objectCount,
false, // Default: disabled
now,
now,
]);
} else {
await txDb.execute(`
INSERT INTO object_types (
schema_id, jira_type_id, type_name, display_name, description,
sync_priority, object_count, enabled, discovered_at, updated_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(schema_id, jira_type_id) DO UPDATE SET
display_name = excluded.display_name,
description = excluded.description,
sync_priority = excluded.sync_priority,
object_count = excluded.object_count,
updated_at = excluded.updated_at
`, [
schemaIdFk,
objType.id,
typeName,
objType.name,
objType.description || null,
syncPriority,
objectCount,
0, // Default: disabled (0 = false in SQLite)
now,
now,
]);
}
objectTypesProcessed++;
// Step 5: Fetch and store attributes
const attributes = await this.fetchAttributes(objType.id);
logger.info(`SchemaSync: Fetched ${attributes.length} attributes for ${objType.name} (type ${objType.id})`);
if (!jiraAttributeIds.has(typeName)) {
jiraAttributeIds.set(typeName, new Set());
}
if (attributes.length === 0) {
logger.warn(`SchemaSync: No attributes found for ${objType.name} (type ${objType.id})`);
}
for (const jiraAttr of attributes) {
try {
const attrDef = this.parseAttribute(jiraAttr, typeConfigs);
jiraAttributeIds.get(typeName)!.add(attrDef.jiraId);
// Upsert attribute
if (txDb.isPostgres) {
await txDb.execute(`
INSERT INTO attributes (
jira_attr_id, object_type_name, attr_name, field_name, attr_type,
is_multiple, is_editable, is_required, is_system,
reference_type_name, description, position, discovered_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(jira_attr_id, object_type_name) DO UPDATE SET
attr_name = excluded.attr_name,
field_name = excluded.field_name,
attr_type = excluded.attr_type,
is_multiple = excluded.is_multiple,
is_editable = excluded.is_editable,
is_required = excluded.is_required,
is_system = excluded.is_system,
reference_type_name = excluded.reference_type_name,
description = excluded.description,
position = excluded.position
`, [
attrDef.jiraId,
typeName,
attrDef.name,
attrDef.fieldName,
attrDef.type,
attrDef.isMultiple,
attrDef.isEditable,
attrDef.isRequired,
attrDef.isSystem,
attrDef.referenceTypeName || null,
attrDef.description || null,
attrDef.position ?? 0,
now,
]);
} else {
await txDb.execute(`
INSERT INTO attributes (
jira_attr_id, object_type_name, attr_name, field_name, attr_type,
is_multiple, is_editable, is_required, is_system,
reference_type_name, description, position, discovered_at
)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(jira_attr_id, object_type_name) DO UPDATE SET
attr_name = excluded.attr_name,
field_name = excluded.field_name,
attr_type = excluded.attr_type,
is_multiple = excluded.is_multiple,
is_editable = excluded.is_editable,
is_required = excluded.is_required,
is_system = excluded.is_system,
reference_type_name = excluded.reference_type_name,
description = excluded.description,
position = excluded.position
`, [
attrDef.jiraId,
typeName,
attrDef.name,
attrDef.fieldName,
attrDef.type,
attrDef.isMultiple ? 1 : 0,
attrDef.isEditable ? 1 : 0,
attrDef.isRequired ? 1 : 0,
attrDef.isSystem ? 1 : 0,
attrDef.referenceTypeName || null,
attrDef.description || null,
attrDef.position ?? 0,
now,
]);
}
attributesProcessed++;
} catch (error) {
logger.error(`SchemaSync: Failed to process attribute ${jiraAttr.id} (${jiraAttr.name}) for ${objType.name}`, error);
if (error instanceof Error) {
logger.error(`SchemaSync: Attribute error details: ${error.message}`, error.stack);
}
errors.push({
type: 'attribute',
id: jiraAttr.id,
message: error instanceof Error ? error.message : String(error),
});
}
}
// NOTE(review): attributesProcessed is cumulative across all types, so this
// per-type log line overstates the count for every type after the first.
logger.info(`SchemaSync: Processed ${attributesProcessed} attributes for ${objType.name} (type ${objType.id})`);
this.progress.objectTypesCompleted++;
} catch (error) {
logger.warn(`SchemaSync: Failed to process object type ${objType.id}`, error);
errors.push({
type: 'objectType',
id: objType.id,
message: error instanceof Error ? error.message : String(error),
});
}
}
this.progress.schemasCompleted++;
schemasProcessed++;
} catch (error) {
logger.error(`SchemaSync: Failed to process schema ${schema.id}`, error);
errors.push({
type: 'schema',
id: schema.id.toString(),
message: error instanceof Error ? error.message : String(error),
});
}
}
// Step 6: Clean up orphaned records (hard delete)
logger.info('SchemaSync: Cleaning up orphaned records...');
// Delete orphaned schemas
const allLocalSchemas = await txDb.query<{ jira_schema_id: string }>(
`SELECT jira_schema_id FROM schemas`
);
for (const localSchema of allLocalSchemas) {
if (!jiraSchemaIds.has(localSchema.jira_schema_id)) {
logger.info(`SchemaSync: Deleting orphaned schema ${localSchema.jira_schema_id}`);
await txDb.execute(`DELETE FROM schemas WHERE jira_schema_id = ?`, [localSchema.jira_schema_id]);
schemasDeleted++;
}
}
// Delete orphaned object types
// First, get all object types from all remaining schemas
const allLocalObjectTypes = await txDb.query<{ schema_id: number; jira_type_id: number; jira_schema_id: string }>(
`SELECT ot.schema_id, ot.jira_type_id, s.jira_schema_id
FROM object_types ot
JOIN schemas s ON ot.schema_id = s.id`
);
for (const localType of allLocalObjectTypes) {
const schemaIdStr = localType.jira_schema_id;
const typeIds = jiraObjectTypeIds.get(schemaIdStr);
// If schema doesn't exist in Jira anymore, or type doesn't exist in schema
if (!jiraSchemaIds.has(schemaIdStr) || (typeIds && !typeIds.has(localType.jira_type_id))) {
logger.info(`SchemaSync: Deleting orphaned object type ${localType.jira_type_id} from schema ${schemaIdStr}`);
await txDb.execute(
`DELETE FROM object_types WHERE schema_id = ? AND jira_type_id = ?`,
[localType.schema_id, localType.jira_type_id]
);
objectTypesDeleted++;
}
}
// Delete orphaned attributes
// Get all attributes and check against synced types
// NOTE(review): if a type failed to sync in this run, jiraAttributeIds has no
// entry for it and ALL of its attributes are deleted below — confirm intended.
const allLocalAttributes = await txDb.query<{ object_type_name: string; jira_attr_id: number }>(
`SELECT object_type_name, jira_attr_id FROM attributes`
);
for (const localAttr of allLocalAttributes) {
const attrIds = jiraAttributeIds.get(localAttr.object_type_name);
// If type wasn't synced or attribute doesn't exist in type
if (!attrIds || !attrIds.has(localAttr.jira_attr_id)) {
logger.info(`SchemaSync: Deleting orphaned attribute ${localAttr.jira_attr_id} from type ${localAttr.object_type_name}`);
await txDb.execute(
`DELETE FROM attributes WHERE object_type_name = ? AND jira_attr_id = ?`,
[localAttr.object_type_name, localAttr.jira_attr_id]
);
attributesDeleted++;
}
}
logger.info(`SchemaSync: Cleanup complete - ${schemasDeleted} schemas, ${objectTypesDeleted} object types, ${attributesDeleted} attributes deleted`);
});
const duration = Date.now() - startTime;
this.progress.status = 'completed';
logger.info(`SchemaSync: Synchronization complete in ${duration}ms - ${schemasProcessed} schemas, ${objectTypesProcessed} object types, ${attributesProcessed} attributes, ${schemasDeleted} deleted schemas, ${objectTypesDeleted} deleted types, ${attributesDeleted} deleted attributes`);
if (attributesProcessed === 0) {
logger.warn(`SchemaSync: WARNING - No attributes were saved! Check logs for errors.`);
}
if (errors.length > 0) {
logger.warn(`SchemaSync: Sync completed with ${errors.length} errors:`, errors);
}
// Per-entity errors were collected rather than thrown, so the caller gets a
// partial-success summary instead of an exception.
return {
success: errors.length === 0,
schemasProcessed,
objectTypesProcessed,
attributesProcessed,
schemasDeleted,
objectTypesDeleted,
attributesDeleted,
errors,
duration,
};
} catch (error) {
this.progress.status = 'failed';
logger.error('SchemaSync: Synchronization failed', error);
throw error;
}
}
/**
* Sync a single schema by ID
*/
async syncSchema(schemaId: number): Promise<SyncResult> {
  logger.info(`SchemaSync: Syncing single schema ${schemaId}`);
  // syncAll is idempotent, so a single-schema request currently delegates to
  // the full sync; per-schema filtering is a possible future optimization.
  return this.syncAll();
}
/**
* Get sync status/progress
*/
getProgress(): SyncProgress {
  // Hand back a shallow copy so callers cannot mutate internal sync state.
  const snapshot: SyncProgress = { ...this.progress };
  return snapshot;
}
}
// Export singleton instance
export const schemaSyncService = new SchemaSyncService();

View File

@@ -0,0 +1,68 @@
/**
* ServiceFactory - Creates and initializes all services
*
* Single entry point for service initialization and dependency injection.
*/
import { getDatabaseAdapter } from './database/singleton.js';
import { ensureSchemaInitialized } from './database/normalized-schema-init.js';
import { SchemaRepository } from '../repositories/SchemaRepository.js';
import { ObjectCacheRepository } from '../repositories/ObjectCacheRepository.js';
import { SchemaSyncService } from './SchemaSyncService.js';
import { ObjectSyncService } from './ObjectSyncService.js';
import { PayloadProcessor } from './PayloadProcessor.js';
import { QueryService } from './QueryService.js';
import { RefreshService } from './RefreshService.js';
import { WriteThroughService } from './WriteThroughService.js';
import { logger } from './logger.js';
/**
* All services container
*/
export class ServiceFactory {
  public readonly schemaRepo: SchemaRepository;
  public readonly cacheRepo: ObjectCacheRepository;
  public readonly schemaSyncService: SchemaSyncService;
  public readonly objectSyncService: ObjectSyncService;
  public readonly payloadProcessor: PayloadProcessor;
  public readonly queryService: QueryService;
  public readonly refreshService: RefreshService;
  public readonly writeThroughService: WriteThroughService;

  private static instance: ServiceFactory | null = null;

  private constructor() {
    // All repositories and services share the process-wide database adapter.
    const adapter = getDatabaseAdapter();

    // Repositories first: every service below depends on one or both of them.
    this.schemaRepo = new SchemaRepository(adapter);
    this.cacheRepo = new ObjectCacheRepository(adapter);

    // Services, wired via constructor injection.
    this.schemaSyncService = new SchemaSyncService(this.schemaRepo);
    this.objectSyncService = new ObjectSyncService(this.schemaRepo, this.cacheRepo);
    this.payloadProcessor = new PayloadProcessor(this.schemaRepo, this.cacheRepo);
    this.queryService = new QueryService(this.schemaRepo, this.cacheRepo);
    this.refreshService = new RefreshService(this.objectSyncService);
    this.writeThroughService = new WriteThroughService(this.objectSyncService, this.schemaRepo);

    // Kick off database schema initialization without blocking construction.
    ensureSchemaInitialized().catch(error => {
      logger.error('ServiceFactory: Failed to initialize database schema', error);
    });
  }

  /**
   * Get singleton instance
   */
  static getInstance(): ServiceFactory {
    ServiceFactory.instance ??= new ServiceFactory();
    return ServiceFactory.instance;
  }
}
// Export singleton instance getter
export const getServices = () => ServiceFactory.getInstance();

View File

@@ -0,0 +1,153 @@
/**
* WriteThroughService - Write-through updates to Jira and DB
*
* Writes to Jira Assets API, then immediately updates DB cache.
*/
import { logger } from './logger.js';
import { jiraAssetsClient } from '../infrastructure/jira/JiraAssetsClient.js';
import { ObjectSyncService } from './ObjectSyncService.js';
import { SchemaRepository } from '../repositories/SchemaRepository.js';
import type { CMDBObject, CMDBObjectTypeName } from '../generated/jira-types.js';
export interface UpdateResult {
success: boolean;
data?: CMDBObject;
error?: string;
}
export class WriteThroughService {
constructor(
private syncService: ObjectSyncService,
private schemaRepo: SchemaRepository
) {}
/**
* Update an object (write-through)
*
* 1. Build Jira update payload from field updates
* 2. Send update to Jira Assets API
* 3. Fetch fresh data from Jira
* 4. Update DB cache using same normalization logic
*/
async updateObject(
typeName: CMDBObjectTypeName,
objectId: string,
updates: Record<string, unknown>
): Promise<UpdateResult> {
try {
// Get attribute definitions for this type
const attributeDefs = await this.schemaRepo.getAttributesForType(typeName);
const attrMapByName = new Map(attributeDefs.map(a => [a.fieldName, a]));
// Build Jira update payload
const payload = {
attributes: [] as Array<{
objectTypeAttributeId: number;
objectAttributeValues: Array<{ value?: string }>;
}>,
};
// Unknown and non-editable fields are skipped (with a warning), not errors.
for (const [fieldName, value] of Object.entries(updates)) {
const attrDef = attrMapByName.get(fieldName);
if (!attrDef) {
logger.warn(`WriteThroughService: Unknown field ${fieldName} for type ${typeName}`);
continue;
}
if (!attrDef.isEditable) {
logger.warn(`WriteThroughService: Field ${fieldName} is not editable`);
continue;
}
// Build attribute values based on type
const attrValues = this.buildAttributeValues(value, attrDef);
// NOTE(review): a non-null value that serializes to an empty array (e.g.
// clearing a multi-value field by passing []) is skipped by this check —
// confirm whether [] should clear the field like null does.
if (attrValues.length > 0 || value === null || value === undefined) {
// Include attribute even if clearing (empty array)
payload.attributes.push({
objectTypeAttributeId: attrDef.jiraAttrId,
objectAttributeValues: attrValues,
});
}
}
if (payload.attributes.length === 0) {
return { success: true }; // No attributes to update
}
// Send update to Jira
await jiraAssetsClient.updateObject(objectId, payload);
// Fetch fresh data from Jira
// NOTE(review): `entry` is only used as an existence check here;
// syncSingleObject below performs its own fetch from Jira.
const entry = await jiraAssetsClient.getObject(objectId);
if (!entry) {
return {
success: false,
error: 'Object not found in Jira after update',
};
}
// Get enabled types for sync policy
const enabledTypes = await this.schemaRepo.getEnabledObjectTypes();
const enabledTypeSet = new Set(enabledTypes.map(t => t.typeName));
// Update DB cache using sync service
const syncResult = await this.syncService.syncSingleObject(objectId, enabledTypeSet);
if (!syncResult.cached) {
logger.warn(`WriteThroughService: Failed to update cache after Jira update: ${syncResult.error}`);
// Still return success if Jira update succeeded
}
// Fetch updated object from DB
// Note: We'd need QueryService here, but to avoid circular deps,
// we'll return success and let caller refresh if needed
return { success: true };
} catch (error) {
logger.error(`WriteThroughService: Failed to update object ${objectId}`, error);
return {
success: false,
error: error instanceof Error ? error.message : 'Unknown error',
};
}
}
/**
* Build Jira attribute values from TypeScript value
*/
private buildAttributeValues(
value: unknown,
attrDef: { attrType: string; isMultiple: boolean }
): Array<{ value?: string }> {
// Null/undefined = clear the field
if (value === null || value === undefined) {
return [];
}
// Reference type
// References are serialized via their objectKey; entries without one are dropped.
if (attrDef.attrType === 'reference') {
if (attrDef.isMultiple && Array.isArray(value)) {
return (value as Array<{ objectKey?: string }>).map(ref => ({
value: ref.objectKey,
})).filter(v => v.value);
} else if (!attrDef.isMultiple) {
const ref = value as { objectKey?: string };
return ref.objectKey ? [{ value: ref.objectKey }] : [];
}
return [];
}
// Boolean
// NOTE(review): truthiness check — a non-empty string such as "false" would
// serialize to 'true'; confirm callers always pass real booleans here.
if (attrDef.attrType === 'boolean') {
return [{ value: value ? 'true' : 'false' }];
}
// Number types
if (attrDef.attrType === 'integer' || attrDef.attrType === 'float') {
return [{ value: String(value) }];
}
// String types
return [{ value: String(value) }];
}
}

View File

@@ -150,15 +150,19 @@ class AuthService {
}
// Check if expired
if (new Date(session.expires_at) < new Date()) {
const expiresAt = new Date(session.expires_at);
const now = new Date();
if (expiresAt < now) {
await db.execute('DELETE FROM sessions WHERE id = ?', [sessionId]);
return null;
}
return session;
} finally {
await db.close();
} catch (error) {
logger.error(`[getSessionFromDb] Error querying session: ${sessionId.substring(0, 8)}...`, error);
throw error;
}
// Note: Don't close the database adapter - it's a singleton that should remain open
}
/**

View File

@@ -8,7 +8,7 @@
*/
import { logger } from './logger.js';
import { cacheStore, type CacheStats } from './cacheStore.js';
import { normalizedCacheStore as cacheStore, type CacheStats } from './normalizedCacheStore.js';
import { jiraAssetsClient, type JiraUpdatePayload, JiraObjectNotFoundError } from './jiraAssetsClient.js';
import { conflictResolver, type ConflictCheckResult } from './conflictResolver.js';
import { OBJECT_TYPES, getAttributeDefinition } from '../generated/jira-schema.js';
@@ -65,7 +65,11 @@ class CMDBService {
return cached;
}
// Cache miss: fetch from Jira
// Cache miss: check if cache is cold and trigger background warming
// Note: Background cache warming removed - syncs must be triggered manually from GUI
// The isWarm() check is kept for status reporting, but no auto-warming
// Fetch from Jira (don't wait for warming)
return this.fetchAndCacheObject<T>(typeName, id);
}
@@ -122,13 +126,48 @@ class CMDBService {
): Promise<T | null> {
try {
const jiraObj = await jiraAssetsClient.getObject(id);
if (!jiraObj) return null;
const parsed = jiraAssetsClient.parseObject<T>(jiraObj);
if (parsed) {
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
if (!jiraObj) {
logger.warn(`CMDBService: Jira API returned null for object ${typeName}/${id}`);
return null;
}
let parsed: T | null;
try {
parsed = await jiraAssetsClient.parseObject<T>(jiraObj);
} catch (parseError) {
// parseObject throws errors for missing required fields - log and return null
logger.error(`CMDBService: Failed to parse object ${typeName}/${id} from Jira:`, parseError);
logger.debug(`CMDBService: Jira object that failed to parse:`, {
id: jiraObj.id,
objectKey: jiraObj.objectKey,
label: jiraObj.label,
objectType: jiraObj.objectType?.name,
attributesCount: jiraObj.attributes?.length || 0,
});
return null;
}
if (!parsed) {
logger.warn(`CMDBService: Failed to parse object ${typeName}/${id} from Jira (parseObject returned null)`);
return null;
}
// Validate parsed object has required fields before caching
if (!parsed.id || !parsed.objectKey || !parsed.label) {
logger.error(`CMDBService: Parsed object ${typeName}/${id} is missing required fields. Parsed object: ${JSON.stringify({
id: parsed.id,
objectKey: parsed.objectKey,
label: parsed.label,
hasId: 'id' in parsed,
hasObjectKey: 'objectKey' in parsed,
hasLabel: 'label' in parsed,
resultKeys: Object.keys(parsed),
})}`);
return null; // Return null instead of throwing to allow graceful degradation
}
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
return parsed;
} catch (error) {
// If object was deleted from Jira, remove it from our cache
@@ -139,11 +178,48 @@ class CMDBService {
}
return null;
}
// Re-throw other errors
throw error;
// Log other errors but return null instead of throwing to prevent cascading failures
logger.error(`CMDBService: Unexpected error fetching object ${typeName}/${id}:`, error);
return null;
}
}
/**
* Batch fetch multiple objects from Jira and update cache
* Much more efficient than fetching objects one by one
*/
async batchFetchAndCacheObjects<T extends CMDBObject>(
typeName: CMDBObjectTypeName,
ids: string[]
): Promise<T[]> {
if (ids.length === 0) return [];
logger.debug(`CMDBService: Batch fetching ${ids.length} ${typeName} objects from Jira`);
// Fetch all objects in parallel (but limit concurrency to avoid overwhelming Jira)
const BATCH_SIZE = 20; // Fetch 20 objects at a time
const results: T[] = [];
for (let i = 0; i < ids.length; i += BATCH_SIZE) {
const batch = ids.slice(i, i + BATCH_SIZE);
// Per-object failures are logged and dropped so one bad id does not fail the batch.
const batchPromises = batch.map(async (id) => {
try {
return await this.fetchAndCacheObject<T>(typeName, id);
} catch (error) {
logger.warn(`CMDBService: Failed to fetch ${typeName}/${id} in batch`, error);
return null;
}
});
const batchResults = await Promise.all(batchPromises);
const validResults = batchResults.filter((obj): obj is T => obj !== null);
results.push(...validResults);
}
// results.length may be < ids.length: failed or unparsable objects are omitted.
logger.debug(`CMDBService: Successfully batch fetched ${results.length}/${ids.length} ${typeName} objects`);
return results;
}
/**
* Get all objects of a type from cache
*/
@@ -430,6 +506,20 @@ class CMDBService {
return await cacheStore.isWarm();
}
/**
* Trigger background cache warming if cache is cold
* This is called on-demand when cache misses occur
*/
private async triggerBackgroundWarming(): Promise<void> {
try {
// Dynamic import — presumably to avoid a circular dependency at module
// load time; verify against jiraAssets.js's own imports.
const { jiraAssetsService } = await import('./jiraAssets.js');
await jiraAssetsService.preWarmFullCache();
} catch (error) {
// Silently fail - warming is optional
logger.debug('On-demand cache warming failed', error);
}
}
/**
* Clear cache for a specific type
*/

View File

@@ -0,0 +1,284 @@
/**
* Data Integrity Service
*
* Handles validation and repair of broken references and other data integrity issues.
*/
import { logger } from './logger.js';
import { normalizedCacheStore as cacheStore } from './normalizedCacheStore.js';
import { jiraAssetsClient, JiraObjectNotFoundError } from './jiraAssetsClient.js';
import type { CMDBObject } from '../generated/jira-types.js';
/**
 * A relation row whose referenced target object is missing from the cache.
 * Field names are snake_case because they mirror the underlying DB row.
 */
export interface BrokenReference {
object_id: string;
attribute_id: number;
reference_object_id: string;
field_name: string;
object_type_name: string;
object_key: string;
label: string;
}
/** Outcome summary of a repairBrokenReferences() run. */
export interface RepairResult {
total: number;
repaired: number;
deleted: number;
failed: number;
errors: Array<{ reference: BrokenReference; error: string }>;
}
/** Snapshot produced by validateReferences(). */
export interface ValidationResult {
brokenReferences: number;
objectsWithBrokenRefs: number;
lastValidated: string;
}
class DataIntegrityService {
/**
* Validate all references in the cache
*/
async validateReferences(): Promise<ValidationResult> {
const brokenCount = await cacheStore.getBrokenReferencesCount();
// Count unique objects with broken references
// NOTE(review): only the first 10000 broken refs are sampled here, so
// objectsWithBrokenRefs may undercount on larger datasets (brokenReferences
// itself is exact).
const brokenRefs = await cacheStore.getBrokenReferences(10000, 0);
const uniqueObjectIds = new Set(brokenRefs.map(ref => ref.object_id));
return {
brokenReferences: brokenCount,
objectsWithBrokenRefs: uniqueObjectIds.size,
lastValidated: new Date().toISOString(),
};
}
/**
* Repair broken references
*
* @param mode - 'delete': Remove broken references, 'fetch': Try to fetch missing objects from Jira, 'dry-run': Just report
* @param batchSize - Number of references to process at a time
* @param maxRepairs - Maximum number of repairs to attempt (0 = unlimited)
*/
async repairBrokenReferences(
mode: 'delete' | 'fetch' | 'dry-run' = 'fetch',
batchSize: number = 100,
maxRepairs: number = 0
): Promise<RepairResult> {
const result: RepairResult = {
total: 0,
repaired: 0,
deleted: 0,
failed: 0,
errors: [],
};
let offset = 0;
let processed = 0;
while (true) {
// Fetch batch of broken references
// NOTE(review): in 'fetch'/'delete' mode, repaired/deleted rows leave the
// broken set, so advancing `offset` by batchSize below can skip unprocessed
// rows; a re-run picks them up. Confirm whether this is acceptable.
const brokenRefs = await cacheStore.getBrokenReferences(batchSize, offset);
if (brokenRefs.length === 0) break;
result.total += brokenRefs.length;
for (const ref of brokenRefs) {
// Check max repairs limit
if (maxRepairs > 0 && processed >= maxRepairs) {
logger.info(`DataIntegrityService: Reached max repairs limit (${maxRepairs})`);
break;
}
try {
if (mode === 'dry-run') {
// Just count, don't repair
processed++;
continue;
}
if (mode === 'fetch') {
// Try to fetch the referenced object from Jira
const fetchResult = await this.validateAndFetchReference(ref.reference_object_id);
if (fetchResult.exists && fetchResult.object) {
// Object was successfully fetched and cached
logger.debug(`DataIntegrityService: Repaired reference from ${ref.object_key}.${ref.field_name} to ${ref.reference_object_id}`);
result.repaired++;
} else {
// Object doesn't exist in Jira, delete the reference
await this.deleteBrokenReference(ref);
logger.debug(`DataIntegrityService: Deleted broken reference from ${ref.object_key}.${ref.field_name} to ${ref.reference_object_id} (object not found in Jira)`);
result.deleted++;
}
} else if (mode === 'delete') {
// Directly delete the broken reference
await this.deleteBrokenReference(ref);
result.deleted++;
}
processed++;
} catch (error) {
// Failures are recorded per-reference; the run continues with the next one.
const errorMessage = error instanceof Error ? error.message : String(error);
logger.error(`DataIntegrityService: Failed to repair reference from ${ref.object_key}.${ref.field_name} to ${ref.reference_object_id}`, error);
result.failed++;
result.errors.push({
reference: ref,
error: errorMessage,
});
}
}
// Check if we should continue
// (a short batch means we reached the end; the maxRepairs check also ends
// the run after the inner loop broke out early)
if (brokenRefs.length < batchSize || (maxRepairs > 0 && processed >= maxRepairs)) {
break;
}
offset += batchSize;
}
logger.info(`DataIntegrityService: Repair completed - Total: ${result.total}, Repaired: ${result.repaired}, Deleted: ${result.deleted}, Failed: ${result.failed}`);
return result;
}
/**
* Validate and fetch a referenced object
*/
private async validateAndFetchReference(
referenceObjectId: string
): Promise<{ exists: boolean; object?: CMDBObject }> {
// 1. Check cache first
const db = (cacheStore as any).db;
if (db) {
const objRow = await db.queryOne<{
id: string;
object_type_name: string;
}>(`
SELECT id, object_type_name
FROM objects
WHERE id = ?
`, [referenceObjectId]);
if (objRow) {
const cached = await cacheStore.getObject(objRow.object_type_name as any, referenceObjectId);
if (cached) {
return { exists: true, object: cached };
}
}
}
// 2. Try to fetch from Jira
try {
const jiraObj = await jiraAssetsClient.getObject(referenceObjectId);
if (jiraObj) {
// Parse and cache
const parsed = await jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
await cacheStore.upsertObject(parsed._objectType, parsed);
await cacheStore.extractAndStoreRelations(parsed._objectType, parsed);
return { exists: true, object: parsed };
}
}
} catch (error) {
if (error instanceof JiraObjectNotFoundError) {
return { exists: false };
}
// Re-throw other errors
throw error;
}
return { exists: false };
}
/**
* Delete a broken reference
*/
private async deleteBrokenReference(ref: BrokenReference): Promise<void> {
const db = (cacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.execute(`
DELETE FROM attribute_values
WHERE object_id = ?
AND attribute_id = ?
AND reference_object_id = ?
`, [ref.object_id, ref.attribute_id, ref.reference_object_id]);
}
/**
* Cleanup orphaned attribute values (values without parent object)
*/
async cleanupOrphanedAttributeValues(): Promise<number> {
const db = (cacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
const result = await db.execute(`
DELETE FROM attribute_values
WHERE object_id NOT IN (SELECT id FROM objects)
`);
logger.info(`DataIntegrityService: Cleaned up ${result} orphaned attribute values`);
return result;
}
/**
* Cleanup orphaned relations (relations where source or target doesn't exist)
*/
async cleanupOrphanedRelations(): Promise<number> {
const db = (cacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
const result = await db.execute(`
DELETE FROM object_relations
WHERE source_id NOT IN (SELECT id FROM objects)
OR target_id NOT IN (SELECT id FROM objects)
`);
logger.info(`DataIntegrityService: Cleaned up ${result} orphaned relations`);
return result;
}
/**
* Full integrity check and repair
*/
async fullIntegrityCheck(repair: boolean = false): Promise<{
validation: ValidationResult;
repair?: RepairResult;
orphanedValues: number;
orphanedRelations: number;
}> {
logger.info('DataIntegrityService: Starting full integrity check...');
const validation = await this.validateReferences();
const orphanedValues = await this.cleanupOrphanedAttributeValues();
const orphanedRelations = await this.cleanupOrphanedRelations();
let repairResult: RepairResult | undefined;
if (repair) {
repairResult = await this.repairBrokenReferences('fetch', 100, 0);
}
logger.info('DataIntegrityService: Integrity check completed', {
brokenReferences: validation.brokenReferences,
orphanedValues,
orphanedRelations,
repaired: repairResult?.repaired || 0,
deleted: repairResult?.deleted || 0,
});
return {
validation,
repair: repairResult,
orphanedValues,
orphanedRelations,
};
}
}
export const dataIntegrityService = new DataIntegrityService();

View File

@@ -1,17 +1,16 @@
/**
* DataService - Main entry point for application data access
*
* Routes requests to either:
* - CMDBService (using local cache) for real Jira data
* - MockDataService for development without Jira
* ALWAYS uses Jira Assets API via CMDBService (local cache layer).
* Mock data has been removed - all data must come from Jira Assets.
*/
import { config } from '../config/env.js';
import { cmdbService, type UpdateResult } from './cmdbService.js';
import { cacheStore, type CacheStats } from './cacheStore.js';
import { normalizedCacheStore as cacheStore, type CacheStats } from './normalizedCacheStore.js';
import { normalizedCacheStore } from './normalizedCacheStore.js';
import { jiraAssetsClient } from './jiraAssetsClient.js';
import { jiraAssetsService } from './jiraAssets.js';
import { mockDataService } from './mockData.js';
import { logger } from './logger.js';
import type {
ApplicationComponent,
@@ -47,16 +46,8 @@ import type {
import { calculateRequiredEffortWithMinMax } from './effortCalculation.js';
import { calculateApplicationCompleteness } from './dataCompletenessConfig.js';
// Determine if we should use real Jira Assets or mock data
// Jira PAT is now configured per-user, so we check if schema is configured
// The actual PAT is provided per-request via middleware
const useJiraAssets = !!config.jiraSchemaId;
if (useJiraAssets) {
logger.info('DataService: Using CMDB cache layer with Jira Assets API');
} else {
logger.info('DataService: Using mock data (Jira credentials not configured)');
}
// NOTE: All data comes from Jira Assets API - no mock data fallback
// If schemas aren't configured yet, operations will fail gracefully with appropriate errors
// =============================================================================
// Reference Cache (for enriching IDs to ObjectReferences)
@@ -121,42 +112,111 @@ async function lookupReferences<T extends CMDBObject>(
// Helper Functions
// =============================================================================
/**
* Load description for an object from database
* Looks for a description attribute (field_name like 'description' or attr_name like 'Description')
*/
async function getDescriptionFromDatabase(objectId: string): Promise<string | null> {
try {
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (!db) return null;
// Try to find description attribute by common field names
const descriptionFieldNames = ['description', 'Description', 'DESCRIPTION'];
// First, get the object to find its type
const objRow = await db.queryOne<{ object_type_name: string }>(`
SELECT object_type_name FROM objects WHERE id = ?
`, [objectId]);
if (!objRow) return null;
// Try each possible description field name
for (const fieldName of descriptionFieldNames) {
const descRow = await db.queryOne<{ text_value: string }>(`
SELECT av.text_value
FROM attribute_values av
JOIN attributes a ON av.attribute_id = a.id
WHERE av.object_id = ?
AND (a.field_name = ? OR a.attr_name = ?)
AND av.text_value IS NOT NULL
AND av.text_value != ''
LIMIT 1
`, [objectId, fieldName, fieldName]);
if (descRow?.text_value) {
return descRow.text_value;
}
}
return null;
} catch (error) {
logger.debug(`Failed to get description from database for object ${objectId}`, error);
return null;
}
}
/**
* Convert ObjectReference to ReferenceValue format used by frontend
* Try to enrich with description from jiraAssetsService cache if available
* If not in cache or cache entry has no description, fetch it async
* PRIMARY: Load from database cache (no API calls)
* FALLBACK: Only use API if object not in database
*/
async function toReferenceValue(ref: ObjectReference | null | undefined): Promise<ReferenceValue | null> {
if (!ref) return null;
// Try to get enriched ReferenceValue from jiraAssetsService cache (includes description if available)
const enriched = useJiraAssets ? jiraAssetsService.getEnrichedReferenceValue(ref.objectKey, ref.objectId) : null;
// PRIMARY SOURCE: Try to load from database first (no API calls)
try {
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (db) {
await db.ensureInitialized?.();
// Get basic object info from database
const objRow = await db.queryOne<{
id: string;
object_key: string;
label: string;
}>(`
SELECT id, object_key, label
FROM objects
WHERE id = ? OR object_key = ?
LIMIT 1
`, [ref.objectId, ref.objectKey]);
if (objRow) {
// Object exists in database - extract description if available
const description = await getDescriptionFromDatabase(objRow.id);
return {
objectId: objRow.id,
key: objRow.object_key || ref.objectKey,
name: objRow.label || ref.label,
...(description && { description }),
};
}
}
} catch (error) {
logger.debug(`Failed to load reference object ${ref.objectId} from database`, error);
}
// FALLBACK: Object not in database - check Jira Assets service cache
// Only fetch from API if really needed (object missing from database)
const enriched = jiraAssetsService.getEnrichedReferenceValue(ref.objectKey, ref.objectId);
if (enriched && enriched.description) {
// Use enriched value with description
// Use enriched value with description from service cache
return enriched;
}
// Cache miss or no description - fetch it async if using Jira Assets
if (useJiraAssets && enriched && !enriched.description) {
// We have a cached value but it lacks description - fetch it
const fetched = await jiraAssetsService.fetchEnrichedReferenceValue(ref.objectKey, ref.objectId);
if (fetched) {
return fetched;
}
// If fetch failed, return the cached value anyway
// Last resort: Object not in database and not in service cache
// Only return basic info - don't fetch from API here
// API fetching should only happen during sync operations
if (enriched) {
return enriched;
}
if (useJiraAssets) {
// Cache miss - fetch it
const fetched = await jiraAssetsService.fetchEnrichedReferenceValue(ref.objectKey, ref.objectId);
if (fetched) {
return fetched;
}
}
// Fallback to basic conversion without description (if fetch failed or not using Jira Assets)
// Basic fallback - return what we have from the ObjectReference
return {
objectId: ref.objectId,
key: ref.objectKey,
@@ -172,7 +232,8 @@ function toReferenceValues(refs: ObjectReference[] | null | undefined): Referenc
return refs.map(ref => ({
objectId: ref.objectId,
key: ref.objectKey,
name: ref.label,
// Use label if available, otherwise fall back to objectKey, then objectId
name: ref.label || ref.objectKey || ref.objectId || 'Unknown',
}));
}
@@ -225,6 +286,18 @@ async function toApplicationDetails(app: ApplicationComponent): Promise<Applicat
logger.info(`[toApplicationDetails] Converting cached object ${app.objectKey || app.id} to ApplicationDetails`);
logger.info(`[toApplicationDetails] confluenceSpace from cache: ${app.confluenceSpace} (type: ${typeof app.confluenceSpace})`);
// Debug logging for reference fields
if (process.env.NODE_ENV === 'development') {
logger.debug(`[toApplicationDetails] businessOwner: ${JSON.stringify(app.businessOwner)}`);
logger.debug(`[toApplicationDetails] systemOwner: ${JSON.stringify(app.systemOwner)}`);
logger.debug(`[toApplicationDetails] technicalApplicationManagement: ${JSON.stringify(app.technicalApplicationManagement)}`);
logger.debug(`[toApplicationDetails] supplierProduct: ${JSON.stringify(app.supplierProduct)}`);
logger.debug(`[toApplicationDetails] applicationFunction: ${JSON.stringify(app.applicationFunction)}`);
logger.debug(`[toApplicationDetails] applicationManagementDynamicsFactor: ${JSON.stringify(app.applicationManagementDynamicsFactor)}`);
logger.debug(`[toApplicationDetails] applicationManagementComplexityFactor: ${JSON.stringify(app.applicationManagementComplexityFactor)}`);
logger.debug(`[toApplicationDetails] applicationManagementNumberOfUsers: ${JSON.stringify(app.applicationManagementNumberOfUsers)}`);
}
// Handle confluenceSpace - it can be a string (URL) or number (legacy), convert to string
const confluenceSpaceValue = app.confluenceSpace !== null && app.confluenceSpace !== undefined
? (typeof app.confluenceSpace === 'string' ? app.confluenceSpace : String(app.confluenceSpace))
@@ -302,57 +375,17 @@ async function toApplicationDetails(app: ApplicationComponent): Promise<Applicat
// Convert array of ObjectReferences to ReferenceValue[]
const applicationFunctions = toReferenceValues(app.applicationFunction);
return {
id: app.id,
key: app.objectKey,
name: app.label,
description: app.description || null,
status: (app.status || 'In Production') as ApplicationStatus,
searchReference: app.searchReference || null,
// Organization info
organisation: organisation?.name || null,
businessOwner: extractLabel(app.businessOwner),
systemOwner: extractLabel(app.systemOwner),
functionalApplicationManagement: app.functionalApplicationManagement || null,
technicalApplicationManagement: extractLabel(app.technicalApplicationManagement),
technicalApplicationManagementPrimary: extractDisplayValue(app.technicalApplicationManagementPrimary),
technicalApplicationManagementSecondary: extractDisplayValue(app.technicalApplicationManagementSecondary),
// Technical info
medischeTechniek: app.medischeTechniek || false,
technischeArchitectuur: app.technischeArchitectuurTA || null,
supplierProduct: extractLabel(app.supplierProduct),
// Classification
applicationFunctions,
businessImportance: businessImportance?.name || null,
businessImpactAnalyse,
hostingType,
// Application Management
governanceModel,
applicationType,
applicationSubteam,
applicationTeam,
dynamicsFactor,
complexityFactor,
numberOfUsers,
applicationManagementHosting,
applicationManagementTAM,
platform,
// Override
overrideFTE: app.applicationManagementOverrideFTE ?? null,
requiredEffortApplicationManagement: null,
// Enterprise Architect reference
reference: app.reference || null,
// Confluence Space (URL string)
confluenceSpace: confluenceSpaceValue,
};
// Convert supplier fields to ReferenceValue format
const [
supplierTechnical,
supplierImplementation,
supplierConsultancy,
] = await Promise.all([
toReferenceValue(app.supplierTechnical),
toReferenceValue(app.supplierImplementation),
toReferenceValue(app.supplierConsultancy),
]);
// Calculate data completeness percentage
// Convert ApplicationDetails-like structure to format expected by completeness calculator
@@ -399,6 +432,9 @@ async function toApplicationDetails(app: ApplicationComponent): Promise<Applicat
medischeTechniek: app.medischeTechniek || false,
technischeArchitectuur: app.technischeArchitectuurTA || null,
supplierProduct: extractLabel(app.supplierProduct),
supplierTechnical: supplierTechnical,
supplierImplementation: supplierImplementation,
supplierConsultancy: supplierConsultancy,
// Classification
applicationFunctions,
@@ -659,22 +695,31 @@ export const dataService = {
page: number = 1,
pageSize: number = 25
): Promise<SearchResult> {
if (!useJiraAssets) {
return mockDataService.searchApplications(filters, page, pageSize);
}
// Get all applications from cache
// Get all applications from cache (always from Jira Assets)
let apps = await cmdbService.getObjects<ApplicationComponent>('ApplicationComponent');
logger.debug(`DataService: Found ${apps.length} applications in cache for search`);
// If cache is empty, log a warning
if (apps.length === 0) {
logger.warn('DataService: Cache is empty - no applications found. A full sync may be needed.');
}
// Apply filters locally
if (filters.searchText) {
const search = filters.searchText.toLowerCase();
apps = apps.filter(app =>
app.label.toLowerCase().includes(search) ||
app.objectKey.toLowerCase().includes(search) ||
app.searchReference?.toLowerCase().includes(search) ||
app.description?.toLowerCase().includes(search)
);
if (filters.searchText && filters.searchText.trim()) {
const search = filters.searchText.toLowerCase().trim();
const beforeFilter = apps.length;
apps = apps.filter(app => {
const label = app.label?.toLowerCase() || '';
const objectKey = app.objectKey?.toLowerCase() || '';
const searchRef = app.searchReference?.toLowerCase() || '';
const description = app.description?.toLowerCase() || '';
return label.includes(search) ||
objectKey.includes(search) ||
searchRef.includes(search) ||
description.includes(search);
});
logger.debug(`DataService: Search filter "${filters.searchText}" reduced results from ${beforeFilter} to ${apps.length}`);
}
if (filters.statuses && filters.statuses.length > 0) {
@@ -834,11 +879,14 @@ export const dataService = {
* Get application by ID (from cache)
*/
async getApplicationById(id: string): Promise<ApplicationDetails | null> {
if (!useJiraAssets) {
return mockDataService.getApplicationById(id);
// Try to get by ID first (handles both Jira object IDs and object keys)
let app = await cmdbService.getObject<ApplicationComponent>('ApplicationComponent', id);
// If not found by ID, try by object key (e.g., "ICMT-123" or numeric IDs that might be keys)
if (!app) {
app = await cmdbService.getObjectByKey<ApplicationComponent>('ApplicationComponent', id);
}
const app = await cmdbService.getObject<ApplicationComponent>('ApplicationComponent', id);
if (!app) return null;
return toApplicationDetails(app);
@@ -848,13 +896,18 @@ export const dataService = {
* Get application for editing (force refresh from Jira)
*/
async getApplicationForEdit(id: string): Promise<ApplicationDetails | null> {
if (!useJiraAssets) {
return mockDataService.getApplicationById(id);
}
const app = await cmdbService.getObject<ApplicationComponent>('ApplicationComponent', id, {
// Try to get by ID first (handles both Jira object IDs and object keys)
let app = await cmdbService.getObject<ApplicationComponent>('ApplicationComponent', id, {
forceRefresh: true,
});
// If not found by ID, try by object key (e.g., "ICMT-123" or numeric IDs that might be keys)
if (!app) {
app = await cmdbService.getObjectByKey<ApplicationComponent>('ApplicationComponent', id, {
forceRefresh: true,
});
}
if (!app) return null;
return toApplicationDetails(app);
@@ -884,11 +937,7 @@ export const dataService = {
): Promise<UpdateResult> {
logger.info(`dataService.updateApplication called for ${id}`);
if (!useJiraAssets) {
const success = await mockDataService.updateApplication(id, updates);
return { success };
}
// Always update via Jira Assets API
// Convert to CMDBService format
// IMPORTANT: For reference fields, we pass ObjectReference objects (with objectKey)
// because buildAttributeValues in cmdbService expects to extract objectKey for Jira API
@@ -978,7 +1027,7 @@ export const dataService = {
// ===========================================================================
async getDynamicsFactors(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getDynamicsFactors();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementDynamicsFactor>('ApplicationManagementDynamicsFactor');
return items.map(item => ({
objectId: item.id,
@@ -991,7 +1040,7 @@ export const dataService = {
},
async getComplexityFactors(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getComplexityFactors();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementComplexityFactor>('ApplicationManagementComplexityFactor');
return items.map(item => ({
objectId: item.id,
@@ -1004,7 +1053,7 @@ export const dataService = {
},
async getNumberOfUsers(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getNumberOfUsers();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementNumberOfUsers>('ApplicationManagementNumberOfUsers');
return items.map(item => ({
objectId: item.id,
@@ -1017,7 +1066,7 @@ export const dataService = {
},
async getGovernanceModels(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getGovernanceModels();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<IctGovernanceModel>('IctGovernanceModel');
return items.map(item => ({
objectId: item.id,
@@ -1030,24 +1079,26 @@ export const dataService = {
},
async getOrganisations(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getOrganisations();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<Organisation>('Organisation');
logger.debug(`DataService: Found ${items.length} organisations in cache`);
return items.map(item => ({ objectId: item.id, key: item.objectKey, name: item.label }));
},
async getHostingTypes(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getHostingTypes();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<HostingType>('HostingType');
return items.map(item => ({
objectId: item.id,
key: item.objectKey,
logger.debug(`DataService: Found ${items.length} hosting types in cache`);
return items.map(item => ({
objectId: item.id,
key: item.objectKey,
name: item.label,
summary: item.description || undefined, // Use description as summary for display
}));
},
async getBusinessImpactAnalyses(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getBusinessImpactAnalyses();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<BusinessImpactAnalyse>('BusinessImpactAnalyse');
return items.map(item => ({
objectId: item.id,
@@ -1059,7 +1110,7 @@ export const dataService = {
},
async getApplicationManagementHosting(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getApplicationManagementHosting();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementHosting>('ApplicationManagementHosting');
return items.map(item => ({
objectId: item.id,
@@ -1070,7 +1121,7 @@ export const dataService = {
},
async getApplicationManagementTAM(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getApplicationManagementTAM();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementTam>('ApplicationManagementTam');
return items.map(item => ({
objectId: item.id,
@@ -1081,7 +1132,7 @@ export const dataService = {
},
async getApplicationFunctions(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getApplicationFunctions();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationFunction>('ApplicationFunction');
return items.map(item => ({
objectId: item.id,
@@ -1098,7 +1149,7 @@ export const dataService = {
},
async getApplicationFunctionCategories(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getApplicationFunctionCategories();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationFunctionCategory>('ApplicationFunctionCategory');
return items.map(item => ({
objectId: item.id,
@@ -1109,19 +1160,17 @@ export const dataService = {
},
async getApplicationSubteams(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return []; // Mock mode: no subteams
// Use jiraAssetsService directly as schema doesn't include this object type
// Always get from Jira Assets API (schema doesn't include this object type)
return jiraAssetsService.getApplicationSubteams();
},
async getApplicationTeams(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return []; // Mock mode: no teams
// Use jiraAssetsService directly as schema doesn't include this object type
// Always get from Jira Assets API (schema doesn't include this object type)
return jiraAssetsService.getApplicationTeams();
},
async getSubteamToTeamMapping(): Promise<Record<string, ReferenceValue | null>> {
if (!useJiraAssets) return {}; // Mock mode: no mapping
// Always get from Jira Assets API
// Convert Map to plain object for JSON serialization
const mapping = await jiraAssetsService.getSubteamToTeamMapping();
const result: Record<string, ReferenceValue | null> = {};
@@ -1132,7 +1181,7 @@ export const dataService = {
},
async getApplicationTypes(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getApplicationTypes();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<ApplicationManagementApplicationType>('ApplicationManagementApplicationType');
return items.map(item => ({
objectId: item.id,
@@ -1143,8 +1192,9 @@ export const dataService = {
},
async getBusinessImportance(): Promise<ReferenceValue[]> {
if (!useJiraAssets) return mockDataService.getBusinessImportance();
// Always get from Jira Assets cache
const items = await cmdbService.getObjects<BusinessImportance>('BusinessImportance');
logger.debug(`DataService: Found ${items.length} business importance values in cache`);
return items.map(item => ({ objectId: item.id, key: item.objectKey, name: item.label }));
},
@@ -1153,8 +1203,7 @@ export const dataService = {
// ===========================================================================
async getStats(includeDistributions: boolean = true) {
if (!useJiraAssets) return mockDataService.getStats();
// Always get from Jira Assets cache
const allApps = await cmdbService.getObjects<ApplicationComponent>('ApplicationComponent');
// Statuses to exclude for most metrics
@@ -1231,9 +1280,7 @@ export const dataService = {
},
async getTeamDashboardData(excludedStatuses: ApplicationStatus[] = []): Promise<TeamDashboardData> {
if (!useJiraAssets) return mockDataService.getTeamDashboardData(excludedStatuses);
// Use jiraAssetsService directly as it has proper Team/Subteam field parsing
// Always get from Jira Assets API (has proper Team/Subteam field parsing)
return jiraAssetsService.getTeamDashboardData(excludedStatuses);
},
@@ -1253,7 +1300,7 @@ export const dataService = {
applicationCount: number;
}>;
}> {
// For mock data, use the same implementation (cmdbService routes to mock data when useJiraAssets is false)
// Always get from Jira Assets cache
// Get all applications from cache to access all fields including BIA
let apps = await cmdbService.getObjects<ApplicationComponent>('ApplicationComponent');
@@ -1421,13 +1468,13 @@ export const dataService = {
// Utility
// ===========================================================================
isUsingJiraAssets(): boolean {
return useJiraAssets;
async isUsingJiraAssets(): Promise<boolean> {
// Always returns true - mock data removed, only Jira Assets is used
return true;
},
async testConnection(): Promise<boolean> {
if (!useJiraAssets) return true;
// Only test connection if token is configured
// Always test Jira Assets connection (requires token)
if (!jiraAssetsClient.hasToken()) {
return false;
}

View File

@@ -0,0 +1,123 @@
/**
* Fix UNIQUE constraints on object_types table
*
* Removes old UNIQUE constraint on type_name and adds new UNIQUE(schema_id, type_name)
* This allows the same type_name to exist in different schemas
*/
import { logger } from '../logger.js';
import { normalizedCacheStore } from '../normalizedCacheStore.js';
/**
 * Replace the legacy UNIQUE(type_name) constraint on object_types with
 * UNIQUE(schema_id, type_name), so the same type name can exist in multiple
 * schemas, and drop foreign keys that still reference object_types(type_name).
 *
 * Deliberately non-fatal: every failure path inside the migration logs and
 * continues so application startup is never blocked by constraint state.
 *
 * @throws Error only when normalizedCacheStore exposes no database handle.
 */
export async function fixObjectTypesConstraints(): Promise<void> {
  // Reach into the store's private db handle (no public accessor exists).
  const db = (normalizedCacheStore as any).db;
  if (!db) {
    throw new Error('Database not available');
  }
  await db.ensureInitialized?.();
  logger.info('Migration: Fixing UNIQUE constraints on object_types table...');
  try {
    if (db.isPostgres) {
      // Step 1a: Check if old constraint exists.
      // NOTE(review): pg returns COUNT(*) as a string; `count > 0` relies on
      // JS numeric coercion — confirm the adapter does not already cast.
      const oldConstraintExists = await db.queryOne<{ count: number }>(`
        SELECT COUNT(*) as count
        FROM pg_constraint
        WHERE conname = 'object_types_type_name_key'
      `);
      if (oldConstraintExists && oldConstraintExists.count > 0) {
        logger.info('Migration: Dropping old UNIQUE constraint on type_name...');
        await db.execute(`ALTER TABLE object_types DROP CONSTRAINT IF EXISTS object_types_type_name_key`);
      }
      // Step 1b: Check if new composite constraint exists.
      const newConstraintExists = await db.queryOne<{ count: number }>(`
        SELECT COUNT(*) as count
        FROM pg_constraint
        WHERE conname = 'object_types_schema_id_type_name_key'
      `);
      if (!newConstraintExists || newConstraintExists.count === 0) {
        logger.info('Migration: Adding UNIQUE constraint on (schema_id, type_name)...');
        try {
          await db.execute(`
            ALTER TABLE object_types
            ADD CONSTRAINT object_types_schema_id_type_name_key UNIQUE (schema_id, type_name)
          `);
        } catch (error: any) {
          // If constraint already exists (race with another instance) or
          // existing rows violate uniqueness, log and continue.
          if (error.message && error.message.includes('already exists')) {
            logger.debug('Migration: Constraint already exists, skipping');
          } else if (error.message && error.message.includes('duplicate key')) {
            logger.warn('Migration: Duplicate (schema_id, type_name) found - this may need manual cleanup');
            // Don't throw - allow the application to continue
          } else {
            throw error;
          }
        }
      } else {
        logger.debug('Migration: New UNIQUE constraint already exists');
      }
    } else {
      // SQLite: UNIQUE constraints are part of the table definition and
      // cannot be altered in place; the schema definition handles this.
      logger.debug('Migration: SQLite UNIQUE constraints are handled in table definition');
    }
    // Step 2: Remove foreign key constraints that reference
    // object_types(type_name) — they break once type_name is no longer
    // unique on its own.
    logger.info('Migration: Removing foreign key constraints on object_types(type_name)...');
    try {
      if (db.isPostgres) {
        // Check and drop foreign keys from attributes table.
        const attrFkExists = await db.queryOne<{ count: number }>(`
          SELECT COUNT(*) as count
          FROM pg_constraint
          WHERE conname LIKE 'attributes_object_type_name_fkey%'
        `);
        if (attrFkExists && attrFkExists.count > 0) {
          logger.info('Migration: Dropping foreign key from attributes table...');
          await db.execute(`ALTER TABLE attributes DROP CONSTRAINT IF EXISTS attributes_object_type_name_fkey`);
        }
        // Check and drop foreign keys from objects table.
        const objFkExists = await db.queryOne<{ count: number }>(`
          SELECT COUNT(*) as count
          FROM pg_constraint
          WHERE conname LIKE 'objects_object_type_name_fkey%'
        `);
        if (objFkExists && objFkExists.count > 0) {
          logger.info('Migration: Dropping foreign key from objects table...');
          await db.execute(`ALTER TABLE objects DROP CONSTRAINT IF EXISTS objects_object_type_name_fkey`);
        }
        // Check and drop foreign keys from schema_mappings table.
        const mappingFkExists = await db.queryOne<{ count: number }>(`
          SELECT COUNT(*) as count
          FROM pg_constraint
          WHERE conname LIKE 'schema_mappings_object_type_name_fkey%'
        `);
        if (mappingFkExists && mappingFkExists.count > 0) {
          logger.info('Migration: Dropping foreign key from schema_mappings table...');
          await db.execute(`ALTER TABLE schema_mappings DROP CONSTRAINT IF EXISTS schema_mappings_object_type_name_fkey`);
        }
      } else {
        // SQLite: Foreign keys are part of the table definition and cannot
        // easily be dropped; the new schema definition handles this.
        logger.debug('Migration: SQLite foreign keys are handled in table definition');
      }
    } catch (error) {
      logger.warn('Migration: Could not remove foreign key constraints (may not exist)', error);
      // Don't throw - allow the application to continue
    }
    logger.info('Migration: UNIQUE constraints and foreign keys fix completed');
  } catch (error) {
    logger.warn('Migration: Could not fix constraints (may already be correct)', error);
    // Don't throw - allow the application to continue
  }
}

View File

@@ -40,4 +40,9 @@ export interface DatabaseAdapter {
* Get database size in bytes (if applicable)
*/
getSizeBytes?(): Promise<number>;
/**
* Indicates if this is a PostgreSQL adapter
*/
isPostgres?: boolean;
}

View File

@@ -0,0 +1,417 @@
/**
* Migration script to migrate from configured_object_types to normalized schema structure
*
* This script:
* 1. Creates schemas table if it doesn't exist
* 2. Migrates unique schemas from configured_object_types to schemas
* 3. Adds schema_id and enabled columns to object_types if they don't exist
* 4. Migrates object types from configured_object_types to object_types with schema_id FK
* 5. Drops configured_object_types table after successful migration
*/
import { logger } from '../logger.js';
import { normalizedCacheStore } from '../normalizedCacheStore.js';
/**
 * Migrate data from the legacy `configured_object_types` table into the
 * normalized schema structure (`schemas` + `object_types`).
 *
 * Steps:
 *  1. Exit early if `configured_object_types` does not exist (idempotent).
 *  2. Create the `schemas` table when missing.
 *  3. Upsert the distinct schemas found in `configured_object_types`.
 *  4./5. Add `schema_id` and `enabled` columns to `object_types` when missing.
 *  6. Copy every configured object type into `object_types` (update or insert).
 *  7. Replace UNIQUE(type_name) with UNIQUE(schema_id, type_name) (Postgres only).
 *  8. Add supporting indexes.
 *  9. Drop `configured_object_types`.
 *
 * All steps run inside a single transaction, so a failure leaves the
 * database in its pre-migration state.
 *
 * @throws Re-throws any error raised inside the transaction after logging it.
 */
export async function migrateToNormalizedSchema(): Promise<void> {
  // The cache store does not expose its adapter publicly; reach in directly.
  const db = (normalizedCacheStore as any).db;
  if (!db) {
    throw new Error('Database not available');
  }
  await db.ensureInitialized?.();
  logger.info('Migration: Starting migration to normalized schema structure...');
  try {
    await db.transaction(async (txDb) => {
      // Step 1: Check if configured_object_types table exists
      let configuredTableExists = false;
      try {
        if (txDb.isPostgres) {
          const result = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM information_schema.tables
            WHERE table_schema = 'public' AND table_name = 'configured_object_types'
          `);
          configuredTableExists = (result?.count || 0) > 0;
        } else {
          const result = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM sqlite_master
            WHERE type='table' AND name='configured_object_types'
          `);
          configuredTableExists = (result?.count || 0) > 0;
        }
      } catch (error) {
        logger.debug('Migration: configured_object_types table check failed (may not exist)', error);
      }
      if (!configuredTableExists) {
        // Nothing to migrate (fresh install, or migration already completed).
        logger.info('Migration: configured_object_types table does not exist, skipping migration');
        return;
      }
      // Step 2: Check if schemas table exists, create if not
      let schemasTableExists = false;
      try {
        if (txDb.isPostgres) {
          const result = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM information_schema.tables
            WHERE table_schema = 'public' AND table_name = 'schemas'
          `);
          schemasTableExists = (result?.count || 0) > 0;
        } else {
          const result = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM sqlite_master
            WHERE type='table' AND name='schemas'
          `);
          schemasTableExists = (result?.count || 0) > 0;
        }
      } catch (error) {
        logger.debug('Migration: schemas table check failed', error);
      }
      if (!schemasTableExists) {
        logger.info('Migration: Creating schemas table...');
        if (txDb.isPostgres) {
          await txDb.execute(`
            CREATE TABLE IF NOT EXISTS schemas (
              id SERIAL PRIMARY KEY,
              jira_schema_id TEXT NOT NULL UNIQUE,
              name TEXT NOT NULL,
              description TEXT,
              discovered_at TIMESTAMP NOT NULL DEFAULT NOW(),
              updated_at TIMESTAMP NOT NULL DEFAULT NOW()
            )
          `);
          await txDb.execute(`
            CREATE INDEX IF NOT EXISTS idx_schemas_jira_schema_id ON schemas(jira_schema_id)
          `);
          await txDb.execute(`
            CREATE INDEX IF NOT EXISTS idx_schemas_name ON schemas(name)
          `);
        } else {
          await txDb.execute(`
            CREATE TABLE IF NOT EXISTS schemas (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              jira_schema_id TEXT NOT NULL UNIQUE,
              name TEXT NOT NULL,
              description TEXT,
              discovered_at TEXT NOT NULL DEFAULT (datetime('now')),
              updated_at TEXT NOT NULL DEFAULT (datetime('now'))
            )
          `);
          await txDb.execute(`
            CREATE INDEX IF NOT EXISTS idx_schemas_jira_schema_id ON schemas(jira_schema_id)
          `);
          await txDb.execute(`
            CREATE INDEX IF NOT EXISTS idx_schemas_name ON schemas(name)
          `);
        }
      }
      // Step 3: Migrate unique schemas from configured_object_types to schemas
      logger.info('Migration: Migrating schemas from configured_object_types...');
      const schemaRows = await txDb.query<{
        schema_id: string;
        schema_name: string;
        min_discovered_at: string;
        max_updated_at: string;
      }>(`
        SELECT
          schema_id,
          schema_name,
          MIN(discovered_at) as min_discovered_at,
          MAX(updated_at) as max_updated_at
        FROM configured_object_types
        GROUP BY schema_id, schema_name
      `);
      for (const schemaRow of schemaRows) {
        // NOTE(review): both branches below are currently byte-identical; the
        // `?` placeholders are converted per-dialect by the adapter. The split
        // is kept in case the upsert syntax ever diverges between dialects.
        if (txDb.isPostgres) {
          await txDb.execute(`
            INSERT INTO schemas (jira_schema_id, name, description, discovered_at, updated_at)
            VALUES (?, ?, ?, ?, ?)
            ON CONFLICT(jira_schema_id) DO UPDATE SET
              name = excluded.name,
              updated_at = excluded.updated_at
          `, [
            schemaRow.schema_id,
            schemaRow.schema_name,
            null,
            schemaRow.min_discovered_at,
            schemaRow.max_updated_at,
          ]);
        } else {
          await txDb.execute(`
            INSERT INTO schemas (jira_schema_id, name, description, discovered_at, updated_at)
            VALUES (?, ?, ?, ?, ?)
            ON CONFLICT(jira_schema_id) DO UPDATE SET
              name = excluded.name,
              updated_at = excluded.updated_at
          `, [
            schemaRow.schema_id,
            schemaRow.schema_name,
            null,
            schemaRow.min_discovered_at,
            schemaRow.max_updated_at,
          ]);
        }
      }
      logger.info(`Migration: Migrated ${schemaRows.length} schemas`);
      // Step 4: Check if object_types has schema_id and enabled columns
      let hasSchemaId = false;
      let hasEnabled = false;
      try {
        if (txDb.isPostgres) {
          const columns = await txDb.query<{ column_name: string }>(`
            SELECT column_name
            FROM information_schema.columns
            WHERE table_schema = 'public' AND table_name = 'object_types'
          `);
          hasSchemaId = columns.some(c => c.column_name === 'schema_id');
          hasEnabled = columns.some(c => c.column_name === 'enabled');
        } else {
          const tableInfo = await txDb.query<{ name: string }>(`
            PRAGMA table_info(object_types)
          `);
          hasSchemaId = tableInfo.some(c => c.name === 'schema_id');
          hasEnabled = tableInfo.some(c => c.name === 'enabled');
        }
      } catch (error) {
        logger.warn('Migration: Could not check object_types columns', error);
      }
      // Step 5: Add schema_id and enabled columns if they don't exist
      if (!hasSchemaId) {
        logger.info('Migration: Adding schema_id column to object_types...');
        if (txDb.isPostgres) {
          await txDb.execute(`
            ALTER TABLE object_types
            ADD COLUMN schema_id INTEGER REFERENCES schemas(id) ON DELETE CASCADE
          `);
        } else {
          // SQLite doesn't support ALTER TABLE ADD COLUMN with FK, so we'll handle it differently
          // For now, just add the column without FK constraint
          await txDb.execute(`
            ALTER TABLE object_types
            ADD COLUMN schema_id INTEGER
          `);
        }
      }
      if (!hasEnabled) {
        logger.info('Migration: Adding enabled column to object_types...');
        if (txDb.isPostgres) {
          await txDb.execute(`
            ALTER TABLE object_types
            ADD COLUMN enabled BOOLEAN NOT NULL DEFAULT FALSE
          `);
        } else {
          // SQLite represents booleans as INTEGER 0/1.
          await txDb.execute(`
            ALTER TABLE object_types
            ADD COLUMN enabled INTEGER NOT NULL DEFAULT 0
          `);
        }
      }
      // Step 6: Migrate object types from configured_object_types to object_types
      logger.info('Migration: Migrating object types from configured_object_types...');
      const configuredTypes = await txDb.query<{
        schema_id: string;
        object_type_id: number;
        object_type_name: string;
        display_name: string;
        description: string | null;
        object_count: number;
        enabled: boolean | number;
        discovered_at: string;
        updated_at: string;
      }>(`
        SELECT
          schema_id,
          object_type_id,
          object_type_name,
          display_name,
          description,
          object_count,
          enabled,
          discovered_at,
          updated_at
        FROM configured_object_types
      `);
      let migratedCount = 0;
      for (const configuredType of configuredTypes) {
        // Get schema_id (FK) from schemas table
        const schemaRow = await txDb.queryOne<{ id: number }>(
          `SELECT id FROM schemas WHERE jira_schema_id = ?`,
          [configuredType.schema_id]
        );
        if (!schemaRow) {
          // Should not happen after Step 3; skip rather than abort the whole run.
          logger.warn(`Migration: Schema ${configuredType.schema_id} not found, skipping object type ${configuredType.object_type_name}`);
          continue;
        }
        // Check if object type already exists in object_types
        const existingType = await txDb.queryOne<{ jira_type_id: number }>(
          `SELECT jira_type_id FROM object_types WHERE jira_type_id = ?`,
          [configuredType.object_type_id]
        );
        if (existingType) {
          // Update existing object type with schema_id and enabled
          if (txDb.isPostgres) {
            await txDb.execute(`
              UPDATE object_types
              SET
                schema_id = ?,
                enabled = ?,
                display_name = COALESCE(display_name, ?),
                description = COALESCE(description, ?),
                object_count = COALESCE(object_count, ?),
                updated_at = ?
              WHERE jira_type_id = ?
            `, [
              schemaRow.id,
              // Postgres takes a real boolean; normalize 0/1 from the old table.
              typeof configuredType.enabled === 'boolean' ? configuredType.enabled : configuredType.enabled === 1,
              configuredType.display_name,
              configuredType.description,
              configuredType.object_count,
              configuredType.updated_at,
              configuredType.object_type_id,
            ]);
          } else {
            await txDb.execute(`
              UPDATE object_types
              SET
                schema_id = ?,
                enabled = ?,
                display_name = COALESCE(display_name, ?),
                description = COALESCE(description, ?),
                object_count = COALESCE(object_count, ?),
                updated_at = ?
              WHERE jira_type_id = ?
            `, [
              schemaRow.id,
              // SQLite takes an INTEGER 0/1; normalize a boolean from the old table.
              typeof configuredType.enabled === 'boolean' ? (configuredType.enabled ? 1 : 0) : configuredType.enabled,
              configuredType.display_name,
              configuredType.description,
              configuredType.object_count,
              configuredType.updated_at,
              configuredType.object_type_id,
            ]);
          }
        } else {
          // Insert new object type
          // Note: We need sync_priority - use default 0
          if (txDb.isPostgres) {
            await txDb.execute(`
              INSERT INTO object_types (
                schema_id, jira_type_id, type_name, display_name, description,
                sync_priority, object_count, enabled, discovered_at, updated_at
              )
              VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            `, [
              schemaRow.id,
              configuredType.object_type_id,
              configuredType.object_type_name,
              configuredType.display_name,
              configuredType.description,
              0, // sync_priority
              configuredType.object_count,
              typeof configuredType.enabled === 'boolean' ? configuredType.enabled : configuredType.enabled === 1,
              configuredType.discovered_at,
              configuredType.updated_at,
            ]);
          } else {
            await txDb.execute(`
              INSERT INTO object_types (
                schema_id, jira_type_id, type_name, display_name, description,
                sync_priority, object_count, enabled, discovered_at, updated_at
              )
              VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            `, [
              schemaRow.id,
              configuredType.object_type_id,
              configuredType.object_type_name,
              configuredType.display_name,
              configuredType.description,
              0, // sync_priority
              configuredType.object_count,
              typeof configuredType.enabled === 'boolean' ? (configuredType.enabled ? 1 : 0) : configuredType.enabled,
              configuredType.discovered_at,
              configuredType.updated_at,
            ]);
          }
        }
        migratedCount++;
      }
      logger.info(`Migration: Migrated ${migratedCount} object types`);
      // Step 7: Fix UNIQUE constraints on object_types
      logger.info('Migration: Fixing UNIQUE constraints on object_types...');
      try {
        // Remove old UNIQUE constraint on type_name if it exists
        if (txDb.isPostgres) {
          // Check if constraint exists
          const constraintExists = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM pg_constraint
            WHERE conname = 'object_types_type_name_key'
          `);
          if (constraintExists && constraintExists.count > 0) {
            logger.info('Migration: Dropping old UNIQUE constraint on type_name...');
            await txDb.execute(`ALTER TABLE object_types DROP CONSTRAINT IF EXISTS object_types_type_name_key`);
          }
          // Add new UNIQUE constraint on (schema_id, type_name)
          const newConstraintExists = await txDb.queryOne<{ count: number }>(`
            SELECT COUNT(*) as count
            FROM pg_constraint
            WHERE conname = 'object_types_schema_id_type_name_key'
          `);
          if (!newConstraintExists || newConstraintExists.count === 0) {
            logger.info('Migration: Adding UNIQUE constraint on (schema_id, type_name)...');
            await txDb.execute(`
              ALTER TABLE object_types
              ADD CONSTRAINT object_types_schema_id_type_name_key UNIQUE (schema_id, type_name)
            `);
          }
        } else {
          // SQLite: UNIQUE constraints are part of table definition, so we need to recreate
          // For now, just log a warning - SQLite doesn't support DROP CONSTRAINT easily
          logger.info('Migration: SQLite UNIQUE constraints are handled in table definition');
        }
      } catch (error) {
        // Best-effort: the constraints may already be in the desired shape.
        logger.warn('Migration: Could not fix UNIQUE constraints (may already be correct)', error);
      }
      // Step 8: Add indexes if they don't exist
      logger.info('Migration: Adding indexes...');
      try {
        await txDb.execute(`CREATE INDEX IF NOT EXISTS idx_object_types_schema_id ON object_types(schema_id)`);
        await txDb.execute(`CREATE INDEX IF NOT EXISTS idx_object_types_enabled ON object_types(enabled)`);
        await txDb.execute(`CREATE INDEX IF NOT EXISTS idx_object_types_schema_enabled ON object_types(schema_id, enabled)`);
      } catch (error) {
        logger.warn('Migration: Some indexes may already exist', error);
      }
      // Step 9: Drop configured_object_types table
      logger.info('Migration: Dropping configured_object_types table...');
      await txDb.execute(`DROP TABLE IF EXISTS configured_object_types`);
      logger.info('Migration: Dropped configured_object_types table');
    });
    logger.info('Migration: Migration to normalized schema structure completed successfully');
  } catch (error) {
    logger.error('Migration: Failed to migrate to normalized schema structure', error);
    throw error;
  }
}

View File

@@ -6,7 +6,7 @@
import { logger } from '../logger.js';
import type { DatabaseAdapter } from './interface.js';
import { createDatabaseAdapter } from './factory.js';
import { getDatabaseAdapter } from './singleton.js';
// @ts-ignore - bcrypt doesn't have proper ESM types
import bcrypt from 'bcrypt';
@@ -351,6 +351,7 @@ async function seedInitialData(db: DatabaseAdapter): Promise<void> {
{ name: 'manage_users', description: 'Manage users and their roles', resource: 'users' },
{ name: 'manage_roles', description: 'Manage roles and permissions', resource: 'roles' },
{ name: 'manage_settings', description: 'Manage application settings', resource: 'settings' },
{ name: 'admin', description: 'Full administrative access (debug, sync, all operations)', resource: 'admin' },
];
for (const perm of permissions) {
@@ -424,6 +425,43 @@ async function seedInitialData(db: DatabaseAdapter): Promise<void> {
if (adminRole) {
roleIds['administrator'] = adminRole.id;
}
// Ensure "admin" permission exists (may have been added after initial setup)
const adminPerm = await db.queryOne<{ id: number }>(
'SELECT id FROM permissions WHERE name = ?',
['admin']
);
if (!adminPerm) {
// Add missing "admin" permission
await db.execute(
'INSERT INTO permissions (name, description, resource) VALUES (?, ?, ?)',
['admin', 'Full administrative access (debug, sync, all operations)', 'admin']
);
logger.info('Added missing "admin" permission');
}
// Ensure administrator role has "admin" permission
// Get admin permission ID (either existing or newly created)
const adminPermId = adminPerm?.id || (await db.queryOne<{ id: number }>(
'SELECT id FROM permissions WHERE name = ?',
['admin']
))?.id;
if (adminRole && adminPermId) {
const hasAdminPerm = await db.queryOne<{ role_id: number }>(
'SELECT role_id FROM role_permissions WHERE role_id = ? AND permission_id = ?',
[adminRole.id, adminPermId]
);
if (!hasAdminPerm) {
await db.execute(
'INSERT INTO role_permissions (role_id, permission_id) VALUES (?, ?)',
[adminRole.id, adminPermId]
);
logger.info('Assigned "admin" permission to administrator role');
}
}
}
// Create initial admin user if ADMIN_EMAIL and ADMIN_PASSWORD are set
@@ -489,7 +527,8 @@ async function seedInitialData(db: DatabaseAdapter): Promise<void> {
* Main migration function
*/
export async function runMigrations(): Promise<void> {
const db = createDatabaseAdapter();
// Use shared database adapter singleton
const db = getDatabaseAdapter();
try {
logger.info('Running database migrations...');
@@ -526,7 +565,7 @@ let authDatabaseAdapter: DatabaseAdapter | null = null;
export function getAuthDatabase(): DatabaseAdapter {
if (!authDatabaseAdapter) {
// Create adapter with allowClose=false so it won't be closed after operations
authDatabaseAdapter = createDatabaseAdapter(undefined, undefined, false);
authDatabaseAdapter = getDatabaseAdapter();
}
return authDatabaseAdapter;
}

View File

@@ -0,0 +1,43 @@
/**
* Database Schema Initialization
*
* Ensures normalized EAV schema is initialized before services use it.
*/
import { getDatabaseAdapter } from './singleton.js';
import { NORMALIZED_SCHEMA_POSTGRES, NORMALIZED_SCHEMA_SQLITE } from './normalized-schema.js';
import { logger } from '../logger.js';
// Module-level initialization state: `initialized` latches once the DDL has
// run successfully; `initializationPromise` de-duplicates concurrent callers.
let initialized = false;
let initializationPromise: Promise<void> | null = null;

/**
 * Ensure the normalized EAV database schema exists.
 *
 * Concurrent callers share a single in-flight initialization promise; once
 * initialization succeeds, subsequent calls are no-ops. On failure the cached
 * promise is cleared so a later call can retry instead of forever replaying
 * the original rejection (the previous behavior cached the rejected promise,
 * making recovery impossible without a process restart).
 *
 * @throws Whatever the database adapter raises while executing the schema DDL.
 */
export async function ensureSchemaInitialized(): Promise<void> {
  if (initialized) return;
  if (initializationPromise) {
    await initializationPromise;
    return;
  }
  initializationPromise = (async () => {
    try {
      // Use shared database adapter singleton
      const db = getDatabaseAdapter();
      const isPostgres = db.isPostgres === true;
      // Execute the dialect-appropriate schema
      const schema = isPostgres ? NORMALIZED_SCHEMA_POSTGRES : NORMALIZED_SCHEMA_SQLITE;
      await db.exec(schema);
      logger.info(`Database schema initialized (${isPostgres ? 'PostgreSQL' : 'SQLite'})`);
      initialized = true;
    } catch (error) {
      logger.error('Failed to initialize database schema', error);
      // Drop the cached promise so the next caller can retry initialization.
      initializationPromise = null;
      throw error;
    }
  })();
  await initializationPromise;
}

View File

@@ -0,0 +1,329 @@
/**
* Normalized Database Schema
*
* Generic, schema-agnostic normalized structure for CMDB data.
* Works with any Jira Assets configuration.
*/
/**
 * PostgreSQL DDL for the normalized, schema-agnostic CMDB structure.
 * Every statement is idempotent (IF NOT EXISTS) so the script can be run on
 * every startup. The duplicated "Schema indexes" section that previously
 * appeared a second time near the end has been removed — the statements were
 * byte-identical to the ones created right after the table definitions.
 */
export const NORMALIZED_SCHEMA_POSTGRES = `
-- =============================================================================
-- Schemas (Jira Assets schemas)
-- =============================================================================
CREATE TABLE IF NOT EXISTS schemas (
  id SERIAL PRIMARY KEY,
  jira_schema_id TEXT NOT NULL UNIQUE,
  name TEXT NOT NULL,
  object_schema_key TEXT,
  status TEXT,
  description TEXT,
  search_enabled BOOLEAN NOT NULL DEFAULT TRUE,
  discovered_at TIMESTAMP NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMP NOT NULL DEFAULT NOW()
);
-- =============================================================================
-- Object Types (discovered from Jira schema, with schema relation and enabled flag)
-- =============================================================================
CREATE TABLE IF NOT EXISTS object_types (
  id SERIAL PRIMARY KEY,
  schema_id INTEGER NOT NULL REFERENCES schemas(id) ON DELETE CASCADE,
  jira_type_id INTEGER NOT NULL,
  type_name TEXT NOT NULL,
  display_name TEXT NOT NULL,
  description TEXT,
  sync_priority INTEGER DEFAULT 0,
  object_count INTEGER DEFAULT 0,
  enabled BOOLEAN NOT NULL DEFAULT FALSE,
  discovered_at TIMESTAMP NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMP NOT NULL DEFAULT NOW(),
  UNIQUE(schema_id, jira_type_id),
  UNIQUE(schema_id, type_name)
);
-- =============================================================================
-- Attributes (discovered from Jira schema)
-- =============================================================================
CREATE TABLE IF NOT EXISTS attributes (
  id SERIAL PRIMARY KEY,
  jira_attr_id INTEGER NOT NULL,
  object_type_name TEXT NOT NULL,
  attr_name TEXT NOT NULL,
  field_name TEXT NOT NULL,
  attr_type TEXT NOT NULL,
  is_multiple BOOLEAN NOT NULL DEFAULT FALSE,
  is_editable BOOLEAN NOT NULL DEFAULT TRUE,
  is_required BOOLEAN NOT NULL DEFAULT FALSE,
  is_system BOOLEAN NOT NULL DEFAULT FALSE,
  reference_type_name TEXT,
  description TEXT,
  position INTEGER DEFAULT 0,
  discovered_at TIMESTAMP NOT NULL DEFAULT NOW(),
  UNIQUE(jira_attr_id, object_type_name)
);
-- =============================================================================
-- Objects (minimal metadata)
-- =============================================================================
CREATE TABLE IF NOT EXISTS objects (
  id TEXT PRIMARY KEY,
  object_key TEXT NOT NULL UNIQUE,
  object_type_name TEXT NOT NULL,
  label TEXT NOT NULL,
  jira_updated_at TIMESTAMP,
  jira_created_at TIMESTAMP,
  cached_at TIMESTAMP NOT NULL DEFAULT NOW()
);
-- =============================================================================
-- Attribute Values (EAV pattern - generic for all types)
-- =============================================================================
CREATE TABLE IF NOT EXISTS attribute_values (
  id SERIAL PRIMARY KEY,
  object_id TEXT NOT NULL REFERENCES objects(id) ON DELETE CASCADE,
  attribute_id INTEGER NOT NULL REFERENCES attributes(id) ON DELETE CASCADE,
  text_value TEXT,
  number_value NUMERIC,
  boolean_value BOOLEAN,
  date_value DATE,
  datetime_value TIMESTAMP,
  reference_object_id TEXT,
  reference_object_key TEXT,
  reference_object_label TEXT,
  array_index INTEGER DEFAULT 0,
  UNIQUE(object_id, attribute_id, array_index)
);
-- =============================================================================
-- Relationships (enhanced existing table)
-- =============================================================================
CREATE TABLE IF NOT EXISTS object_relations (
  id SERIAL PRIMARY KEY,
  source_id TEXT NOT NULL REFERENCES objects(id) ON DELETE CASCADE,
  target_id TEXT NOT NULL REFERENCES objects(id) ON DELETE CASCADE,
  attribute_id INTEGER NOT NULL REFERENCES attributes(id) ON DELETE CASCADE,
  source_type TEXT NOT NULL,
  target_type TEXT NOT NULL,
  UNIQUE(source_id, target_id, attribute_id)
);
-- =============================================================================
-- Schema Mappings (object type -> schema ID) - DEPRECATED
-- =============================================================================
CREATE TABLE IF NOT EXISTS schema_mappings (
  object_type_name TEXT PRIMARY KEY,
  schema_id TEXT NOT NULL,
  enabled BOOLEAN NOT NULL DEFAULT TRUE,
  created_at TIMESTAMP NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMP NOT NULL DEFAULT NOW()
);
-- =============================================================================
-- Sync Metadata (unchanged)
-- =============================================================================
CREATE TABLE IF NOT EXISTS sync_metadata (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL,
  updated_at TEXT NOT NULL
);
-- =============================================================================
-- Indexes for Performance
-- =============================================================================
-- Schema indexes
CREATE INDEX IF NOT EXISTS idx_schemas_jira_schema_id ON schemas(jira_schema_id);
CREATE INDEX IF NOT EXISTS idx_schemas_name ON schemas(name);
CREATE INDEX IF NOT EXISTS idx_schemas_search_enabled ON schemas(search_enabled);
-- Object type indexes (for schema queries)
CREATE INDEX IF NOT EXISTS idx_object_types_type_name ON object_types(type_name);
CREATE INDEX IF NOT EXISTS idx_object_types_jira_id ON object_types(jira_type_id);
CREATE INDEX IF NOT EXISTS idx_object_types_schema_id ON object_types(schema_id);
CREATE INDEX IF NOT EXISTS idx_object_types_sync_priority ON object_types(sync_priority);
CREATE INDEX IF NOT EXISTS idx_object_types_enabled ON object_types(enabled);
CREATE INDEX IF NOT EXISTS idx_object_types_schema_enabled ON object_types(schema_id, enabled);
-- Object indexes
CREATE INDEX IF NOT EXISTS idx_objects_type ON objects(object_type_name);
CREATE INDEX IF NOT EXISTS idx_objects_key ON objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_label ON objects(label);
CREATE INDEX IF NOT EXISTS idx_objects_cached_at ON objects(cached_at);
-- Attribute indexes
CREATE INDEX IF NOT EXISTS idx_attributes_type ON attributes(object_type_name);
CREATE INDEX IF NOT EXISTS idx_attributes_field ON attributes(field_name);
CREATE INDEX IF NOT EXISTS idx_attributes_jira_id ON attributes(jira_attr_id);
CREATE INDEX IF NOT EXISTS idx_attributes_type_field ON attributes(object_type_name, field_name);
-- Attribute value indexes (critical for query performance)
CREATE INDEX IF NOT EXISTS idx_attr_values_object ON attribute_values(object_id);
CREATE INDEX IF NOT EXISTS idx_attr_values_attr ON attribute_values(attribute_id);
CREATE INDEX IF NOT EXISTS idx_attr_values_text ON attribute_values(text_value) WHERE text_value IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_attr_values_number ON attribute_values(number_value) WHERE number_value IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_attr_values_reference ON attribute_values(reference_object_id) WHERE reference_object_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_attr_values_composite_text ON attribute_values(attribute_id, text_value) WHERE text_value IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_attr_values_composite_ref ON attribute_values(attribute_id, reference_object_id) WHERE reference_object_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_attr_values_object_attr ON attribute_values(object_id, attribute_id);
-- Relation indexes
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_id);
CREATE INDEX IF NOT EXISTS idx_relations_source_type ON object_relations(source_id, source_type);
CREATE INDEX IF NOT EXISTS idx_relations_target_type ON object_relations(target_id, target_type);
-- Schema mapping indexes
CREATE INDEX IF NOT EXISTS idx_schema_mappings_type ON schema_mappings(object_type_name);
CREATE INDEX IF NOT EXISTS idx_schema_mappings_schema ON schema_mappings(schema_id);
CREATE INDEX IF NOT EXISTS idx_schema_mappings_enabled ON schema_mappings(enabled);
`;
/**
 * SQLite flavor of the normalized schema (used for development/testing).
 * Mirrors NORMALIZED_SCHEMA_POSTGRES with SQLite-compatible constructs:
 * booleans as INTEGER 0/1, timestamps as TEXT via datetime('now'),
 * AUTOINCREMENT primary keys, and foreign keys declared inline on the table
 * (SQLite cannot add them after creation). All statements are idempotent.
 */
export const NORMALIZED_SCHEMA_SQLITE = `
-- =============================================================================
-- SQLite version (for development/testing)
-- =============================================================================
CREATE TABLE IF NOT EXISTS schemas (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  jira_schema_id TEXT NOT NULL UNIQUE,
  name TEXT NOT NULL,
  object_schema_key TEXT,
  status TEXT,
  description TEXT,
  search_enabled INTEGER NOT NULL DEFAULT 1,
  discovered_at TEXT NOT NULL DEFAULT (datetime('now')),
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE TABLE IF NOT EXISTS object_types (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  schema_id INTEGER NOT NULL,
  jira_type_id INTEGER NOT NULL,
  type_name TEXT NOT NULL,
  display_name TEXT NOT NULL,
  description TEXT,
  sync_priority INTEGER DEFAULT 0,
  object_count INTEGER DEFAULT 0,
  enabled INTEGER NOT NULL DEFAULT 0,
  discovered_at TEXT NOT NULL DEFAULT (datetime('now')),
  updated_at TEXT NOT NULL DEFAULT (datetime('now')),
  UNIQUE(schema_id, jira_type_id),
  UNIQUE(schema_id, type_name),
  FOREIGN KEY (schema_id) REFERENCES schemas(id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS attributes (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  jira_attr_id INTEGER NOT NULL,
  object_type_name TEXT NOT NULL,
  attr_name TEXT NOT NULL,
  field_name TEXT NOT NULL,
  attr_type TEXT NOT NULL,
  is_multiple INTEGER NOT NULL DEFAULT 0,
  is_editable INTEGER NOT NULL DEFAULT 1,
  is_required INTEGER NOT NULL DEFAULT 0,
  is_system INTEGER NOT NULL DEFAULT 0,
  reference_type_name TEXT,
  description TEXT,
  position INTEGER DEFAULT 0,
  discovered_at TEXT NOT NULL DEFAULT (datetime('now')),
  UNIQUE(jira_attr_id, object_type_name)
);
CREATE TABLE IF NOT EXISTS objects (
  id TEXT PRIMARY KEY,
  object_key TEXT NOT NULL UNIQUE,
  object_type_name TEXT NOT NULL,
  label TEXT NOT NULL,
  jira_updated_at TEXT,
  jira_created_at TEXT,
  cached_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE TABLE IF NOT EXISTS attribute_values (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  object_id TEXT NOT NULL,
  attribute_id INTEGER NOT NULL,
  text_value TEXT,
  number_value REAL,
  boolean_value INTEGER,
  date_value TEXT,
  datetime_value TEXT,
  reference_object_id TEXT,
  reference_object_key TEXT,
  reference_object_label TEXT,
  array_index INTEGER DEFAULT 0,
  UNIQUE(object_id, attribute_id, array_index),
  FOREIGN KEY (object_id) REFERENCES objects(id) ON DELETE CASCADE,
  FOREIGN KEY (attribute_id) REFERENCES attributes(id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS object_relations (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  source_id TEXT NOT NULL,
  target_id TEXT NOT NULL,
  attribute_id INTEGER NOT NULL,
  source_type TEXT NOT NULL,
  target_type TEXT NOT NULL,
  UNIQUE(source_id, target_id, attribute_id),
  FOREIGN KEY (source_id) REFERENCES objects(id) ON DELETE CASCADE,
  FOREIGN KEY (target_id) REFERENCES objects(id) ON DELETE CASCADE,
  FOREIGN KEY (attribute_id) REFERENCES attributes(id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS schema_mappings (
  object_type_name TEXT PRIMARY KEY,
  schema_id TEXT NOT NULL,
  enabled INTEGER NOT NULL DEFAULT 1,
  created_at TEXT NOT NULL DEFAULT (datetime('now')),
  updated_at TEXT NOT NULL DEFAULT (datetime('now'))
);
CREATE TABLE IF NOT EXISTS sync_metadata (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL,
  updated_at TEXT NOT NULL
);
-- Indexes
CREATE INDEX IF NOT EXISTS idx_objects_type ON objects(object_type_name);
CREATE INDEX IF NOT EXISTS idx_objects_key ON objects(object_key);
CREATE INDEX IF NOT EXISTS idx_objects_label ON objects(label);
CREATE INDEX IF NOT EXISTS idx_attributes_type ON attributes(object_type_name);
CREATE INDEX IF NOT EXISTS idx_attributes_field ON attributes(field_name);
CREATE INDEX IF NOT EXISTS idx_attributes_jira_id ON attributes(jira_attr_id);
CREATE INDEX IF NOT EXISTS idx_attributes_type_field ON attributes(object_type_name, field_name);
CREATE INDEX IF NOT EXISTS idx_attr_values_object ON attribute_values(object_id);
CREATE INDEX IF NOT EXISTS idx_attr_values_attr ON attribute_values(attribute_id);
CREATE INDEX IF NOT EXISTS idx_attr_values_text ON attribute_values(text_value);
CREATE INDEX IF NOT EXISTS idx_attr_values_number ON attribute_values(number_value);
CREATE INDEX IF NOT EXISTS idx_attr_values_reference ON attribute_values(reference_object_id);
CREATE INDEX IF NOT EXISTS idx_attr_values_object_attr ON attribute_values(object_id, attribute_id);
CREATE INDEX IF NOT EXISTS idx_relations_source ON object_relations(source_id);
CREATE INDEX IF NOT EXISTS idx_relations_target ON object_relations(target_id);
CREATE INDEX IF NOT EXISTS idx_relations_attr ON object_relations(attribute_id);
-- Schema indexes
CREATE INDEX IF NOT EXISTS idx_schemas_jira_schema_id ON schemas(jira_schema_id);
CREATE INDEX IF NOT EXISTS idx_schemas_name ON schemas(name);
CREATE INDEX IF NOT EXISTS idx_schemas_search_enabled ON schemas(search_enabled);
-- Object type indexes
CREATE INDEX IF NOT EXISTS idx_object_types_type_name ON object_types(type_name);
CREATE INDEX IF NOT EXISTS idx_object_types_jira_id ON object_types(jira_type_id);
CREATE INDEX IF NOT EXISTS idx_object_types_schema_id ON object_types(schema_id);
CREATE INDEX IF NOT EXISTS idx_object_types_sync_priority ON object_types(sync_priority);
CREATE INDEX IF NOT EXISTS idx_object_types_enabled ON object_types(enabled);
CREATE INDEX IF NOT EXISTS idx_object_types_schema_enabled ON object_types(schema_id, enabled);
-- Schema mapping indexes
CREATE INDEX IF NOT EXISTS idx_schema_mappings_type ON schema_mappings(object_type_name);
CREATE INDEX IF NOT EXISTS idx_schema_mappings_schema ON schema_mappings(schema_id);
CREATE INDEX IF NOT EXISTS idx_schema_mappings_enabled ON schema_mappings(enabled);
`;

View File

@@ -9,6 +9,7 @@ import { logger } from '../logger.js';
import type { DatabaseAdapter } from './interface.js';
export class PostgresAdapter implements DatabaseAdapter {
public readonly isPostgres = true; // Indicates this is PostgreSQL
private pool: Pool;
private connectionString: string;
private isClosed: boolean = false;
@@ -72,6 +73,7 @@ export class PostgresAdapter implements DatabaseAdapter {
// Create a transaction-scoped adapter
const transactionAdapter: DatabaseAdapter = {
isPostgres: true, // Indicates this is PostgreSQL
query: async (sql: string, params?: any[]) => {
const convertedSql = this.convertPlaceholders(sql);
const result = await client.query(convertedSql, params);
@@ -102,9 +104,16 @@ export class PostgresAdapter implements DatabaseAdapter {
const result = await callback(transactionAdapter);
await client.query('COMMIT');
return result;
} catch (error) {
} catch (error: any) {
await client.query('ROLLBACK');
logger.error('PostgreSQL transaction error:', error);
// Don't log foreign key constraint errors as errors - they're expected and handled by caller
if (error?.code === '23503' || error?.message?.includes('foreign key constraint')) {
logger.debug('PostgreSQL transaction error (foreign key constraint - handled by caller):', error);
} else {
logger.error('PostgreSQL transaction error:', error);
}
throw error;
} finally {
client.release();
@@ -148,10 +157,13 @@ export class PostgresAdapter implements DatabaseAdapter {
async getSizeBytes(): Promise<number> {
try {
const result = await this.query<{ size: number }>(`
const result = await this.query<{ size: number | string }>(`
SELECT pg_database_size(current_database()) as size
`);
return result[0]?.size || 0;
// PostgreSQL returns bigint as string, ensure we convert to number
const size = result[0]?.size;
if (!size) return 0;
return typeof size === 'string' ? parseInt(size, 10) : Number(size);
} catch (error) {
logger.error('PostgreSQL getSizeBytes error:', error);
return 0;

View File

@@ -0,0 +1,28 @@
/**
* Database Adapter Singleton
*
* Provides a shared database adapter instance to prevent multiple connections.
* All services should use this singleton instead of creating their own adapters.
*/
import { createDatabaseAdapter } from './factory.js';
import type { DatabaseAdapter } from './interface.js';
let dbAdapterInstance: DatabaseAdapter | null = null;
/**
* Get the shared database adapter instance
*/
export function getDatabaseAdapter(): DatabaseAdapter {
if (!dbAdapterInstance) {
dbAdapterInstance = createDatabaseAdapter(undefined, undefined, false); // Don't allow close (singleton)
}
return dbAdapterInstance;
}
/**
* Reset the singleton (for testing only)
*/
export function resetDatabaseAdapter(): void {
dbAdapterInstance = null;
}

View File

@@ -98,6 +98,63 @@ class JiraAssetsService {
private applicationFunctionCategoriesCache: Map<string, ReferenceValue> | null = null;
// Cache: Dynamics Factors with factors
private dynamicsFactorsCache: Map<string, ReferenceValue> | null = null;
/**
* Get schema ID for an object type from database
* Returns the schema ID of the first enabled object type with the given type name
*/
private async getSchemaIdForObjectType(typeName: string): Promise<string | null> {
try {
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const type = enabledTypes.find(et => et.objectTypeName === typeName);
return type?.schemaId || null;
} catch (error) {
logger.warn(`JiraAssets: Failed to get schema ID for ${typeName}`, error);
return null;
}
}
/**
* Get first available schema ID from database (fallback)
*/
private async getFirstSchemaId(): Promise<string | null> {
try {
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (!db) return null;
await db.ensureInitialized?.();
const schemaRow = await db.queryOne<{ jira_schema_id: string }>(
`SELECT jira_schema_id FROM schemas ORDER BY jira_schema_id LIMIT 1`
);
return schemaRow?.jira_schema_id || null;
} catch (error) {
logger.warn('JiraAssets: Failed to get first schema ID', error);
return null;
}
}
/**
* Get all available schema IDs from database that are enabled for searching
*/
private async getAllSchemaIds(): Promise<string[]> {
try {
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (!db) return [];
await db.ensureInitialized?.();
const schemaRows = await db.query<{ jira_schema_id: string }>(
`SELECT DISTINCT jira_schema_id FROM schemas WHERE search_enabled = ? ORDER BY jira_schema_id`,
[db.isPostgres ? true : 1]
);
return schemaRows.map(row => row.jira_schema_id);
} catch (error) {
logger.warn('JiraAssets: Failed to get all schema IDs', error);
return [];
}
}
// Cache: Complexity Factors with factors
private complexityFactorsCache: Map<string, ReferenceValue> | null = null;
// Cache: Number of Users with factors
@@ -109,7 +166,7 @@ class JiraAssetsService {
// Cache: Team dashboard data
private teamDashboardCache: { data: TeamDashboardData; timestamp: number } | null = null;
private readonly TEAM_DASHBOARD_CACHE_TTL = 5 * 60 * 1000; // 5 minutes
// Cache: Dashboard stats
private dashboardStatsCache: {
data: {
totalApplications: number;
@@ -121,6 +178,8 @@ class JiraAssetsService {
timestamp: number
} | null = null;
private readonly DASHBOARD_STATS_CACHE_TTL = 3 * 60 * 1000; // 3 minutes
// Warming lock to prevent multiple simultaneous warming operations
private isWarming: boolean = false;
constructor() {
// Try both API paths - Insight (Data Center) and Assets (Cloud)
@@ -742,7 +801,7 @@ class JiraAssetsService {
try {
await this.detectApiType();
const url = `/object/${embeddedRefObj.id}?includeAttributes=true&includeAttributesDeep=1`;
const url = `/object/${embeddedRefObj.id}?includeAttributes=true&includeAttributesDeep=2`;
const refObj = await this.request<JiraAssetsObject>(url);
if (refObj) {
@@ -1337,6 +1396,12 @@ class JiraAssetsService {
logger.info(`Searching applications with query: ${qlQuery}`);
logger.debug(`Filters: ${JSON.stringify(filters)}`);
// Get schema ID for ApplicationComponent from database
const schemaId = await this.getSchemaIdForObjectType('ApplicationComponent') || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
let response: JiraAssetsSearchResponse;
if (this.isDataCenter) {
@@ -1347,8 +1412,8 @@ class JiraAssetsService {
page: page.toString(),
resultPerPage: pageSize.toString(),
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: schemaId,
});
logger.debug(`IQL request: /iql/objects?${params.toString()}`);
@@ -1368,7 +1433,10 @@ class JiraAssetsService {
'/aql/objects',
{
method: 'POST',
body: JSON.stringify(requestBody),
body: JSON.stringify({
...requestBody,
objectSchemaId: schemaId,
}),
}
);
}
@@ -1665,10 +1733,29 @@ class JiraAssetsService {
}
}
async getReferenceObjects(objectType: string): Promise<ReferenceValue[]> {
async getReferenceObjects(objectType: string, schemaId?: string): Promise<ReferenceValue[]> {
try {
await this.detectApiType();
// Get schema ID from mapping service if not provided
let effectiveSchemaId = schemaId;
if (!effectiveSchemaId) {
const { schemaMappingService } = await import('./schemaMappingService.js');
const { OBJECT_TYPES } = await import('../generated/jira-schema.js');
// Find the typeName from the objectType (display name)
let typeName: string | null = null;
for (const [key, def] of Object.entries(OBJECT_TYPES)) {
if (def.name === objectType) {
typeName = key;
break;
}
}
// Use typeName if found, otherwise fall back to objectType
effectiveSchemaId = await schemaMappingService.getSchemaId(typeName || objectType);
}
const qlQuery = `objectType = "${objectType}"`;
let response: JiraAssetsSearchResponse;
@@ -1678,8 +1765,8 @@ class JiraAssetsService {
iql: qlQuery,
resultPerPage: '200',
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: effectiveSchemaId,
});
response = await this.request<JiraAssetsSearchResponse>(
@@ -1695,6 +1782,7 @@ class JiraAssetsService {
qlQuery,
resultPerPage: 200,
includeAttributes: true,
objectSchemaId: effectiveSchemaId,
}),
}
);
@@ -1718,6 +1806,50 @@ class JiraAssetsService {
}
}
// Cache objects to normalized cache store for better performance
// This ensures objects are available in the database cache, not just in-memory
// This prevents individual API calls later when these objects are needed
if (response.objectEntries.length > 0) {
try {
const { OBJECT_TYPES } = await import('../generated/jira-schema.js');
// Find the CMDBObjectTypeName for this objectType
let typeName: CMDBObjectTypeName | null = null;
for (const [key, def] of Object.entries(OBJECT_TYPES)) {
if (def.name === objectType) {
typeName = key as CMDBObjectTypeName;
break;
}
}
if (typeName) {
// Parse and cache objects in batch using the same business logic as sync
const { jiraAssetsClient } = await import('./jiraAssetsClient.js');
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const parsedObjects = await Promise.all(
response.objectEntries.map(obj => jiraAssetsClient.parseObject(obj))
);
const validParsedObjects = parsedObjects.filter((obj): obj is any => obj !== null);
if (validParsedObjects.length > 0) {
// Batch upsert to cache (same as sync engine)
await normalizedCacheStore.batchUpsertObjects(typeName, validParsedObjects);
// Extract and store relations for all objects (same as sync engine)
for (const obj of validParsedObjects) {
await normalizedCacheStore.extractAndStoreRelations(typeName, obj);
}
logger.debug(`Cached ${validParsedObjects.length} ${objectType} objects to normalized cache with relations`);
}
}
} catch (error) {
// Don't fail if caching fails - this is an optimization
logger.debug(`Failed to cache ${objectType} objects to normalized cache`, error);
}
}
const results = response.objectEntries.map((obj) => {
// Extract Description attribute (try multiple possible attribute names)
// Use attrSchema for fallback lookup by attribute ID
@@ -1926,6 +2058,12 @@ class JiraAssetsService {
teamsById.set(team.objectId, team);
}
// Get schema ID for ApplicationComponent
const schemaId = await this.getSchemaIdForObjectType('ApplicationComponent') || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
let response: JiraAssetsSearchResponse;
if (this.isDataCenter) {
@@ -1933,7 +2071,7 @@ class JiraAssetsService {
iql,
resultPerPage: '500',
includeAttributes: 'true',
objectSchemaId: config.jiraSchemaId,
objectSchemaId: schemaId,
});
response = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2081,8 +2219,52 @@ class JiraAssetsService {
return null;
}
// Check if there's already a pending request for this object (deduplicate concurrent requests)
// Check both objectIdToFetch and the alternate key (if both are provided)
const pendingRequest = this.pendingReferenceRequests.get(objectIdToFetch)
|| (objectId && objectKey && objectId !== objectKey ? this.pendingReferenceRequests.get(objectKey) : undefined)
|| (objectId && objectKey && objectId !== objectKey ? this.pendingReferenceRequests.get(objectId) : undefined);
if (pendingRequest) {
logger.debug(`fetchEnrichedReferenceValue: Reusing pending request for ${objectKey} (${objectIdToFetch})`);
return pendingRequest;
}
// Create a new fetch promise and store it in pending requests
// Store by both keys if they differ to catch all concurrent requests
const fetchPromise = this.doFetchEnrichedReferenceValue(objectKey, objectId, objectIdToFetch, cachedByKey, cachedById);
this.pendingReferenceRequests.set(objectIdToFetch, fetchPromise);
if (objectId && objectKey && objectId !== objectKey) {
// Also store by the alternate key to deduplicate requests that use the other key
this.pendingReferenceRequests.set(objectKey, fetchPromise);
this.pendingReferenceRequests.set(objectId, fetchPromise);
}
try {
const url = `/object/${objectIdToFetch}?includeAttributes=true&includeAttributesDeep=1`;
const result = await fetchPromise;
return result;
} finally {
// Remove from pending requests once done (success or failure)
this.pendingReferenceRequests.delete(objectIdToFetch);
if (objectId && objectKey && objectId !== objectKey) {
this.pendingReferenceRequests.delete(objectKey);
this.pendingReferenceRequests.delete(objectId);
}
}
}
/**
* Internal method to actually fetch the enriched reference value (called by fetchEnrichedReferenceValue)
*/
private async doFetchEnrichedReferenceValue(
objectKey: string,
objectId: string | undefined,
objectIdToFetch: string,
cachedByKey: ReferenceValue | undefined,
cachedById: ReferenceValue | undefined
): Promise<ReferenceValue | null> {
try {
const url = `/object/${objectIdToFetch}?includeAttributes=true&includeAttributesDeep=2`;
const refObj = await this.request<JiraAssetsObject>(url);
if (!refObj) {
@@ -2170,6 +2352,12 @@ class JiraAssetsService {
logger.info('Dashboard stats: Cache miss or expired, fetching fresh data');
try {
// Get schema ID for ApplicationComponent
const schemaId = await this.getSchemaIdForObjectType('ApplicationComponent') || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
const allAppsQuery = 'objectType = "Application Component" AND Status != "Closed"';
// First, get total count with a single query
@@ -2179,7 +2367,7 @@ class JiraAssetsService {
iql: allAppsQuery,
resultPerPage: '1',
includeAttributes: 'true',
objectSchemaId: config.jiraSchemaId,
objectSchemaId: schemaId,
});
totalCountResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2193,6 +2381,7 @@ class JiraAssetsService {
qlQuery: allAppsQuery,
resultPerPage: 1,
includeAttributes: true,
objectSchemaId: schemaId,
}),
}
);
@@ -2222,7 +2411,7 @@ class JiraAssetsService {
iql: sampleQuery,
resultPerPage: '1',
includeAttributes: 'true',
objectSchemaId: config.jiraSchemaId,
objectSchemaId: schemaId,
});
sampleResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${sampleParams.toString()}`
@@ -2232,14 +2421,15 @@ class JiraAssetsService {
'/aql/objects',
{
method: 'POST',
body: JSON.stringify({
qlQuery: sampleQuery,
resultPerPage: 1,
includeAttributes: true,
}),
}
);
body: JSON.stringify({
qlQuery: sampleQuery,
resultPerPage: 1,
includeAttributes: true,
objectSchemaId: schemaId,
}),
}
);
}
if (sampleResponse.objectEntries && sampleResponse.objectEntries.length > 0) {
const firstObj = sampleResponse.objectEntries[0];
@@ -2273,7 +2463,7 @@ class JiraAssetsService {
resultPerPage: pageSize.toString(),
pageNumber: currentPage.toString(),
includeAttributes: 'true',
objectSchemaId: config.jiraSchemaId,
objectSchemaId: schemaId,
});
batchResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2288,6 +2478,7 @@ class JiraAssetsService {
resultPerPage: pageSize,
pageNumber: currentPage,
includeAttributes: true,
objectSchemaId: schemaId,
}),
}
);
@@ -2386,7 +2577,7 @@ class JiraAssetsService {
iql: classifiedQuery,
resultPerPage: '1',
includeAttributes: 'true',
objectSchemaId: config.jiraSchemaId,
objectSchemaId: schemaId,
});
classifiedResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2484,13 +2675,19 @@ class JiraAssetsService {
if (this.attributeSchemaCache.has(objectTypeName)) {
attrSchema = this.attributeSchemaCache.get(objectTypeName);
} else {
// Get schema ID for ApplicationComponent
const schemaId = await this.getSchemaIdForObjectType('ApplicationComponent') || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
const testParams = new URLSearchParams({
iql: qlQuery,
page: '1',
resultPerPage: '1',
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: schemaId,
});
const testResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${testParams.toString()}`
@@ -2510,6 +2707,12 @@ class JiraAssetsService {
this.ensureFactorCaches(),
]);
// Get schema ID for ApplicationComponent
const schemaId = await this.getSchemaIdForObjectType('ApplicationComponent') || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
// Get total count
let firstResponse: JiraAssetsSearchResponse;
if (this.isDataCenter) {
@@ -2518,8 +2721,8 @@ class JiraAssetsService {
page: '1',
resultPerPage: '1',
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: schemaId,
});
firstResponse = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2563,8 +2766,8 @@ class JiraAssetsService {
page: pageNum.toString(),
resultPerPage: batchSize.toString(),
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: schemaId,
});
response = await this.request<JiraAssetsSearchResponse>(
`/iql/objects?${params.toString()}`
@@ -2982,84 +3185,159 @@ class JiraAssetsService {
try {
await this.detectApiType();
// Use Insight AM search API endpoint (different from IQL)
const searchUrl = `${config.jiraHost}/rest/insight-am/1/search?` +
`schema=${config.jiraSchemaId}&` +
`criteria=${encodeURIComponent(query)}&` +
`criteriaType=FREETEXT&` +
`attributes=Key,Object+Type,Label,Name,Description,Status&` +
`offset=0&limit=${limit}`;
logger.info(`CMDB search API call - Query: "${query}", URL: ${searchUrl}`);
const response = await fetch(searchUrl, {
method: 'GET',
headers: this.headers,
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(`Jira CMDB search error: ${response.status} - ${errorText}`);
// Get all available schema IDs to search across all schemas
const schemaIds = await this.getAllSchemaIds();
if (schemaIds.length === 0) {
// Fallback to first schema if no schemas found
const fallbackSchemaId = await this.getFirstSchemaId();
if (!fallbackSchemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
schemaIds.push(fallbackSchemaId);
}
const data = await response.json() as {
results?: Array<{
id: number;
key: string;
label: string;
objectTypeId: number;
avatarUrl?: string;
attributes?: Array<{
id: number;
name: string;
objectTypeAttributeId: number;
values?: unknown[];
}>;
}>;
metadata?: {
count: number;
offset: number;
limit: number;
total: number;
criteria: unknown;
};
objectTypes?: Array<{
logger.info(`CMDB search: Searching across ${schemaIds.length} schema(s) for query: "${query}"`);
// Search each schema and collect results
const searchPromises = schemaIds.map(async (schemaId) => {
try {
// Use Insight AM search API endpoint (different from IQL)
const searchUrl = `${config.jiraHost}/rest/insight-am/1/search?` +
`schema=${schemaId}&` +
`criteria=${encodeURIComponent(query)}&` +
`criteriaType=FREETEXT&` +
`attributes=Key,Object+Type,Label,Name,Description,Status&` +
`offset=0&limit=${limit}`;
logger.debug(`CMDB search API call - Schema: ${schemaId}, Query: "${query}", URL: ${searchUrl}`);
const response = await fetch(searchUrl, {
method: 'GET',
headers: this.headers,
});
if (!response.ok) {
const errorText = await response.text();
logger.warn(`CMDB search failed for schema ${schemaId}: ${response.status} - ${errorText}`);
return null; // Return null for failed schemas, we'll continue with others
}
const data = await response.json() as {
results?: Array<{
id: number;
key: string;
label: string;
objectTypeId: number;
avatarUrl?: string;
attributes?: Array<{
id: number;
name: string;
objectTypeAttributeId: number;
values?: unknown[];
}>;
}>;
metadata?: {
count: number;
offset: number;
limit: number;
total: number;
criteria: unknown;
};
objectTypes?: Array<{
id: number;
name: string;
iconUrl?: string;
}>;
};
return {
schemaId,
results: data.results || [],
objectTypes: data.objectTypes || [],
metadata: data.metadata,
};
} catch (error) {
logger.warn(`CMDB search error for schema ${schemaId}:`, error);
return null; // Return null for failed schemas, we'll continue with others
}
});
// Wait for all searches to complete
const searchResults = await Promise.all(searchPromises);
// Merge results from all schemas
const allResults: Array<{
id: number;
key: string;
label: string;
objectTypeId: number;
avatarUrl?: string;
attributes: Array<{
id: number;
name: string;
iconUrl?: string;
objectTypeAttributeId: number;
values: unknown[];
}>;
};
}> = [];
const objectTypeMap = new Map<number, { id: number; name: string; iconUrl?: string }>();
let totalCount = 0;
// Transform the response to a cleaner format
// The API returns attributes with nested structure, we flatten the values
const transformedResults = (data.results || []).map((result) => ({
id: result.id,
key: result.key,
label: result.label,
objectTypeId: result.objectTypeId,
avatarUrl: result.avatarUrl,
attributes: (result.attributes || []).map((attr) => ({
id: attr.id,
name: attr.name,
objectTypeAttributeId: attr.objectTypeAttributeId,
values: attr.values || [],
})),
}));
for (const result of searchResults) {
if (!result) continue; // Skip failed schemas
// Add results (avoid duplicates by key)
const existingKeys = new Set(allResults.map(r => r.key));
for (const item of result.results) {
if (!existingKeys.has(item.key)) {
allResults.push({
id: item.id,
key: item.key,
label: item.label,
objectTypeId: item.objectTypeId,
avatarUrl: item.avatarUrl,
attributes: (item.attributes || []).map((attr) => ({
id: attr.id,
name: attr.name,
objectTypeAttributeId: attr.objectTypeAttributeId,
values: attr.values || [],
})),
});
existingKeys.add(item.key);
}
}
// Merge object types (avoid duplicates by id)
for (const ot of result.objectTypes) {
if (!objectTypeMap.has(ot.id)) {
objectTypeMap.set(ot.id, {
id: ot.id,
name: ot.name,
iconUrl: ot.iconUrl,
});
}
}
// Sum up total counts
if (result.metadata?.total) {
totalCount += result.metadata.total;
}
}
// Apply limit to merged results
const limitedResults = allResults.slice(0, limit);
logger.info(`CMDB search: Found ${limitedResults.length} results (${allResults.length} total before limit) across ${schemaIds.length} schema(s)`);
return {
metadata: data.metadata || {
count: transformedResults.length,
metadata: {
count: limitedResults.length,
offset: 0,
limit,
total: transformedResults.length,
criteria: { query, type: 'FREETEXT', schema: parseInt(config.jiraSchemaId, 10) },
total: totalCount || limitedResults.length,
criteria: { query, type: 'FREETEXT', schema: schemaIds.length > 0 ? parseInt(schemaIds[0], 10) : 0 },
},
objectTypes: (data.objectTypes || []).map((ot) => ({
id: ot.id,
name: ot.name,
iconUrl: ot.iconUrl,
})),
results: transformedResults,
objectTypes: Array.from(objectTypeMap.values()),
results: limitedResults,
};
} catch (error) {
logger.error('CMDB search failed', error);
@@ -3099,12 +3377,18 @@ class JiraAssetsService {
let response: JiraAssetsSearchResponse;
if (this.isDataCenter) {
// Get schema ID for the object type (or first available)
const schemaId = await this.getSchemaIdForObjectType(objectType) || await this.getFirstSchemaId();
if (!schemaId) {
throw new Error('No schema ID available. Please configure object types in Schema Configuration settings.');
}
const params = new URLSearchParams({
iql: iqlQuery,
resultPerPage: '100',
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: schemaId,
});
response = await this.request<JiraAssetsSearchResponse>(
@@ -3206,37 +3490,56 @@ class JiraAssetsService {
}
/**
* Pre-warm the team dashboard cache in background
* This is called on server startup so users don't experience slow first load
* Pre-warm the full cache using sync engine
* This is more efficient than pre-warming just the team dashboard
* as it syncs all object types and their relations
* Checks cache status first to avoid unnecessary syncs
*/
async preWarmTeamDashboardCache(): Promise<void> {
async preWarmFullCache(): Promise<void> {
// Prevent multiple simultaneous warming operations
if (this.isWarming) {
logger.debug('Cache warming already in progress, skipping duplicate request');
return;
}
try {
// Only pre-warm if cache is empty
if (this.teamDashboardCache) {
logger.info('Team dashboard cache already warm, skipping pre-warm');
this.isWarming = true;
// Check if schema configuration is complete before attempting sync
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const isConfigured = await schemaConfigurationService.isConfigurationComplete();
if (!isConfigured) {
logger.info('Schema configuration not complete, skipping automatic cache pre-warming. Please configure object types in settings first.');
return;
}
logger.info('Pre-warming team dashboard cache in background...');
// Check if cache is already warm before syncing
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const isWarm = await normalizedCacheStore.isWarm();
if (isWarm) {
logger.info('Cache is already warm, skipping pre-warm');
return;
}
logger.info('Pre-warming full cache in background using sync engine...');
const startTime = Date.now();
// Fetch with default excluded statuses (which is what most users will see)
await this.getTeamDashboardData(['Closed', 'Deprecated']);
const { syncEngine } = await import('./syncEngine.js');
await syncEngine.fullSync();
const duration = Date.now() - startTime;
logger.info(`Team dashboard cache pre-warmed in ${duration}ms`);
logger.info(`Full cache pre-warmed in ${duration}ms`);
} catch (error) {
logger.error('Failed to pre-warm team dashboard cache', error);
logger.error('Failed to pre-warm full cache', error);
// Don't throw - pre-warming is optional
} finally {
this.isWarming = false;
}
}
}
export const jiraAssetsService = new JiraAssetsService();
// Pre-warm team dashboard cache on startup (runs in background, doesn't block server start)
setTimeout(() => {
jiraAssetsService.preWarmTeamDashboardCache().catch(() => {
// Error already logged in the method
});
}, 5000); // Wait 5 seconds after server start to avoid competing with other initialization
// Note: Pre-warm cache removed - all syncs must be triggered manually from GUI
// The preWarmFullCache() method is still available for manual API calls but won't auto-start

View File

@@ -7,9 +7,12 @@
import { config } from '../config/env.js';
import { logger } from './logger.js';
import { OBJECT_TYPES } from '../generated/jira-schema.js';
import type { CMDBObject, CMDBObjectTypeName, ObjectReference } from '../generated/jira-types.js';
import { schemaCacheService } from './schemaCacheService.js';
import type { CMDBObject, ObjectReference } from '../generated/jira-types.js';
import type { JiraAssetsObject, JiraAssetsAttribute, JiraAssetsSearchResponse } from '../types/index.js';
import type { ObjectEntry, ObjectAttribute, ObjectAttributeValue, ReferenceValue, ConfluenceValue } from '../domain/jiraAssetsPayload.js';
import { isReferenceValue, isSimpleValue, hasAttributes } from '../domain/jiraAssetsPayload.js';
import { normalizedCacheStore } from './normalizedCacheStore.js';
// =============================================================================
// Types
@@ -31,14 +34,39 @@ export interface JiraUpdatePayload {
}>;
}
// Map from Jira object type ID to our type name
const TYPE_ID_TO_NAME: Record<number, CMDBObjectTypeName> = {};
const JIRA_NAME_TO_TYPE: Record<string, CMDBObjectTypeName> = {};
// Lookup maps - will be populated dynamically from database schema
let TYPE_ID_TO_NAME: Record<number, string> = {};
let JIRA_NAME_TO_TYPE: Record<string, string> = {};
let OBJECT_TYPES_CACHE: Record<string, { jiraTypeId: number; name: string; attributes: Array<{ jiraId: number; name: string; fieldName: string; type: string; isMultiple?: boolean }> }> = {};
// Build lookup maps from schema
for (const [typeName, typeDef] of Object.entries(OBJECT_TYPES)) {
TYPE_ID_TO_NAME[typeDef.jiraTypeId] = typeName as CMDBObjectTypeName;
JIRA_NAME_TO_TYPE[typeDef.name] = typeName as CMDBObjectTypeName;
/**
* Initialize lookup maps from database schema
*/
async function initializeLookupMaps(): Promise<void> {
try {
const schema = await schemaCacheService.getSchema();
OBJECT_TYPES_CACHE = {};
TYPE_ID_TO_NAME = {};
JIRA_NAME_TO_TYPE = {};
for (const [typeName, typeDef] of Object.entries(schema.objectTypes)) {
OBJECT_TYPES_CACHE[typeName] = {
jiraTypeId: typeDef.jiraTypeId,
name: typeDef.name,
attributes: typeDef.attributes.map(attr => ({
jiraId: attr.jiraId,
name: attr.name,
fieldName: attr.fieldName,
type: attr.type,
isMultiple: attr.isMultiple,
})),
};
TYPE_ID_TO_NAME[typeDef.jiraTypeId] = typeName;
JIRA_NAME_TO_TYPE[typeDef.name] = typeName;
}
} catch (error) {
logger.error('JiraAssetsClient: Failed to initialize lookup maps', error);
}
}
// =============================================================================
@@ -181,7 +209,8 @@ class JiraAssetsClient {
try {
await this.detectApiType();
const response = await fetch(`${this.baseUrl}/objectschema/${config.jiraSchemaId}`, {
// Test connection by fetching schemas list (no specific schema ID needed)
const response = await fetch(`${this.baseUrl}/objectschema/list`, {
headers: this.getHeaders(false), // Read operation - uses service account token
});
return response.ok;
@@ -191,17 +220,35 @@ class JiraAssetsClient {
}
}
async getObject(objectId: string): Promise<JiraAssetsObject | null> {
/**
* Get raw ObjectEntry for an object (for recursive processing)
*/
async getObjectEntry(objectId: string): Promise<ObjectEntry | null> {
try {
// Include attributes and deep attributes to get full details of referenced objects (including descriptions)
const url = `/object/${objectId}?includeAttributes=true&includeAttributesDeep=1`;
return await this.request<JiraAssetsObject>(url, {}, false); // Read operation
const url = `/object/${objectId}?includeAttributes=true&includeAttributesDeep=2`;
const entry = await this.request<ObjectEntry>(url, {}, false) as unknown as ObjectEntry; // Read operation
return entry;
} catch (error) {
// Check if this is a 404 (object not found / deleted)
if (error instanceof Error && error.message.includes('404')) {
logger.info(`JiraAssetsClient: Object ${objectId} not found in Jira (likely deleted)`);
throw new JiraObjectNotFoundError(objectId);
}
logger.error(`JiraAssetsClient: Failed to get object entry ${objectId}`, error);
return null;
}
}
async getObject(objectId: string): Promise<JiraAssetsObject | null> {
try {
const entry = await this.getObjectEntry(objectId);
if (!entry) return null;
return this.adaptObjectEntryToJiraAssetsObject(entry);
} catch (error) {
if (error instanceof JiraObjectNotFoundError) {
throw error;
}
logger.error(`JiraAssetsClient: Failed to get object ${objectId}`, error);
return null;
}
@@ -210,11 +257,26 @@ class JiraAssetsClient {
async searchObjects(
iql: string,
page: number = 1,
pageSize: number = 50
): Promise<{ objects: JiraAssetsObject[]; totalCount: number; hasMore: boolean }> {
pageSize: number = 50,
schemaId?: string
): Promise<{
objects: JiraAssetsObject[];
totalCount: number;
hasMore: boolean;
referencedObjects?: Array<{ entry: ObjectEntry; typeName: string }>;
rawEntries?: ObjectEntry[]; // Raw ObjectEntry format for recursive processing
}> {
await this.detectApiType();
let response: JiraAssetsSearchResponse;
// Schema ID must be provided explicitly (no default from config)
if (!schemaId) {
throw new Error('Schema ID is required for searchObjects. Please provide schemaId parameter.');
}
const effectiveSchemaId = schemaId;
// Use domain types for API requests
let payload: { objectEntries: ObjectEntry[]; totalCount?: number; totalFilterCount?: number; page?: number; pageSize?: number };
if (this.isDataCenter) {
// Try modern AQL endpoint first
@@ -224,10 +286,10 @@ class JiraAssetsClient {
page: page.toString(),
resultPerPage: pageSize.toString(),
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: effectiveSchemaId,
});
response = await this.request<JiraAssetsSearchResponse>(`/aql/objects?${params.toString()}`, {}, false); // Read operation
payload = await this.request<{ objectEntries: ObjectEntry[]; totalCount?: number; totalFilterCount?: number }>(`/aql/objects?${params.toString()}`, {}, false); // Read operation
} catch (error) {
// Fallback to deprecated IQL endpoint
logger.warn(`JiraAssetsClient: AQL endpoint failed, falling back to IQL: ${error}`);
@@ -236,51 +298,169 @@ class JiraAssetsClient {
page: page.toString(),
resultPerPage: pageSize.toString(),
includeAttributes: 'true',
includeAttributesDeep: '1',
objectSchemaId: config.jiraSchemaId,
includeAttributesDeep: '2',
objectSchemaId: effectiveSchemaId,
});
response = await this.request<JiraAssetsSearchResponse>(`/iql/objects?${params.toString()}`, {}, false); // Read operation
payload = await this.request<{ objectEntries: ObjectEntry[]; totalCount?: number; totalFilterCount?: number }>(`/iql/objects?${params.toString()}`, {}, false); // Read operation
}
} else {
// Jira Cloud uses POST for AQL
response = await this.request<JiraAssetsSearchResponse>('/aql/objects', {
payload = await this.request<{ objectEntries: ObjectEntry[]; totalCount?: number; totalFilterCount?: number }>('/aql/objects', {
method: 'POST',
body: JSON.stringify({
qlQuery: iql,
page,
resultPerPage: pageSize,
includeAttributes: true,
includeAttributesDeep: 1, // Include attributes of referenced objects (e.g., descriptions)
includeAttributesDeep: 2, // Include attributes of referenced objects (e.g., descriptions)
objectSchemaId: effectiveSchemaId,
}),
}, false); // Read operation
}
// Adapt to legacy response format for backward compatibility
const response = this.adaptAssetsPayloadToSearchResponse({ ...payload, page, pageSize });
const totalCount = response.totalFilterCount || response.totalCount || 0;
const hasMore = response.objectEntries.length === pageSize && page * pageSize < totalCount;
// Note: referencedObjects extraction removed - recursive extraction now happens in storeObjectTree
// via extractNestedReferencedObjects, which processes the entire object tree recursively
return {
objects: response.objectEntries || [],
totalCount,
hasMore,
referencedObjects: undefined, // No longer used - recursive extraction handles this
rawEntries: payload.objectEntries || [], // Return raw entries for recursive processing
};
}
/**
* Recursively extract all nested referenced objects from an object entry
* This function traverses the object tree and extracts all referenced objects
* at any depth, preventing infinite loops with circular references.
*
* @param entry - The object entry to extract nested references from
* @param processedIds - Set of already processed object IDs (to prevent duplicates and circular refs)
* @param maxDepth - Maximum depth to traverse (default: 5)
* @param currentDepth - Current depth in the tree (default: 0)
* @returns Array of extracted referenced objects with their type names
*/
extractNestedReferencedObjects(
entry: ObjectEntry,
processedIds: Set<string>,
maxDepth: number = 5,
currentDepth: number = 0
): Array<{ entry: ObjectEntry; typeName: string }> {
const result: Array<{ entry: ObjectEntry; typeName: string }> = [];
// Prevent infinite recursion
if (currentDepth >= maxDepth) {
logger.debug(`JiraAssetsClient: [Recursive] Max depth (${maxDepth}) reached for object ${entry.objectKey || entry.id}`);
return result;
}
const entryId = String(entry.id);
// Skip if already processed (handles circular references)
if (processedIds.has(entryId)) {
logger.debug(`JiraAssetsClient: [Recursive] Skipping already processed object ${entry.objectKey || entry.id} (circular reference detected)`);
return result;
}
processedIds.add(entryId);
logger.debug(`JiraAssetsClient: [Recursive] Extracting nested references from ${entry.objectKey || entry.id} at depth ${currentDepth}`);
// Initialize lookup maps if needed
if (Object.keys(TYPE_ID_TO_NAME).length === 0) {
// This is async, but we can't make this function async without breaking the call chain
// So we'll initialize it before calling this function
logger.warn('JiraAssetsClient: TYPE_ID_TO_NAME not initialized, type resolution may fail');
}
// Extract referenced objects from attributes
if (entry.attributes) {
for (const attr of entry.attributes) {
for (const val of attr.objectAttributeValues) {
if (isReferenceValue(val) && hasAttributes(val.referencedObject)) {
const refId = String(val.referencedObject.id);
// Skip if already processed
if (processedIds.has(refId)) {
continue;
}
const refTypeId = val.referencedObject.objectType?.id;
const refTypeName = TYPE_ID_TO_NAME[refTypeId] ||
JIRA_NAME_TO_TYPE[val.referencedObject.objectType?.name];
if (refTypeName) {
logger.debug(`JiraAssetsClient: [Recursive] Found nested reference: ${val.referencedObject.objectKey || refId} of type ${refTypeName} at depth ${currentDepth + 1}`);
// Add this referenced object to results
result.push({
entry: val.referencedObject as ObjectEntry,
typeName: refTypeName,
});
// Recursively extract nested references from this referenced object
const nested = this.extractNestedReferencedObjects(
val.referencedObject as ObjectEntry,
processedIds,
maxDepth,
currentDepth + 1
);
result.push(...nested);
} else {
logger.debug(`JiraAssetsClient: [Recursive] Could not resolve type name for referenced object ${refId} (typeId: ${refTypeId}, typeName: ${val.referencedObject.objectType?.name})`);
}
}
}
}
}
if (result.length > 0) {
logger.debug(`JiraAssetsClient: [Recursive] Extracted ${result.length} nested references from ${entry.objectKey || entry.id} at depth ${currentDepth}`);
}
return result;
}
/**
* Get the total count of objects for a specific type from Jira Assets
* This is more efficient than fetching all objects when you only need the count
* @param typeName - Type name (from database, e.g. "ApplicationComponent")
* @param schemaId - Optional schema ID (if not provided, uses mapping or default)
*/
async getObjectCount(typeName: CMDBObjectTypeName): Promise<number> {
const typeDef = OBJECT_TYPES[typeName];
async getObjectCount(typeName: string, schemaId?: string): Promise<number> {
// Ensure lookup maps are initialized
if (Object.keys(OBJECT_TYPES_CACHE).length === 0) {
await initializeLookupMaps();
}
const typeDef = OBJECT_TYPES_CACHE[typeName];
if (!typeDef) {
logger.warn(`JiraAssetsClient: Unknown type ${typeName}`);
return 0;
}
try {
// Get schema ID from mapping service if not provided
let effectiveSchemaId = schemaId;
if (!effectiveSchemaId) {
const { schemaMappingService } = await import('./schemaMappingService.js');
effectiveSchemaId = await schemaMappingService.getSchemaId(typeName);
}
// Skip if no schema ID is available (object type not configured)
if (!effectiveSchemaId || effectiveSchemaId.trim() === '') {
logger.debug(`JiraAssetsClient: No schema ID configured for ${typeName}, returning 0`);
return 0;
}
const iql = `objectType = "${typeDef.name}"`;
// Use pageSize=1 to minimize data transfer, we only need the totalCount
const result = await this.searchObjects(iql, 1, 1);
logger.debug(`JiraAssetsClient: ${typeName} has ${result.totalCount} objects in Jira Assets`);
const result = await this.searchObjects(iql, 1, 1, effectiveSchemaId);
logger.debug(`JiraAssetsClient: ${typeName} has ${result.totalCount} objects in Jira Assets (schema: ${effectiveSchemaId})`);
return result.totalCount;
} catch (error) {
logger.error(`JiraAssetsClient: Failed to get count for ${typeName}`, error);
@@ -289,29 +469,64 @@ class JiraAssetsClient {
}
async getAllObjectsOfType(
typeName: CMDBObjectTypeName,
batchSize: number = 40
): Promise<JiraAssetsObject[]> {
const typeDef = OBJECT_TYPES[typeName];
if (!typeDef) {
logger.warn(`JiraAssetsClient: Unknown type ${typeName}`);
return [];
typeName: string,
batchSize: number = 40,
schemaId?: string
): Promise<{
objects: JiraAssetsObject[];
referencedObjects: Array<{ entry: ObjectEntry; typeName: string }>;
rawEntries?: ObjectEntry[]; // Raw ObjectEntry format for recursive processing
}> {
// If typeName is a display name (not in cache), use it directly for IQL query
// Otherwise, look up the type definition
let objectTypeName = typeName;
// Try to find in cache first
if (Object.keys(OBJECT_TYPES_CACHE).length === 0) {
await initializeLookupMaps();
}
const typeDef = OBJECT_TYPES_CACHE[typeName];
if (typeDef) {
objectTypeName = typeDef.name; // Use the Jira name from cache
} else {
// Type not in cache - assume typeName is already the Jira display name
logger.debug(`JiraAssetsClient: Type ${typeName} not in cache, using as display name directly`);
}
// Get schema ID from mapping service if not provided
let effectiveSchemaId = schemaId;
if (!effectiveSchemaId) {
const { schemaMappingService } = await import('./schemaMappingService.js');
effectiveSchemaId = await schemaMappingService.getSchemaId(typeName);
}
if (!effectiveSchemaId) {
throw new Error(`No schema ID available for object type ${typeName}`);
}
const allObjects: JiraAssetsObject[] = [];
const rawEntries: ObjectEntry[] = []; // Store raw entries for recursive processing
let page = 1;
let hasMore = true;
while (hasMore) {
const iql = `objectType = "${typeDef.name}"`;
const result = await this.searchObjects(iql, page, batchSize);
const iql = `objectType = "${objectTypeName}"`;
const result = await this.searchObjects(iql, page, batchSize, effectiveSchemaId);
allObjects.push(...result.objects);
// Collect raw entries for recursive processing
if (result.rawEntries) {
rawEntries.push(...result.rawEntries);
}
hasMore = result.hasMore;
page++;
}
logger.info(`JiraAssetsClient: Fetched ${allObjects.length} ${typeName} objects`);
return allObjects;
logger.info(`JiraAssetsClient: Fetched ${allObjects.length} ${typeName} objects from schema ${effectiveSchemaId} (raw entries: ${rawEntries.length})`);
// Note: referencedObjects no longer collected - recursive extraction in storeObjectTree handles nested objects
return { objects: allObjects, referencedObjects: [], rawEntries };
}
async getUpdatedObjectsSince(
@@ -357,38 +572,232 @@ class JiraAssetsClient {
}
}
// ==========================================================================
// Adapter Functions (temporary - for backward compatibility)
// ==========================================================================
/**
* Adapt ObjectEntry from domain types to legacy JiraAssetsObject type
* This is a temporary adapter during migration
* Handles both ObjectEntry (domain) and legacy JiraAssetsObject formats
*/
adaptObjectEntryToJiraAssetsObject(entry: ObjectEntry | JiraAssetsObject | null): JiraAssetsObject | null {
if (!entry) return null;
// Check if already in legacy format (has 'attributes' as array with JiraAssetsAttribute)
if ('attributes' in entry && Array.isArray(entry.attributes) && entry.attributes.length > 0 && 'objectTypeAttributeId' in entry.attributes[0] && !('id' in entry.attributes[0])) {
// Validate the legacy format object has required fields
const legacyObj = entry as JiraAssetsObject;
if (legacyObj.id === null || legacyObj.id === undefined) {
logger.warn(`JiraAssetsClient: Legacy object missing id. ObjectKey: ${legacyObj.objectKey}, Label: ${legacyObj.label}`);
return null;
}
if (!legacyObj.objectKey || !String(legacyObj.objectKey).trim()) {
logger.warn(`JiraAssetsClient: Legacy object missing objectKey. ID: ${legacyObj.id}, Label: ${legacyObj.label}`);
return null;
}
if (!legacyObj.label || !String(legacyObj.label).trim()) {
logger.warn(`JiraAssetsClient: Legacy object missing label. ID: ${legacyObj.id}, ObjectKey: ${legacyObj.objectKey}`);
return null;
}
return legacyObj;
}
// Convert from ObjectEntry format
const domainEntry = entry as ObjectEntry;
// Validate required fields before conversion
if (domainEntry.id === null || domainEntry.id === undefined) {
logger.warn(`JiraAssetsClient: ObjectEntry missing id. ObjectKey: ${domainEntry.objectKey}, Label: ${domainEntry.label}`);
return null;
}
if (!domainEntry.objectKey || !String(domainEntry.objectKey).trim()) {
logger.warn(`JiraAssetsClient: ObjectEntry missing objectKey. ID: ${domainEntry.id}, Label: ${domainEntry.label}`);
return null;
}
if (!domainEntry.label || !String(domainEntry.label).trim()) {
logger.warn(`JiraAssetsClient: ObjectEntry missing label. ID: ${domainEntry.id}, ObjectKey: ${domainEntry.objectKey}`);
return null;
}
// Convert id - ensure it's a number
let objectId: number;
if (typeof domainEntry.id === 'string') {
const parsed = parseInt(domainEntry.id, 10);
if (isNaN(parsed)) {
logger.warn(`JiraAssetsClient: ObjectEntry id cannot be parsed as number: ${domainEntry.id}`);
return null;
}
objectId = parsed;
} else if (typeof domainEntry.id === 'number') {
objectId = domainEntry.id;
} else {
logger.warn(`JiraAssetsClient: ObjectEntry id has invalid type: ${typeof domainEntry.id}`);
return null;
}
return {
id: objectId,
objectKey: String(domainEntry.objectKey).trim(),
label: String(domainEntry.label).trim(),
objectType: domainEntry.objectType,
created: domainEntry.created || new Date().toISOString(),
updated: domainEntry.updated || new Date().toISOString(),
attributes: (domainEntry.attributes || []).map(attr => this.adaptObjectAttributeToJiraAssetsAttribute(attr)),
};
}
/**
* Adapt ObjectAttribute from domain types to legacy JiraAssetsAttribute type
*/
private adaptObjectAttributeToJiraAssetsAttribute(attr: ObjectAttribute): JiraAssetsAttribute {
return {
objectTypeAttributeId: attr.objectTypeAttributeId,
objectTypeAttribute: undefined, // Not in domain type, will be populated from schema if needed
objectAttributeValues: attr.objectAttributeValues.map(val => this.adaptObjectAttributeValue(val)),
};
}
/**
* Adapt ObjectAttributeValue from domain types to legacy format
*/
private adaptObjectAttributeValue(val: ObjectAttributeValue): {
value?: string;
displayValue?: string;
referencedObject?: { id: number; objectKey: string; label: string };
status?: { name: string };
} {
if (isReferenceValue(val)) {
const ref = val.referencedObject;
return {
displayValue: val.displayValue,
referencedObject: {
id: typeof ref.id === 'string' ? parseInt(ref.id, 10) : ref.id,
objectKey: ref.objectKey,
label: ref.label,
},
};
}
if (isSimpleValue(val)) {
return {
value: String(val.value),
displayValue: val.displayValue,
};
}
// StatusValue, ConfluenceValue, UserValue
return {
displayValue: val.displayValue,
status: 'status' in val ? { name: val.status.name } : undefined,
};
}
/**
* Adapt AssetsPayload (from domain types) to legacy JiraAssetsSearchResponse
*/
private adaptAssetsPayloadToSearchResponse(
payload: { objectEntries: ObjectEntry[]; totalCount?: number; totalFilterCount?: number; page?: number; pageSize?: number }
): JiraAssetsSearchResponse {
return {
objectEntries: payload.objectEntries.map(entry => this.adaptObjectEntryToJiraAssetsObject(entry)!).filter(Boolean),
totalCount: payload.totalCount || 0,
totalFilterCount: payload.totalFilterCount,
page: payload.page || 1,
pageSize: payload.pageSize || 50,
};
}
// ==========================================================================
// Object Parsing
// ==========================================================================
parseObject<T extends CMDBObject>(jiraObj: JiraAssetsObject): T | null {
async parseObject<T extends CMDBObject>(jiraObj: JiraAssetsObject): Promise<T | null> {
// Ensure lookup maps are initialized
if (Object.keys(OBJECT_TYPES_CACHE).length === 0) {
await initializeLookupMaps();
}
const typeId = jiraObj.objectType?.id;
const typeName = TYPE_ID_TO_NAME[typeId] || JIRA_NAME_TO_TYPE[jiraObj.objectType?.name];
if (!typeName) {
logger.warn(`JiraAssetsClient: Unknown object type for object ${jiraObj.objectKey || jiraObj.id}: ${jiraObj.objectType?.name} (ID: ${typeId})`);
// This is expected when repairing broken references - object types may not be configured
logger.debug(`JiraAssetsClient: Unknown object type for object ${jiraObj.objectKey || jiraObj.id}: ${jiraObj.objectType?.name} (ID: ${typeId}) - object type not configured, skipping`);
return null;
}
const typeDef = OBJECT_TYPES[typeName];
const typeDef = OBJECT_TYPES_CACHE[typeName];
if (!typeDef) {
logger.warn(`JiraAssetsClient: Type definition not found for type: ${typeName} (object: ${jiraObj.objectKey || jiraObj.id})`);
return null;
}
// Validate required fields from Jira object
if (jiraObj.id === null || jiraObj.id === undefined) {
logger.warn(`JiraAssetsClient: Object missing id field. ObjectKey: ${jiraObj.objectKey}, Label: ${jiraObj.label}, Type: ${jiraObj.objectType?.name}`);
throw new Error(`Cannot parse Jira object: missing id field`);
}
if (!jiraObj.objectKey || !String(jiraObj.objectKey).trim()) {
logger.warn(`JiraAssetsClient: Object missing objectKey. ID: ${jiraObj.id}, Label: ${jiraObj.label}, Type: ${jiraObj.objectType?.name}`);
throw new Error(`Cannot parse Jira object ${jiraObj.id}: missing objectKey`);
}
if (!jiraObj.label || !String(jiraObj.label).trim()) {
logger.warn(`JiraAssetsClient: Object missing label. ID: ${jiraObj.id}, ObjectKey: ${jiraObj.objectKey}, Type: ${jiraObj.objectType?.name}`);
throw new Error(`Cannot parse Jira object ${jiraObj.id}: missing label`);
}
// Ensure we have valid values before creating the result
const objectId = String(jiraObj.id || '');
const objectKey = String(jiraObj.objectKey || '').trim();
const label = String(jiraObj.label || '').trim();
// Double-check after conversion (in case String() produced "null" or "undefined")
if (!objectId || objectId === 'null' || objectId === 'undefined' || objectId === 'NaN') {
logger.error(`JiraAssetsClient: parseObject - invalid id after conversion. Original: ${jiraObj.id}, Converted: ${objectId}`);
throw new Error(`Cannot parse Jira object: invalid id after conversion (${objectId})`);
}
if (!objectKey || objectKey === 'null' || objectKey === 'undefined') {
logger.error(`JiraAssetsClient: parseObject - invalid objectKey after conversion. Original: ${jiraObj.objectKey}, Converted: ${objectKey}`);
throw new Error(`Cannot parse Jira object: invalid objectKey after conversion (${objectKey})`);
}
if (!label || label === 'null' || label === 'undefined') {
logger.error(`JiraAssetsClient: parseObject - invalid label after conversion. Original: ${jiraObj.label}, Converted: ${label}`);
throw new Error(`Cannot parse Jira object: invalid label after conversion (${label})`);
}
const result: Record<string, unknown> = {
id: jiraObj.id.toString(),
objectKey: jiraObj.objectKey,
label: jiraObj.label,
id: objectId,
objectKey: objectKey,
label: label,
_objectType: typeName,
_jiraUpdatedAt: jiraObj.updated || new Date().toISOString(),
_jiraCreatedAt: jiraObj.created || new Date().toISOString(),
};
// Parse each attribute based on schema
// IMPORTANT: Don't allow attributes to overwrite id, objectKey, or label
const protectedFields = new Set(['id', 'objectKey', 'label', '_objectType', '_jiraUpdatedAt', '_jiraCreatedAt']);
for (const attrDef of typeDef.attributes) {
// Skip if this attribute would overwrite a protected field
if (protectedFields.has(attrDef.fieldName)) {
logger.warn(`JiraAssetsClient: Skipping attribute ${attrDef.fieldName} (${attrDef.name}) - would overwrite protected field`);
continue;
}
const jiraAttr = this.findAttribute(jiraObj.attributes, attrDef.jiraId, attrDef.name);
const parsedValue = this.parseAttributeValue(jiraAttr, attrDef);
const parsedValue = this.parseAttributeValue(jiraAttr, {
type: attrDef.type,
isMultiple: attrDef.isMultiple ?? false, // Default to false if not specified
fieldName: attrDef.fieldName,
});
result[attrDef.fieldName] = parsedValue;
// Debug logging for Confluence Space field
@@ -420,6 +829,51 @@ class JiraAssetsClient {
}
}
// Final validation - ensure result has required fields
// This should never fail if the code above worked correctly, but it's a safety check
const finalId = String(result.id || '').trim();
const finalObjectKey = String(result.objectKey || '').trim();
const finalLabel = String(result.label || '').trim();
if (!finalId || finalId === 'null' || finalId === 'undefined' || finalId === 'NaN') {
logger.error(`JiraAssetsClient: parseObject result missing or invalid id after all processing. Result: ${JSON.stringify({
hasId: 'id' in result,
hasObjectKey: 'objectKey' in result,
hasLabel: 'label' in result,
id: result.id,
objectKey: result.objectKey,
label: result.label,
resultKeys: Object.keys(result),
jiraObj: {
id: jiraObj.id,
objectKey: jiraObj.objectKey,
label: jiraObj.label,
objectType: jiraObj.objectType?.name
}
})}`);
throw new Error(`Failed to parse Jira object: result missing or invalid id (${finalId})`);
}
if (!finalObjectKey || finalObjectKey === 'null' || finalObjectKey === 'undefined') {
logger.error(`JiraAssetsClient: parseObject result missing or invalid objectKey after all processing. Result: ${JSON.stringify({
id: result.id,
objectKey: result.objectKey,
label: result.label,
resultKeys: Object.keys(result)
})}`);
throw new Error(`Failed to parse Jira object: result missing or invalid objectKey (${finalObjectKey})`);
}
if (!finalLabel || finalLabel === 'null' || finalLabel === 'undefined') {
logger.error(`JiraAssetsClient: parseObject result missing or invalid label after all processing. Result: ${JSON.stringify({
id: result.id,
objectKey: result.objectKey,
label: result.label,
resultKeys: Object.keys(result)
})}`);
throw new Error(`Failed to parse Jira object: result missing or invalid label (${finalLabel})`);
}
return result as T;
}
@@ -449,27 +903,24 @@ class JiraAssetsClient {
return attrDef.isMultiple ? [] : null;
}
const values = jiraAttr.objectAttributeValues;
// Convert legacy attribute values to domain types for type guard usage
// This allows us to use the type guards while maintaining backward compatibility
const values = jiraAttr.objectAttributeValues as unknown as ObjectAttributeValue[];
// Use type guards from domain types
// Generic Confluence field detection: check if any value has a confluencePage
// This works for all Confluence fields regardless of their declared type (float, text, etc.)
// Type assertion needed because confluencePage is not in the type definition but exists at runtime
type AttributeValueWithConfluence = typeof values[0] & {
confluencePage?: { url?: string };
};
const valuesWithConfluence = values as AttributeValueWithConfluence[];
const hasConfluencePage = valuesWithConfluence.some(v => v.confluencePage);
const hasConfluencePage = values.some(v => 'confluencePage' in v && v.confluencePage);
if (hasConfluencePage) {
const confluencePage = valuesWithConfluence[0]?.confluencePage;
if (confluencePage?.url) {
logger.info(`[Confluence Field Parse] Found Confluence URL for field "${attrDef.fieldName || 'unknown'}": ${confluencePage.url}`);
const confluenceVal = values.find(v => 'confluencePage' in v && v.confluencePage) as ConfluenceValue | undefined;
if (confluenceVal?.confluencePage?.url) {
logger.info(`[Confluence Field Parse] Found Confluence URL for field "${attrDef.fieldName || 'unknown'}": ${confluenceVal.confluencePage.url}`);
// For multiple values, return array of URLs; for single, return the URL string
if (attrDef.isMultiple) {
return valuesWithConfluence
.filter(v => v.confluencePage?.url)
.map(v => v.confluencePage!.url);
return values
.filter((v): v is ConfluenceValue => 'confluencePage' in v && !!v.confluencePage)
.map(v => v.confluencePage.url);
}
return confluencePage.url;
return confluenceVal.confluencePage.url;
}
// Fallback to displayValue if no URL
const displayVal = values[0]?.displayValue;
@@ -482,12 +933,13 @@ class JiraAssetsClient {
switch (attrDef.type) {
case 'reference': {
// Use type guard to filter reference values
const refs = values
.filter(v => v.referencedObject)
.filter(isReferenceValue)
.map(v => ({
objectId: v.referencedObject!.id.toString(),
objectKey: v.referencedObject!.objectKey,
label: v.referencedObject!.label,
objectId: String(v.referencedObject.id),
objectKey: v.referencedObject.objectKey,
label: v.referencedObject.label,
} as ObjectReference));
return attrDef.isMultiple ? refs : refs[0] || null;
}
@@ -498,7 +950,14 @@ class JiraAssetsClient {
case 'email':
case 'select':
case 'user': {
const val = values[0]?.displayValue ?? values[0]?.value ?? null;
// Use type guard for simple values when available, otherwise fall back to legacy format
const firstVal = values[0];
let val: string | null = null;
if (isSimpleValue(firstVal)) {
val = String(firstVal.value);
} else {
val = firstVal?.displayValue ?? (firstVal as any)?.value ?? null;
}
// Strip HTML if present
if (val && typeof val === 'string' && val.includes('<')) {
return this.stripHtml(val);
@@ -507,14 +966,24 @@ class JiraAssetsClient {
}
case 'integer': {
const val = values[0]?.value;
return val ? parseInt(val, 10) : null;
const firstVal = values[0];
if (isSimpleValue(firstVal)) {
const val = typeof firstVal.value === 'number' ? firstVal.value : parseInt(String(firstVal.value), 10);
return isNaN(val) ? null : val;
}
const val = (firstVal as any)?.value;
return val ? parseInt(String(val), 10) : null;
}
case 'float': {
// Regular float parsing
const val = values[0]?.value;
const displayVal = values[0]?.displayValue;
const firstVal = values[0];
if (isSimpleValue(firstVal)) {
const val = typeof firstVal.value === 'number' ? firstVal.value : parseFloat(String(firstVal.value));
return isNaN(val) ? null : val;
}
const val = (firstVal as any)?.value;
const displayVal = firstVal?.displayValue;
// Try displayValue first, then value
if (displayVal !== undefined && displayVal !== null) {
const parsed = typeof displayVal === 'string' ? parseFloat(displayVal) : Number(displayVal);
@@ -528,25 +997,37 @@ class JiraAssetsClient {
}
case 'boolean': {
const val = values[0]?.value;
const firstVal = values[0];
if (isSimpleValue(firstVal)) {
return Boolean(firstVal.value);
}
const val = (firstVal as any)?.value;
return val === 'true' || val === 'Ja';
}
case 'date':
case 'datetime': {
return values[0]?.value ?? values[0]?.displayValue ?? null;
const firstVal = values[0];
if (isSimpleValue(firstVal)) {
return String(firstVal.value);
}
return firstVal?.displayValue ?? (firstVal as any)?.value ?? null;
}
case 'status': {
const statusVal = values[0]?.status;
if (statusVal) {
return statusVal.name || null;
const firstVal = values[0];
if ('status' in firstVal && firstVal.status) {
return firstVal.status.name || null;
}
return values[0]?.displayValue ?? values[0]?.value ?? null;
return firstVal?.displayValue ?? (firstVal as any)?.value ?? null;
}
default:
return values[0]?.displayValue ?? values[0]?.value ?? null;
const firstVal = values[0];
if (isSimpleValue(firstVal)) {
return String(firstVal.value);
}
return firstVal?.displayValue ?? (firstVal as any)?.value ?? null;
}
}

View File

@@ -1,893 +0,0 @@
import { calculateRequiredEffortApplicationManagement } from './effortCalculation.js';
import type {
ApplicationDetails,
ApplicationListItem,
ReferenceValue,
SearchFilters,
SearchResult,
ClassificationResult,
TeamDashboardData,
ApplicationStatus,
} from '../types/index.js';
// Mock application data for development/demo
const mockApplications: ApplicationDetails[] = [
{
id: '1',
key: 'APP-001',
name: 'Epic Hyperspace',
searchReference: 'EPIC-HS',
description: 'Elektronisch Patiëntendossier module voor klinische documentatie en workflow. Ondersteunt de volledige patiëntenzorg van intake tot ontslag.',
supplierProduct: 'Epic Systems / Hyperspace',
organisation: 'Zorg',
hostingType: { objectId: '1', key: 'HOST-1', name: 'On-premises' },
status: 'In Production',
businessImportance: 'Kritiek',
businessImpactAnalyse: { objectId: '1', key: 'BIA-1', name: 'BIA-2024-0042 (Klasse E)' },
systemOwner: 'J. Janssen',
businessOwner: 'Dr. A. van der Berg',
functionalApplicationManagement: 'Team EPD',
technicalApplicationManagement: 'Team Zorgapplicaties',
technicalApplicationManagementPrimary: 'Jan Jansen',
technicalApplicationManagementSecondary: 'Piet Pietersen',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '3', key: 'DYN-3', name: '3 - Hoog' },
complexityFactor: { objectId: '4', key: 'CMP-4', name: '4 - Zeer hoog' },
numberOfUsers: null,
governanceModel: { objectId: 'A', key: 'GOV-A', name: 'Centraal Beheer' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '2',
key: 'APP-002',
name: 'SAP Finance',
searchReference: 'SAP-FIN',
description: 'Enterprise Resource Planning systeem voor financiële administratie, budgettering en controlling.',
supplierProduct: 'SAP SE / SAP S/4HANA',
organisation: 'Bedrijfsvoering',
hostingType: { objectId: '3', key: 'HOST-3', name: 'Cloud' },
status: 'In Production',
businessImportance: 'Kritiek',
businessImpactAnalyse: { objectId: '2', key: 'BIA-2', name: 'BIA-2024-0015 (Klasse D)' },
systemOwner: 'M. de Groot',
businessOwner: 'P. Bakker',
functionalApplicationManagement: 'Team ERP',
technicalApplicationManagement: 'Team Bedrijfsapplicaties',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '2', key: 'DYN-2', name: '2 - Gemiddeld' },
complexityFactor: { objectId: '3', key: 'CMP-3', name: '3 - Hoog' },
numberOfUsers: null,
governanceModel: null,
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '3',
key: 'APP-003',
name: 'Philips IntelliSpace PACS',
searchReference: 'PACS',
description: 'Picture Archiving and Communication System voor opslag en weergave van medische beelden inclusief radiologie, CT en MRI.',
supplierProduct: 'Philips Healthcare / IntelliSpace PACS',
organisation: 'Zorg',
hostingType: { objectId: '1', key: 'HOST-1', name: 'On-premises' },
status: 'In Production',
businessImportance: 'Hoog',
businessImpactAnalyse: { objectId: '3', key: 'BIA-3', name: 'BIA-2024-0028 (Klasse D)' },
systemOwner: 'R. Hermans',
businessOwner: 'Dr. K. Smit',
functionalApplicationManagement: 'Team Beeldvorming',
technicalApplicationManagement: 'Team Zorgapplicaties',
medischeTechniek: true,
applicationFunctions: [],
dynamicsFactor: null,
complexityFactor: null,
numberOfUsers: null,
governanceModel: { objectId: 'C', key: 'GOV-C', name: 'Uitbesteed met ICMT-Regie' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '4',
key: 'APP-004',
name: 'ChipSoft HiX',
searchReference: 'HIX',
description: 'Ziekenhuisinformatiesysteem en EPD voor patiëntregistratie, zorgplanning en klinische workflow.',
supplierProduct: 'ChipSoft / HiX',
organisation: 'Zorg',
hostingType: { objectId: '1', key: 'HOST-1', name: 'On-premises' },
status: 'In Production',
businessImportance: 'Kritiek',
businessImpactAnalyse: { objectId: '5', key: 'BIA-5', name: 'BIA-2024-0001 (Klasse F)' },
systemOwner: 'T. van Dijk',
businessOwner: 'Dr. L. Mulder',
functionalApplicationManagement: 'Team ZIS',
technicalApplicationManagement: 'Team Zorgapplicaties',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '4', key: 'DYN-4', name: '4 - Zeer hoog' },
complexityFactor: { objectId: '4', key: 'CMP-4', name: '4 - Zeer hoog' },
numberOfUsers: null,
governanceModel: { objectId: 'A', key: 'GOV-A', name: 'Centraal Beheer' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '5',
key: 'APP-005',
name: 'TOPdesk',
searchReference: 'TOPDESK',
description: 'IT Service Management platform voor incident, problem en change management.',
supplierProduct: 'TOPdesk / TOPdesk Enterprise',
organisation: 'ICMT',
hostingType: { objectId: '2', key: 'HOST-2', name: 'SaaS' },
status: 'In Production',
businessImportance: 'Hoog',
businessImpactAnalyse: { objectId: '6', key: 'BIA-6', name: 'BIA-2024-0055 (Klasse C)' },
systemOwner: 'B. Willems',
businessOwner: 'H. Claessen',
functionalApplicationManagement: 'Team Servicedesk',
technicalApplicationManagement: 'Team ICT Beheer',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '2', key: 'DYN-2', name: '2 - Gemiddeld' },
complexityFactor: { objectId: '2', key: 'CMP-2', name: '2 - Gemiddeld' },
numberOfUsers: null,
governanceModel: null,
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '6',
key: 'APP-006',
name: 'Microsoft 365',
searchReference: 'M365',
description: 'Kantoorautomatisering suite met Teams, Outlook, SharePoint, OneDrive en Office applicaties.',
supplierProduct: 'Microsoft / Microsoft 365 E5',
organisation: 'ICMT',
hostingType: { objectId: '2', key: 'HOST-2', name: 'SaaS' },
status: 'In Production',
businessImportance: 'Kritiek',
businessImpactAnalyse: { objectId: '1', key: 'BIA-1', name: 'BIA-2024-0042 (Klasse E)' },
systemOwner: 'S. Jansen',
businessOwner: 'N. Peters',
functionalApplicationManagement: 'Team Werkplek',
technicalApplicationManagement: 'Team Cloud',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '3', key: 'DYN-3', name: '3 - Hoog' },
complexityFactor: { objectId: '3', key: 'CMP-3', name: '3 - Hoog' },
numberOfUsers: { objectId: '7', key: 'USR-7', name: '> 15.000' },
governanceModel: { objectId: 'A', key: 'GOV-A', name: 'Centraal Beheer' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '7',
key: 'APP-007',
name: 'Carestream Vue PACS',
searchReference: 'VUE-PACS',
description: 'Enterprise imaging platform voor radiologie en cardiologie beeldvorming.',
supplierProduct: 'Carestream Health / Vue PACS',
organisation: 'Zorg',
hostingType: { objectId: '1', key: 'HOST-1', name: 'On-premises' },
status: 'End of life',
businessImportance: 'Gemiddeld',
businessImpactAnalyse: { objectId: '9', key: 'BIA-9', name: 'BIA-2022-0089 (Klasse C)' },
systemOwner: 'R. Hermans',
businessOwner: 'Dr. K. Smit',
functionalApplicationManagement: 'Team Beeldvorming',
technicalApplicationManagement: 'Team Zorgapplicaties',
medischeTechniek: true,
applicationFunctions: [],
dynamicsFactor: { objectId: '1', key: 'DYN-1', name: '1 - Stabiel' },
complexityFactor: { objectId: '2', key: 'CMP-2', name: '2 - Gemiddeld' },
numberOfUsers: null,
governanceModel: null,
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '8',
key: 'APP-008',
name: 'AFAS Profit',
searchReference: 'AFAS',
description: 'HR en salarisadministratie systeem voor personeelsbeheer, tijdregistratie en verloning.',
supplierProduct: 'AFAS Software / Profit',
organisation: 'Bedrijfsvoering',
hostingType: { objectId: '2', key: 'HOST-2', name: 'SaaS' },
status: 'In Production',
businessImportance: 'Hoog',
businessImpactAnalyse: { objectId: '7', key: 'BIA-7', name: 'BIA-2024-0022 (Klasse D)' },
systemOwner: 'E. Hendriks',
businessOwner: 'C. van Leeuwen',
functionalApplicationManagement: 'Team HR',
technicalApplicationManagement: 'Team Bedrijfsapplicaties',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '2', key: 'DYN-2', name: '2 - Gemiddeld' },
complexityFactor: { objectId: '2', key: 'CMP-2', name: '2 - Gemiddeld' },
numberOfUsers: { objectId: '6', key: 'USR-6', name: '10.000 - 15.000' },
governanceModel: { objectId: 'C', key: 'GOV-C', name: 'Uitbesteed met ICMT-Regie' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '9',
key: 'APP-009',
name: 'Zenya',
searchReference: 'ZENYA',
description: 'Kwaliteitsmanagementsysteem voor protocollen, procedures en incidentmeldingen.',
supplierProduct: 'Infoland / Zenya',
organisation: 'Kwaliteit',
hostingType: { objectId: '2', key: 'HOST-2', name: 'SaaS' },
status: 'In Production',
businessImportance: 'Hoog',
businessImpactAnalyse: { objectId: '8', key: 'BIA-8', name: 'BIA-2024-0067 (Klasse C)' },
systemOwner: 'F. Bos',
businessOwner: 'I. Dekker',
functionalApplicationManagement: 'Team Kwaliteit',
technicalApplicationManagement: 'Team Bedrijfsapplicaties',
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '2', key: 'DYN-2', name: '2 - Gemiddeld' },
complexityFactor: { objectId: '1', key: 'CMP-1', name: '1 - Laag' },
numberOfUsers: { objectId: '4', key: 'USR-4', name: '2.000 - 5.000' },
governanceModel: { objectId: 'C', key: 'GOV-C', name: 'Uitbesteed met ICMT-Regie' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
{
id: '10',
key: 'APP-010',
name: 'Castor EDC',
searchReference: 'CASTOR',
description: 'Electronic Data Capture platform voor klinisch wetenschappelijk onderzoek en trials.',
supplierProduct: 'Castor / Castor EDC',
organisation: 'Onderzoek',
hostingType: { objectId: '2', key: 'HOST-2', name: 'SaaS' },
status: 'In Production',
businessImportance: 'Gemiddeld',
businessImpactAnalyse: null, // BIA-2024-0078 (Klasse B) not in mock list
systemOwner: 'G. Vos',
businessOwner: 'Prof. Dr. W. Maas',
functionalApplicationManagement: 'Team Onderzoek',
technicalApplicationManagement: null,
medischeTechniek: false,
applicationFunctions: [],
dynamicsFactor: { objectId: '1', key: 'DYN-1', name: '1 - Stabiel' },
complexityFactor: { objectId: '1', key: 'CMP-1', name: '1 - Laag' },
numberOfUsers: { objectId: '1', key: 'USR-1', name: '< 100' },
governanceModel: { objectId: 'D', key: 'GOV-D', name: 'Uitbesteed met Business-Regie' },
applicationSubteam: null,
applicationTeam: null,
applicationType: null,
platform: null,
requiredEffortApplicationManagement: null,
},
];
// Mock reference data
// Lookup tables mirroring the reference objects normally fetched from Jira Assets.
// The `factor` fields feed the required-effort calculation (multipliers).

// Dynamics factor: how frequently the application changes (more releases -> higher factor).
const mockDynamicsFactors: ReferenceValue[] = [
  { objectId: '1', key: 'DYN-1', name: '1 - Stabiel', summary: 'Weinig wijzigingen, < 2 releases/jaar', description: 'Weinig wijzigingen, < 2 releases/jaar', factor: 0.8 },
  { objectId: '2', key: 'DYN-2', name: '2 - Gemiddeld', summary: 'Regelmatige wijzigingen, 2-4 releases/jaar', description: 'Regelmatige wijzigingen, 2-4 releases/jaar', factor: 1.0 },
  { objectId: '3', key: 'DYN-3', name: '3 - Hoog', summary: 'Veel wijzigingen, > 4 releases/jaar', description: 'Veel wijzigingen, > 4 releases/jaar', factor: 1.2 },
  { objectId: '4', key: 'DYN-4', name: '4 - Zeer hoog', summary: 'Continu in beweging, grote transformaties', description: 'Continu in beweging, grote transformaties', factor: 1.5 },
];

// Complexity factor: integration/customization complexity (more complex -> higher factor).
const mockComplexityFactors: ReferenceValue[] = [
  { objectId: '1', key: 'CMP-1', name: '1 - Laag', summary: 'Standalone, weinig integraties', description: 'Standalone, weinig integraties', factor: 0.8 },
  { objectId: '2', key: 'CMP-2', name: '2 - Gemiddeld', summary: 'Enkele integraties, beperkt maatwerk', description: 'Enkele integraties, beperkt maatwerk', factor: 1.0 },
  { objectId: '3', key: 'CMP-3', name: '3 - Hoog', summary: 'Veel integraties, significant maatwerk', description: 'Veel integraties, significant maatwerk', factor: 1.3 },
  { objectId: '4', key: 'CMP-4', name: '4 - Zeer hoog', summary: 'Platform, uitgebreide governance', description: 'Platform, uitgebreide governance', factor: 1.6 },
];

// Number-of-users buckets, ordered smallest to largest (order drives dropdown sorting).
const mockNumberOfUsers: ReferenceValue[] = [
  { objectId: '1', key: 'USR-1', name: '< 100', order: 1, factor: 0.5 },
  { objectId: '2', key: 'USR-2', name: '100 - 500', order: 2, factor: 0.7 },
  { objectId: '3', key: 'USR-3', name: '500 - 2.000', order: 3, factor: 1.0 },
  { objectId: '4', key: 'USR-4', name: '2.000 - 5.000', order: 4, factor: 1.2 },
  { objectId: '5', key: 'USR-5', name: '5.000 - 10.000', order: 5, factor: 1.4 },
  { objectId: '6', key: 'USR-6', name: '10.000 - 15.000', order: 6, factor: 1.6 },
  { objectId: '7', key: 'USR-7', name: '> 15.000', order: 7, factor: 2.0 },
];

// Governance models A-E: who performs vs. who directs application management.
const mockGovernanceModels: ReferenceValue[] = [
  { objectId: 'A', key: 'GOV-A', name: 'Centraal Beheer', summary: 'ICMT voert volledig beheer uit', description: 'ICMT voert volledig beheer uit' },
  { objectId: 'B', key: 'GOV-B', name: 'Federatief Beheer', summary: 'ICMT + business delen beheer', description: 'ICMT + business delen beheer' },
  { objectId: 'C', key: 'GOV-C', name: 'Uitbesteed met ICMT-Regie', summary: 'Leverancier beheert, ICMT regisseert', description: 'Leverancier beheert, ICMT regisseert' },
  { objectId: 'D', key: 'GOV-D', name: 'Uitbesteed met Business-Regie', summary: 'Leverancier beheert, business regisseert', description: 'Leverancier beheert, business regisseert' },
  { objectId: 'E', key: 'GOV-E', name: 'Volledig Decentraal Beheer', summary: 'Business voert volledig beheer uit', description: 'Business voert volledig beheer uit' },
];

// Organisational units an application can belong to.
const mockOrganisations: ReferenceValue[] = [
  { objectId: '1', key: 'ORG-1', name: 'Zorg' },
  { objectId: '2', key: 'ORG-2', name: 'Bedrijfsvoering' },
  { objectId: '3', key: 'ORG-3', name: 'ICMT' },
  { objectId: '4', key: 'ORG-4', name: 'Kwaliteit' },
  { objectId: '5', key: 'ORG-5', name: 'Onderzoek' },
  { objectId: '6', key: 'ORG-6', name: 'Onderwijs' },
];

// Hosting deployment models.
const mockHostingTypes: ReferenceValue[] = [
  { objectId: '1', key: 'HOST-1', name: 'On-premises' },
  { objectId: '2', key: 'HOST-2', name: 'SaaS' },
  { objectId: '3', key: 'HOST-3', name: 'Cloud' },
  { objectId: '4', key: 'HOST-4', name: 'Hybrid' },
];

// Business impact analyses referenced by the mock applications.
const mockBusinessImpactAnalyses: ReferenceValue[] = [
  { objectId: '1', key: 'BIA-1', name: 'BIA-2024-0042 (Klasse E)' },
  { objectId: '2', key: 'BIA-2', name: 'BIA-2024-0015 (Klasse D)' },
  { objectId: '3', key: 'BIA-3', name: 'BIA-2024-0028 (Klasse D)' },
  { objectId: '4', key: 'BIA-4', name: 'BIA-2024-0035 (Klasse C)' },
  { objectId: '5', key: 'BIA-5', name: 'BIA-2024-0001 (Klasse F)' },
  { objectId: '6', key: 'BIA-6', name: 'BIA-2024-0055 (Klasse C)' },
  { objectId: '7', key: 'BIA-7', name: 'BIA-2024-0022 (Klasse D)' },
  { objectId: '8', key: 'BIA-8', name: 'BIA-2024-0067 (Klasse C)' },
  { objectId: '9', key: 'BIA-9', name: 'BIA-2022-0089 (Klasse C)' },
];

// Subteams used by the team dashboard grouping.
const mockApplicationSubteams: ReferenceValue[] = [
  { objectId: '1', key: 'SUBTEAM-1', name: 'Zorgapplicaties' },
  { objectId: '2', key: 'SUBTEAM-2', name: 'Bedrijfsvoering' },
  { objectId: '3', key: 'SUBTEAM-3', name: 'Infrastructuur' },
];

// Application types; 'Platform' and 'Workload' get special handling on the dashboard.
const mockApplicationTypes: ReferenceValue[] = [
  { objectId: '1', key: 'TYPE-1', name: 'Applicatie' },
  { objectId: '2', key: 'TYPE-2', name: 'Platform' },
  { objectId: '3', key: 'TYPE-3', name: 'Workload' },
];

// Classification history
// Module-level accumulator; MockDataService.addClassificationResult appends here.
const mockClassificationHistory: ClassificationResult[] = [];
// Mock data service
/**
 * In-memory implementation of the application data service, used for local
 * development and demos instead of the live Jira Assets backend. All reads
 * and writes operate on the mock fixtures declared above.
 *
 * Fix: getTeamDashboardData now computes the required management effort per
 * application via calculateRequiredEffortApplicationManagement (consistent
 * with searchApplications and getApplicationById) instead of copying the
 * stored field, which is null for every mock fixture and made every
 * dashboard effort total zero.
 */
export class MockDataService {
  // Working set of applications. NOTE: this is a shallow copy, so
  // updateApplication mutates the same objects referenced by mockApplications.
  private applications: ApplicationDetails[] = [...mockApplications];

  /**
   * Search applications with free-text and field filters and return one page
   * of list items. Required effort is computed on the fly for each row.
   *
   * @param filters  - combination of text search and per-field filters
   * @param page     - 1-based page number
   * @param pageSize - rows per page
   */
  async searchApplications(
    filters: SearchFilters,
    page: number = 1,
    pageSize: number = 25
  ): Promise<SearchResult> {
    let filtered = [...this.applications];

    // Apply search text filter (case-insensitive, matches several fields)
    if (filters.searchText) {
      const search = filters.searchText.toLowerCase();
      filtered = filtered.filter(
        (app) =>
          app.name.toLowerCase().includes(search) ||
          (app.description?.toLowerCase().includes(search) ?? false) ||
          (app.supplierProduct?.toLowerCase().includes(search) ?? false) ||
          (app.searchReference?.toLowerCase().includes(search) ?? false)
      );
    }

    // Apply status filter
    if (filters.statuses && filters.statuses.length > 0) {
      filtered = filtered.filter((app) => {
        // Handle empty/null status - treat as 'Undefined' for filtering
        const status = app.status || 'Undefined';
        return filters.statuses!.includes(status as ApplicationStatus);
      });
    }

    // Apply applicationFunction filter ('empty' = no functions assigned)
    if (filters.applicationFunction === 'empty') {
      filtered = filtered.filter((app) => app.applicationFunctions.length === 0);
    } else if (filters.applicationFunction === 'filled') {
      filtered = filtered.filter((app) => app.applicationFunctions.length > 0);
    }

    // Apply governanceModel filter
    if (filters.governanceModel === 'empty') {
      filtered = filtered.filter((app) => !app.governanceModel);
    } else if (filters.governanceModel === 'filled') {
      filtered = filtered.filter((app) => !!app.governanceModel);
    }

    // Apply dynamicsFactor filter
    if (filters.dynamicsFactor === 'empty') {
      filtered = filtered.filter((app) => !app.dynamicsFactor);
    } else if (filters.dynamicsFactor === 'filled') {
      filtered = filtered.filter((app) => !!app.dynamicsFactor);
    }

    // Apply complexityFactor filter
    if (filters.complexityFactor === 'empty') {
      filtered = filtered.filter((app) => !app.complexityFactor);
    } else if (filters.complexityFactor === 'filled') {
      filtered = filtered.filter((app) => !!app.complexityFactor);
    }

    // Apply applicationSubteam filter
    if (filters.applicationSubteam === 'empty') {
      filtered = filtered.filter((app) => !app.applicationSubteam);
    } else if (filters.applicationSubteam === 'filled') {
      filtered = filtered.filter((app) => !!app.applicationSubteam);
    }

    // Apply applicationType filter
    if (filters.applicationType === 'empty') {
      filtered = filtered.filter((app) => !app.applicationType);
    } else if (filters.applicationType === 'filled') {
      filtered = filtered.filter((app) => !!app.applicationType);
    }

    // Apply organisation filter (exact match)
    if (filters.organisation) {
      filtered = filtered.filter((app) => app.organisation === filters.organisation);
    }

    // Apply hostingType filter (matches by display name or key)
    if (filters.hostingType) {
      filtered = filtered.filter((app) => {
        if (!app.hostingType) return false;
        return app.hostingType.name === filters.hostingType || app.hostingType.key === filters.hostingType;
      });
    }

    if (filters.businessImportance) {
      filtered = filtered.filter((app) => app.businessImportance === filters.businessImportance);
    }

    // Paginate the filtered result set
    const totalCount = filtered.length;
    const totalPages = Math.ceil(totalCount / pageSize);
    const startIndex = (page - 1) * pageSize;
    const paginatedApps = filtered.slice(startIndex, startIndex + pageSize);

    return {
      applications: paginatedApps.map((app) => {
        // Effort is derived from the factor fields, never read from storage
        const effort = calculateRequiredEffortApplicationManagement(app);
        return {
          id: app.id,
          key: app.key,
          name: app.name,
          status: app.status,
          applicationFunctions: app.applicationFunctions,
          governanceModel: app.governanceModel,
          dynamicsFactor: app.dynamicsFactor,
          complexityFactor: app.complexityFactor,
          applicationSubteam: app.applicationSubteam,
          applicationTeam: app.applicationTeam,
          applicationType: app.applicationType,
          platform: app.platform,
          requiredEffortApplicationManagement: effort,
        };
      }),
      totalCount,
      currentPage: page,
      pageSize,
      totalPages,
    };
  }

  /**
   * Fetch full application details by id, with the required effort computed
   * on the fly. Returns null when the id is unknown.
   */
  async getApplicationById(id: string): Promise<ApplicationDetails | null> {
    const app = this.applications.find((app) => app.id === id);
    if (!app) return null;
    // Calculate required effort
    const effort = calculateRequiredEffortApplicationManagement(app);
    return {
      ...app,
      requiredEffortApplicationManagement: effort,
    };
  }

  /**
   * Apply a partial update to an application. Only fields explicitly present
   * (not undefined) are written, so callers can update a single field.
   *
   * @returns true when the application exists and was updated, false otherwise
   */
  async updateApplication(
    id: string,
    updates: {
      applicationFunctions?: ReferenceValue[];
      dynamicsFactor?: ReferenceValue;
      complexityFactor?: ReferenceValue;
      numberOfUsers?: ReferenceValue;
      governanceModel?: ReferenceValue;
      applicationSubteam?: ReferenceValue;
      applicationTeam?: ReferenceValue;
      applicationType?: ReferenceValue;
      hostingType?: ReferenceValue;
      businessImpactAnalyse?: ReferenceValue;
    }
  ): Promise<boolean> {
    const index = this.applications.findIndex((app) => app.id === id);
    if (index === -1) return false;
    const app = this.applications[index];
    if (updates.applicationFunctions !== undefined) {
      app.applicationFunctions = updates.applicationFunctions;
    }
    if (updates.dynamicsFactor !== undefined) {
      app.dynamicsFactor = updates.dynamicsFactor;
    }
    if (updates.complexityFactor !== undefined) {
      app.complexityFactor = updates.complexityFactor;
    }
    if (updates.numberOfUsers !== undefined) {
      app.numberOfUsers = updates.numberOfUsers;
    }
    if (updates.governanceModel !== undefined) {
      app.governanceModel = updates.governanceModel;
    }
    if (updates.applicationSubteam !== undefined) {
      app.applicationSubteam = updates.applicationSubteam;
    }
    if (updates.applicationTeam !== undefined) {
      app.applicationTeam = updates.applicationTeam;
    }
    if (updates.applicationType !== undefined) {
      app.applicationType = updates.applicationType;
    }
    if (updates.hostingType !== undefined) {
      app.hostingType = updates.hostingType;
    }
    if (updates.businessImpactAnalyse !== undefined) {
      app.businessImpactAnalyse = updates.businessImpactAnalyse;
    }
    return true;
  }

  /** Reference data: dynamics factor options. */
  async getDynamicsFactors(): Promise<ReferenceValue[]> {
    return mockDynamicsFactors;
  }

  /** Reference data: complexity factor options. */
  async getComplexityFactors(): Promise<ReferenceValue[]> {
    return mockComplexityFactors;
  }

  /** Reference data: number-of-users buckets. */
  async getNumberOfUsers(): Promise<ReferenceValue[]> {
    return mockNumberOfUsers;
  }

  /** Reference data: governance model options. */
  async getGovernanceModels(): Promise<ReferenceValue[]> {
    return mockGovernanceModels;
  }

  /** Reference data: organisation options. */
  async getOrganisations(): Promise<ReferenceValue[]> {
    return mockOrganisations;
  }

  /** Reference data: hosting type options. */
  async getHostingTypes(): Promise<ReferenceValue[]> {
    return mockHostingTypes;
  }

  /** Reference data: business impact analyses. */
  async getBusinessImpactAnalyses(): Promise<ReferenceValue[]> {
    return mockBusinessImpactAnalyses;
  }

  async getApplicationManagementHosting(): Promise<ReferenceValue[]> {
    // Mock Application Management - Hosting values (v25)
    return [
      { objectId: '1', key: 'AMH-1', name: 'On-Premises' },
      { objectId: '2', key: 'AMH-2', name: 'Azure - Eigen beheer' },
      { objectId: '3', key: 'AMH-3', name: 'Azure - Delegated Management' },
      { objectId: '4', key: 'AMH-4', name: 'Extern (SaaS)' },
    ];
  }

  async getApplicationManagementTAM(): Promise<ReferenceValue[]> {
    // Mock Application Management - TAM values
    return [
      { objectId: '1', key: 'TAM-1', name: 'ICMT' },
      { objectId: '2', key: 'TAM-2', name: 'Business' },
      { objectId: '3', key: 'TAM-3', name: 'Leverancier' },
    ];
  }

  async getApplicationFunctions(): Promise<ReferenceValue[]> {
    // Return empty for mock - in real implementation, this comes from Jira
    return [];
  }

  async getApplicationSubteams(): Promise<ReferenceValue[]> {
    // Return empty for mock - in real implementation, this comes from Jira
    return [];
  }

  async getApplicationTypes(): Promise<ReferenceValue[]> {
    // Return empty for mock - in real implementation, this comes from Jira
    return [];
  }

  async getBusinessImportance(): Promise<ReferenceValue[]> {
    // Return empty for mock - in real implementation, this comes from Jira
    return [];
  }

  async getApplicationFunctionCategories(): Promise<ReferenceValue[]> {
    // Return empty for mock - in real implementation, this comes from Jira
    return [];
  }

  /**
   * Aggregate KPI counts over the active (non-Closed) applications:
   * classified vs. unclassified, plus distributions by status and
   * governance model.
   */
  async getStats() {
    // Filter out applications with status "Closed" for KPIs
    const activeApplications = this.applications.filter((a) => a.status !== 'Closed');
    const total = activeApplications.length;
    const classified = activeApplications.filter((a) => a.applicationFunctions.length > 0).length;
    const unclassified = total - classified;

    const byStatus: Record<string, number> = {};
    const byGovernanceModel: Record<string, number> = {};
    activeApplications.forEach((app) => {
      if (app.status) {
        byStatus[app.status] = (byStatus[app.status] || 0) + 1;
      }
      if (app.governanceModel) {
        byGovernanceModel[app.governanceModel.name] =
          (byGovernanceModel[app.governanceModel.name] || 0) + 1;
      }
    });

    return {
      totalApplications: total,
      classifiedCount: classified,
      unclassifiedCount: unclassified,
      byStatus,
      byDomain: {},
      byGovernanceModel,
      recentClassifications: mockClassificationHistory.slice(-10),
    };
  }

  /** Record a classification result in the module-level history. */
  addClassificationResult(result: ClassificationResult): void {
    mockClassificationHistory.push(result);
  }

  /** Return a defensive copy of the classification history. */
  getClassificationHistory(): ClassificationResult[] {
    return [...mockClassificationHistory];
  }

  /**
   * Build the team dashboard: applications grouped by subteam, with platforms
   * carrying their workloads, plus an "unassigned" bucket for applications
   * without a subteam.
   *
   * @param excludedStatuses - statuses to omit from the dashboard entirely
   */
  async getTeamDashboardData(excludedStatuses: ApplicationStatus[] = []): Promise<TeamDashboardData> {
    // Convert ApplicationDetails to ApplicationListItem for dashboard
    let listItems: ApplicationListItem[] = this.applications.map(app => ({
      id: app.id,
      key: app.key,
      name: app.name,
      status: app.status,
      applicationFunctions: app.applicationFunctions,
      governanceModel: app.governanceModel,
      dynamicsFactor: app.dynamicsFactor,
      complexityFactor: app.complexityFactor,
      applicationSubteam: app.applicationSubteam,
      applicationTeam: app.applicationTeam,
      applicationType: app.applicationType,
      platform: app.platform,
      // FIX: compute effort like searchApplications/getApplicationById do.
      // The stored field is null in every mock fixture, which previously made
      // all dashboard effort totals zero.
      requiredEffortApplicationManagement: calculateRequiredEffortApplicationManagement(app),
    }));

    // Filter out excluded statuses (apps without a status are always kept)
    if (excludedStatuses.length > 0) {
      listItems = listItems.filter(app => !app.status || !excludedStatuses.includes(app.status));
    }

    // Separate applications into Platforms, Workloads, and regular applications
    const platforms: ApplicationListItem[] = [];
    const workloads: ApplicationListItem[] = [];
    const regularApplications: ApplicationListItem[] = [];
    for (const app of listItems) {
      const isPlatform = app.applicationType?.name === 'Platform';
      // An app that references a platform is treated as a workload of it
      const isWorkload = app.platform !== null;
      if (isPlatform) {
        platforms.push(app);
      } else if (isWorkload) {
        workloads.push(app);
      } else {
        regularApplications.push(app);
      }
    }

    // Group workloads by their platform
    const workloadsByPlatform = new Map<string, ApplicationListItem[]>();
    for (const workload of workloads) {
      const platformId = workload.platform!.objectId;
      if (!workloadsByPlatform.has(platformId)) {
        workloadsByPlatform.set(platformId, []);
      }
      workloadsByPlatform.get(platformId)!.push(workload);
    }

    // Build PlatformWithWorkloads structures (platform effort + workload effort)
    const platformsWithWorkloads: import('../types/index.js').PlatformWithWorkloads[] = [];
    for (const platform of platforms) {
      const platformWorkloads = workloadsByPlatform.get(platform.id) || [];
      const platformEffort = platform.requiredEffortApplicationManagement || 0;
      const workloadsEffort = platformWorkloads.reduce((sum, w) => sum + (w.requiredEffortApplicationManagement || 0), 0);
      platformsWithWorkloads.push({
        platform,
        workloads: platformWorkloads,
        platformEffort,
        workloadsEffort,
        totalEffort: platformEffort + workloadsEffort,
      });
    }

    // Group all applications (regular + platforms + workloads) by subteam
    const subteamMap = new Map<string, {
      regular: ApplicationListItem[];
      platforms: import('../types/index.js').PlatformWithWorkloads[];
    }>();
    const unassigned: {
      regular: ApplicationListItem[];
      platforms: import('../types/index.js').PlatformWithWorkloads[];
    } = {
      regular: [],
      platforms: [],
    };

    // Group regular applications by subteam
    for (const app of regularApplications) {
      if (app.applicationSubteam) {
        const subteamId = app.applicationSubteam.objectId;
        if (!subteamMap.has(subteamId)) {
          subteamMap.set(subteamId, { regular: [], platforms: [] });
        }
        subteamMap.get(subteamId)!.regular.push(app);
      } else {
        unassigned.regular.push(app);
      }
    }

    // Group platforms by subteam (workloads follow their platform's subteam)
    for (const platformWithWorkloads of platformsWithWorkloads) {
      const platform = platformWithWorkloads.platform;
      if (platform.applicationSubteam) {
        const subteamId = platform.applicationSubteam.objectId;
        if (!subteamMap.has(subteamId)) {
          subteamMap.set(subteamId, { regular: [], platforms: [] });
        }
        subteamMap.get(subteamId)!.platforms.push(platformWithWorkloads);
      } else {
        unassigned.platforms.push(platformWithWorkloads);
      }
    }

    // Build subteams from mock data
    const allSubteams = mockApplicationSubteams;
    const subteams: import('../types/index.js').TeamDashboardSubteam[] = allSubteams.map(subteamRef => {
      const subteamData = subteamMap.get(subteamRef.objectId) || { regular: [], platforms: [] };
      const regularApps = subteamData.regular;
      const platforms = subteamData.platforms;

      // Calculate total effort: regular apps + platforms (including their workloads)
      const regularEffort = regularApps.reduce((sum, app) =>
        sum + (app.requiredEffortApplicationManagement || 0), 0
      );
      const platformsEffort = platforms.reduce((sum, p) => sum + p.totalEffort, 0);
      const totalEffort = regularEffort + platformsEffort;

      // Calculate total application count: regular apps + platforms + workloads
      const platformsCount = platforms.length;
      const workloadsCount = platforms.reduce((sum, p) => sum + p.workloads.length, 0);
      const applicationCount = regularApps.length + platformsCount + workloadsCount;

      // Calculate governance model distribution (including platforms and workloads)
      const byGovernanceModel: Record<string, number> = {};
      for (const app of regularApps) {
        const govModel = app.governanceModel?.name || 'Niet ingesteld';
        byGovernanceModel[govModel] = (byGovernanceModel[govModel] || 0) + 1;
      }
      for (const platformWithWorkloads of platforms) {
        const platform = platformWithWorkloads.platform;
        const govModel = platform.governanceModel?.name || 'Niet ingesteld';
        byGovernanceModel[govModel] = (byGovernanceModel[govModel] || 0) + 1;
        // Also count workloads
        for (const workload of platformWithWorkloads.workloads) {
          const workloadGovModel = workload.governanceModel?.name || 'Niet ingesteld';
          byGovernanceModel[workloadGovModel] = (byGovernanceModel[workloadGovModel] || 0) + 1;
        }
      }

      return {
        subteam: subteamRef,
        applications: regularApps,
        platforms,
        totalEffort,
        minEffort: totalEffort * 0.8, // Mock: min is 80% of total
        maxEffort: totalEffort * 1.2, // Mock: max is 120% of total
        applicationCount,
        byGovernanceModel,
      };
    }).filter(s => s.applicationCount > 0); // Only include subteams with apps

    // Create a virtual team containing all subteams (since Team doesn't exist in mock data)
    const virtualTeam: import('../types/index.js').TeamDashboardTeam = {
      team: {
        objectId: 'mock-team-1',
        key: 'TEAM-1',
        name: 'Mock Team',
        teamType: 'Business',
      },
      subteams,
      totalEffort: subteams.reduce((sum, s) => sum + s.totalEffort, 0),
      minEffort: subteams.reduce((sum, s) => sum + s.minEffort, 0),
      maxEffort: subteams.reduce((sum, s) => sum + s.maxEffort, 0),
      applicationCount: subteams.reduce((sum, s) => sum + s.applicationCount, 0),
      byGovernanceModel: subteams.reduce((acc, s) => {
        for (const [key, count] of Object.entries(s.byGovernanceModel)) {
          acc[key] = (acc[key] || 0) + count;
        }
        return acc;
      }, {} as Record<string, number>),
    };

    // Calculate unassigned totals
    const unassignedRegularEffort = unassigned.regular.reduce((sum, app) =>
      sum + (app.requiredEffortApplicationManagement || 0), 0
    );
    const unassignedPlatformsEffort = unassigned.platforms.reduce((sum, p) => sum + p.totalEffort, 0);
    const unassignedTotalEffort = unassignedRegularEffort + unassignedPlatformsEffort;
    const unassignedPlatformsCount = unassigned.platforms.length;
    const unassignedWorkloadsCount = unassigned.platforms.reduce((sum, p) => sum + p.workloads.length, 0);
    const unassignedApplicationCount = unassigned.regular.length + unassignedPlatformsCount + unassignedWorkloadsCount;

    // Calculate governance model distribution for unassigned
    const unassignedByGovernanceModel: Record<string, number> = {};
    for (const app of unassigned.regular) {
      const govModel = app.governanceModel?.name || 'Niet ingesteld';
      unassignedByGovernanceModel[govModel] = (unassignedByGovernanceModel[govModel] || 0) + 1;
    }
    for (const platformWithWorkloads of unassigned.platforms) {
      const platform = platformWithWorkloads.platform;
      const govModel = platform.governanceModel?.name || 'Niet ingesteld';
      unassignedByGovernanceModel[govModel] = (unassignedByGovernanceModel[govModel] || 0) + 1;
      for (const workload of platformWithWorkloads.workloads) {
        const workloadGovModel = workload.governanceModel?.name || 'Niet ingesteld';
        unassignedByGovernanceModel[workloadGovModel] = (unassignedByGovernanceModel[workloadGovModel] || 0) + 1;
      }
    }

    return {
      teams: subteams.length > 0 ? [virtualTeam] : [],
      unassigned: {
        subteam: null,
        applications: unassigned.regular,
        platforms: unassigned.platforms,
        totalEffort: unassignedTotalEffort,
        minEffort: unassignedTotalEffort * 0.8, // Mock: min is 80% of total
        maxEffort: unassignedTotalEffort * 1.2, // Mock: max is 120% of total
        applicationCount: unassignedApplicationCount,
        byGovernanceModel: unassignedByGovernanceModel,
      },
    };
  }
}
// Shared singleton instance consumed by route handlers when mock mode is active.
export const mockDataService = new MockDataService();

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,277 @@
/**
* Generic Query Builder
*
* Builds SQL queries dynamically based on filters and schema.
*/
import { logger } from './logger.js';
import { schemaDiscoveryService } from './schemaDiscoveryService.js';
import type { CMDBObjectTypeName } from '../generated/jira-types.js';
import type { AttributeDefinition } from '../generated/jira-schema.js';
/**
 * Builds SQL fragments (WHERE / ORDER BY / LIMIT) over the local cache tables
 * (`objects`, `attributes`, `attribute_values`) from schema-driven filters.
 *
 * All filter values are bound via positional `?` placeholders; order/limit
 * fragments are whitelisted or numerically sanitized before interpolation.
 *
 * Fix: buildPagination previously used `limit || 100`, silently turning an
 * explicit limit of 0 into 100, and interpolated limit/offset into the SQL
 * string without sanitization. Both values are now nullish-defaulted and
 * coerced to safe non-negative integers.
 */
class QueryBuilder {
  /**
   * Build WHERE clause from filters.
   *
   * @param filters  - map of field name -> filter value (undefined/null skipped)
   * @param typeName - CMDB object type the query targets
   * @returns the WHERE clause text plus its positional parameters
   */
  async buildWhereClause(
    filters: Record<string, unknown>,
    typeName: CMDBObjectTypeName
  ): Promise<{ whereClause: string; params: unknown[] }> {
    // Seeded with the object-type check, so a WHERE clause is always emitted.
    const conditions: string[] = ['o.object_type_name = ?'];
    const params: unknown[] = [typeName];
    let paramIndex = 2;
    for (const [fieldName, filterValue] of Object.entries(filters)) {
      if (filterValue === undefined || filterValue === null) continue;
      // Unknown fields are skipped (logged at debug) rather than failing the query.
      const attrDef = await schemaDiscoveryService.getAttribute(typeName, fieldName);
      if (!attrDef) {
        logger.debug(`QueryBuilder: Unknown field ${fieldName} for type ${typeName}, skipping`);
        continue;
      }
      const condition = this.buildFilterCondition(fieldName, filterValue, attrDef, paramIndex);
      if (condition.condition) {
        conditions.push(condition.condition);
        params.push(...condition.params);
        paramIndex += condition.params.length;
      }
    }
    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
    return { whereClause, params };
  }

  /**
   * Build the SQL condition (an EXISTS/NOT EXISTS subquery against the EAV
   * tables) for one field. Returns an empty condition when the filter shape
   * does not apply to the attribute's type.
   *
   * @param fieldName       - attribute field name
   * @param filterValue     - raw filter value; may be a scalar, an array (for
   *                          multi-valued references), or an operator object
   *                          ({ exists, empty, contains, objectId, objectKey, label })
   * @param attrDef         - schema definition driving which branch applies
   * @param startParamIndex - kept for signature compatibility; placeholders are
   *                          positional `?` so the index is not used here
   */
  buildFilterCondition(
    fieldName: string,
    filterValue: unknown,
    attrDef: AttributeDefinition,
    startParamIndex: number
  ): { condition: string; params: unknown[] } {
    // Handle special operators
    if (typeof filterValue === 'object' && filterValue !== null && !Array.isArray(filterValue)) {
      const filterObj = filterValue as Record<string, unknown>;
      // Exists check: any value present for the field
      if (filterObj.exists === true) {
        return {
          condition: `EXISTS (
          SELECT 1 FROM attribute_values av
          JOIN attributes a ON av.attribute_id = a.id
          WHERE av.object_id = o.id AND a.field_name = ?
        )`,
          params: [fieldName]
        };
      }
      // Empty check: no value present for the field
      if (filterObj.empty === true) {
        return {
          condition: `NOT EXISTS (
          SELECT 1 FROM attribute_values av
          JOIN attributes a ON av.attribute_id = a.id
          WHERE av.object_id = o.id AND a.field_name = ?
        )`,
          params: [fieldName]
        };
      }
      // Contains (case-insensitive text search), only for textual attributes
      if (filterObj.contains !== undefined && typeof filterObj.contains === 'string') {
        if (attrDef.type === 'text' || attrDef.type === 'textarea') {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND LOWER(av.text_value) LIKE LOWER(?)
          )`,
            params: [fieldName, `%${filterObj.contains}%`]
          };
        }
      }
      // Reference filters: match by referenced object's id, key, or label
      if (attrDef.type === 'reference') {
        if (filterObj.objectId !== undefined) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND av.reference_object_id = ?
          )`,
            params: [fieldName, String(filterObj.objectId)]
          };
        }
        if (filterObj.objectKey !== undefined) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            JOIN objects ref_obj ON av.reference_object_id = ref_obj.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND ref_obj.object_key = ?
          )`,
            params: [fieldName, String(filterObj.objectKey)]
          };
        }
        if (filterObj.label !== undefined) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            JOIN objects ref_obj ON av.reference_object_id = ref_obj.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND LOWER(ref_obj.label) = LOWER(?)
          )`,
            params: [fieldName, String(filterObj.label)]
          };
        }
      }
    }
    // Handle array filters (for multiple reference fields) - ORed together
    if (attrDef.isMultiple && Array.isArray(filterValue)) {
      if (attrDef.type === 'reference') {
        const conditions: string[] = [];
        const params: unknown[] = [];
        for (const val of filterValue) {
          if (typeof val === 'object' && val !== null) {
            const ref = val as { objectId?: string; objectKey?: string };
            if (ref.objectId) {
              conditions.push(`EXISTS (
              SELECT 1 FROM attribute_values av
              JOIN attributes a ON av.attribute_id = a.id
              WHERE av.object_id = o.id
                AND a.field_name = ?
                AND av.reference_object_id = ?
            )`);
              params.push(fieldName, ref.objectId);
            } else if (ref.objectKey) {
              conditions.push(`EXISTS (
              SELECT 1 FROM attribute_values av
              JOIN attributes a ON av.attribute_id = a.id
              JOIN objects ref_obj ON av.reference_object_id = ref_obj.id
              WHERE av.object_id = o.id
                AND a.field_name = ?
                AND ref_obj.object_key = ?
            )`);
              params.push(fieldName, ref.objectKey);
            }
          }
        }
        if (conditions.length > 0) {
          return { condition: `(${conditions.join(' OR ')})`, params };
        }
      }
    }
    // Simple value filters, dispatched on the attribute's storage column
    if (attrDef.type === 'reference') {
      if (typeof filterValue === 'object' && filterValue !== null) {
        const ref = filterValue as { objectId?: string; objectKey?: string; label?: string };
        if (ref.objectId) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND av.reference_object_id = ?
          )`,
            params: [fieldName, ref.objectId]
          };
        } else if (ref.objectKey) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            JOIN objects ref_obj ON av.reference_object_id = ref_obj.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND ref_obj.object_key = ?
          )`,
            params: [fieldName, ref.objectKey]
          };
        } else if (ref.label) {
          return {
            condition: `EXISTS (
            SELECT 1 FROM attribute_values av
            JOIN attributes a ON av.attribute_id = a.id
            JOIN objects ref_obj ON av.reference_object_id = ref_obj.id
            WHERE av.object_id = o.id
              AND a.field_name = ?
              AND LOWER(ref_obj.label) = LOWER(?)
          )`,
            params: [fieldName, ref.label]
          };
        }
      }
    } else if (attrDef.type === 'text' || attrDef.type === 'textarea' || attrDef.type === 'url' || attrDef.type === 'email' || attrDef.type === 'select' || attrDef.type === 'user' || attrDef.type === 'status') {
      return {
        condition: `EXISTS (
        SELECT 1 FROM attribute_values av
        JOIN attributes a ON av.attribute_id = a.id
        WHERE av.object_id = o.id
          AND a.field_name = ?
          AND av.text_value = ?
      )`,
        params: [fieldName, String(filterValue)]
      };
    } else if (attrDef.type === 'integer' || attrDef.type === 'float') {
      return {
        condition: `EXISTS (
        SELECT 1 FROM attribute_values av
        JOIN attributes a ON av.attribute_id = a.id
        WHERE av.object_id = o.id
          AND a.field_name = ?
          AND av.number_value = ?
      )`,
        params: [fieldName, Number(filterValue)]
      };
    } else if (attrDef.type === 'boolean') {
      return {
        condition: `EXISTS (
        SELECT 1 FROM attribute_values av
        JOIN attributes a ON av.attribute_id = a.id
        WHERE av.object_id = o.id
          AND a.field_name = ?
          AND av.boolean_value = ?
      )`,
        params: [fieldName, Boolean(filterValue)]
      };
    }
    // Unsupported combination: emit nothing so the filter is ignored.
    return { condition: '', params: [] };
  }

  /**
   * Build ORDER BY clause. The column name is validated against a whitelist
   * (so arbitrary input can never be interpolated into the SQL).
   */
  buildOrderBy(orderBy?: string, orderDir?: 'ASC' | 'DESC'): string {
    const safeOrderBy = ['id', 'object_key', 'object_type_name', 'label', 'cached_at'].includes(orderBy || '')
      ? (orderBy || 'label')
      : 'label';
    const safeOrderDir = orderDir === 'DESC' ? 'DESC' : 'ASC';
    return `ORDER BY o.${safeOrderBy} ${safeOrderDir}`;
  }

  /**
   * Build LIMIT/OFFSET clause. These values are interpolated directly into
   * the SQL text (not bound as parameters), so they are coerced to safe
   * non-negative integers first. Nullish defaults (`??` semantics) mean an
   * explicit limit of 0 is honoured instead of becoming 100.
   */
  buildPagination(limit?: number, offset?: number): string {
    const limitValue = this.toSafeNonNegativeInt(limit, 100);
    const offsetValue = this.toSafeNonNegativeInt(offset, 0);
    return `LIMIT ${limitValue} OFFSET ${offsetValue}`;
  }

  /**
   * Coerce a possibly-missing number to a non-negative integer suitable for
   * direct SQL interpolation; non-finite or non-number input falls back.
   */
  private toSafeNonNegativeInt(value: number | undefined, fallback: number): number {
    if (typeof value !== 'number' || !Number.isFinite(value)) {
      return fallback;
    }
    return Math.max(0, Math.floor(value));
  }
}
// Shared singleton instance used by the cache query services.
export const queryBuilder = new QueryBuilder();

View File

@@ -0,0 +1,256 @@
/**
* Schema Cache Service
*
* In-memory cache for schema data with TTL support.
* Provides fast access to schema information without hitting the database on every request.
*/
import { logger } from './logger.js';
import { schemaDiscoveryService } from './schemaDiscoveryService.js';
import type { ObjectTypeDefinition, AttributeDefinition } from '../generated/jira-schema.js';
import { getDatabaseAdapter } from './database/singleton.js';
// Shape of the schema payload served from the cache.
interface SchemaResponse {
  // Summary metadata about the discovered schema snapshot.
  metadata: {
    generatedAt: string; // ISO timestamp of when this response was built
    objectTypeCount: number;
    totalAttributes: number;
    enabledObjectTypeCount?: number; // only set when enabled/disabled split is known
  };
  // Keyed by type_name; includes both enabled and disabled object types.
  objectTypes: Record<string, ObjectTypeWithLinks>;
  cacheCounts?: Record<string, number>; // object counts in the local cache, per type
  jiraCounts?: Record<string, number>; // object counts reported by Jira, per type
}

// Object type definition enriched with sync status and reference-link edges.
interface ObjectTypeWithLinks extends ObjectTypeDefinition {
  enabled: boolean; // Whether this object type is enabled for syncing
  // Reference attributes on OTHER types that point at this type.
  incomingLinks: Array<{
    fromType: string;
    fromTypeName: string;
    attributeName: string;
    isMultiple: boolean;
  }>;
  // Reference attributes on THIS type that point at other types.
  outgoingLinks: Array<{
    toType: string;
    toTypeName: string;
    attributeName: string;
    isMultiple: boolean;
  }>;
}
class SchemaCacheService {
  /** Most recently built schema response; null until the first fetch. */
  private cache: SchemaResponse | null = null;
  /** Epoch milliseconds when `cache` was populated; 0 means "never". */
  private cacheTimestamp: number = 0;
  private readonly CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
  private db = getDatabaseAdapter(); // Use shared database adapter singleton

  /**
   * Get schema from cache or fetch from database.
   * Serves the in-memory copy while it is younger than CACHE_TTL_MS.
   */
  async getSchema(): Promise<SchemaResponse> {
    // Check cache validity
    const now = Date.now();
    if (this.cache && (now - this.cacheTimestamp) < this.CACHE_TTL_MS) {
      logger.debug('SchemaCache: Returning cached schema');
      return this.cache;
    }

    // Cache expired or doesn't exist - fetch from database
    logger.debug('SchemaCache: Cache expired or missing, fetching from database');
    const schema = await this.fetchFromDatabase();

    // Update cache
    this.cache = schema;
    this.cacheTimestamp = now;
    return schema;
  }

  /**
   * Invalidate cache (force refresh on next request).
   */
  invalidate(): void {
    logger.debug('SchemaCache: Invalidating cache');
    this.cache = null;
    this.cacheTimestamp = 0;
  }

  /**
   * Fetch schema from database and build the response.
   * Returns ALL object types (enabled and disabled) with their sync status.
   *
   * Schema discovery must be manually triggered via API endpoints;
   * there is no automatic discovery on first run.
   */
  private async fetchFromDatabase(): Promise<SchemaResponse> {
    // Fetch ALL object types (enabled and disabled) with their schema info
    const objectTypeRows = await this.db.query<{
      id: number;
      schema_id: number;
      jira_type_id: number;
      type_name: string;
      display_name: string;
      description: string | null;
      sync_priority: number;
      object_count: number;
      enabled: boolean | number;
    }>(
      `SELECT ot.id, ot.schema_id, ot.jira_type_id, ot.type_name, ot.display_name, ot.description, ot.sync_priority, ot.object_count, ot.enabled
       FROM object_types ot
       ORDER BY ot.sync_priority, ot.type_name`
    );

    if (objectTypeRows.length === 0) {
      // No types found, return empty schema
      return {
        metadata: {
          generatedAt: new Date().toISOString(),
          objectTypeCount: 0,
          totalAttributes: 0,
        },
        objectTypes: {},
      };
    }

    // Fetch attributes for ALL object types using a JOIN so one round trip
    // covers every type.
    const attributeRows = await this.db.query<{
      id: number;
      jira_attr_id: number;
      object_type_name: string;
      attr_name: string;
      field_name: string;
      attr_type: string;
      is_multiple: boolean | number;
      is_editable: boolean | number;
      is_required: boolean | number;
      is_system: boolean | number;
      reference_type_name: string | null;
      description: string | null;
      position: number | null;
      schema_id: number;
      type_name: string;
    }>(
      `SELECT a.*, ot.schema_id, ot.type_name
       FROM attributes a
       INNER JOIN object_types ot ON a.object_type_name = ot.type_name
       ORDER BY ot.type_name, COALESCE(a.position, 0), a.jira_attr_id`
    );

    logger.debug(`SchemaCache: Found ${objectTypeRows.length} object types (enabled and disabled) and ${attributeRows.length} attributes`);

    // Build object types with attributes.
    // Use type_name as key (even if same type exists in multiple schemas, we'll show the first one
    // in ORDER BY sync_priority, type_name order).
    // In practice, if same type_name exists in multiple schemas, attributes should be the same.
    const objectTypesWithLinks: Record<string, ObjectTypeWithLinks> = {};
    for (const typeRow of objectTypeRows) {
      const typeName = typeRow.type_name;

      // Skip if we already have this type_name (first row wins)
      if (objectTypesWithLinks[typeName]) {
        logger.debug(`SchemaCache: Skipping duplicate type_name ${typeName} from schema ${typeRow.schema_id}`);
        continue;
      }

      // Match attributes by both schema_id and type_name to ensure correct mapping
      const matchingAttributes = attributeRows.filter(a => a.schema_id === typeRow.schema_id && a.type_name === typeName);
      logger.debug(`SchemaCache: Found ${matchingAttributes.length} attributes for ${typeName} (schema_id: ${typeRow.schema_id})`);

      const attributes = matchingAttributes.map(attrRow => {
        // Convert boolean/number for SQLite compatibility (SQLite stores booleans as 0/1)
        const isMultiple = typeof attrRow.is_multiple === 'boolean' ? attrRow.is_multiple : attrRow.is_multiple === 1;
        const isEditable = typeof attrRow.is_editable === 'boolean' ? attrRow.is_editable : attrRow.is_editable === 1;
        const isRequired = typeof attrRow.is_required === 'boolean' ? attrRow.is_required : attrRow.is_required === 1;
        const isSystem = typeof attrRow.is_system === 'boolean' ? attrRow.is_system : attrRow.is_system === 1;
        return {
          jiraId: attrRow.jira_attr_id,
          name: attrRow.attr_name,
          fieldName: attrRow.field_name,
          type: attrRow.attr_type as AttributeDefinition['type'],
          isMultiple,
          isEditable,
          isRequired,
          isSystem,
          referenceTypeName: attrRow.reference_type_name || undefined,
          description: attrRow.description || undefined,
          position: attrRow.position ?? 0,
        } as AttributeDefinition;
      });

      // Convert enabled boolean/number to boolean
      const isEnabled = typeof typeRow.enabled === 'boolean' ? typeRow.enabled : typeRow.enabled === 1;

      objectTypesWithLinks[typeName] = {
        jiraTypeId: typeRow.jira_type_id,
        name: typeRow.display_name,
        typeName: typeName,
        syncPriority: typeRow.sync_priority,
        objectCount: typeRow.object_count,
        enabled: isEnabled,
        attributes,
        incomingLinks: [],
        outgoingLinks: [],
      };
    }

    // Build link relationships from reference-typed attributes
    for (const [typeName, typeDef] of Object.entries(objectTypesWithLinks)) {
      for (const attr of typeDef.attributes) {
        if (attr.type === 'reference' && attr.referenceTypeName) {
          // Add outgoing link from this type
          typeDef.outgoingLinks.push({
            toType: attr.referenceTypeName,
            toTypeName: objectTypesWithLinks[attr.referenceTypeName]?.name || attr.referenceTypeName,
            attributeName: attr.name,
            isMultiple: attr.isMultiple,
          });

          // Add incoming link to the referenced type (only when it is present in this response)
          if (objectTypesWithLinks[attr.referenceTypeName]) {
            objectTypesWithLinks[attr.referenceTypeName].incomingLinks.push({
              fromType: typeName,
              fromTypeName: typeDef.name,
              attributeName: attr.name,
              isMultiple: attr.isMultiple,
            });
          }
        }
      }
    }

    // Get cache counts (objectsByType) if available
    let cacheCounts: Record<string, number> | undefined;
    try {
      const { dataService } = await import('./dataService.js');
      const cacheStatus = await dataService.getCacheStatus();
      cacheCounts = cacheStatus.objectsByType;
    } catch (err) {
      logger.debug('SchemaCache: Could not fetch cache counts', err);
      // Continue without cache counts - not critical
    }

    // Calculate metadata (include enabled count)
    const totalAttributes = Object.values(objectTypesWithLinks).reduce(
      (sum, t) => sum + t.attributes.length,
      0
    );
    const enabledCount = Object.values(objectTypesWithLinks).filter(t => t.enabled).length;

    const response: SchemaResponse = {
      metadata: {
        generatedAt: new Date().toISOString(),
        // Count the deduplicated entries so this always matches
        // Object.keys(objectTypes).length. Using objectTypeRows.length would
        // over-count when the same type_name exists in multiple schemas
        // (duplicates are skipped above).
        objectTypeCount: Object.keys(objectTypesWithLinks).length,
        totalAttributes,
        enabledObjectTypeCount: enabledCount,
      },
      objectTypes: objectTypesWithLinks,
      cacheCounts,
    };
    return response;
  }
}

// Export singleton instance
export const schemaCacheService = new SchemaCacheService();

View File

@@ -0,0 +1,468 @@
/**
* Schema Configuration Service
*
* Manages schema and object type configuration for syncing.
* Discovers schemas and object types from Jira Assets API and allows
* enabling/disabling specific object types for synchronization.
*/
import { logger } from './logger.js';
import { normalizedCacheStore } from './normalizedCacheStore.js';
import { config } from '../config/env.js';
import { toPascalCase } from './schemaUtils.js';
/** A Jira Assets object schema as reported by the Assets API. */
export interface JiraSchema {
  id: number;
  name: string;
  description?: string;
  /** Number of object types in the schema, when the API reports it. */
  objectTypeCount?: number;
}

/** An object type definition within a Jira Assets schema. */
export interface JiraObjectType {
  id: number;
  name: string;
  description?: string;
  /** Number of objects of this type, when the API reports it. */
  objectCount?: number;
  /** The schema this object type belongs to. */
  objectSchemaId: number;
  parentObjectTypeId?: number;
  inherited?: boolean;
  abstractObjectType?: boolean;
}

/** A locally stored object type row together with its sync configuration. */
export interface ConfiguredObjectType {
  id: string; // "schemaId:objectTypeId"
  schemaId: string;
  schemaName: string;
  objectTypeId: number;
  objectTypeName: string;
  displayName: string;
  description: string | null;
  objectCount: number;
  /** Whether this object type is enabled for synchronization. */
  enabled: boolean;
  discoveredAt: string; // ISO timestamp
  updatedAt: string; // ISO timestamp
}
class SchemaConfigurationService {
constructor() {
// Configuration service - no API calls needed, uses database only
}
/**
* NOTE: Schema discovery is now handled by SchemaSyncService.
* This service only manages configuration (enabling/disabling object types).
* Use schemaSyncService.syncAll() to discover and sync schemas, object types, and attributes.
*/
/**
* Get all configured object types grouped by schema
*/
async getConfiguredObjectTypes(): Promise<Array<{
schemaId: string;
schemaName: string;
objectTypes: ConfiguredObjectType[];
}>> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const rows = await db.query<{
id: number;
schema_id: number;
jira_schema_id: string;
schema_name: string;
jira_type_id: number;
type_name: string;
display_name: string;
description: string | null;
object_count: number;
enabled: boolean | number;
discovered_at: string;
updated_at: string;
}>(`
SELECT
ot.id,
ot.schema_id,
s.jira_schema_id,
s.name as schema_name,
ot.jira_type_id,
ot.type_name,
ot.display_name,
ot.description,
ot.object_count,
ot.enabled,
ot.discovered_at,
ot.updated_at
FROM object_types ot
JOIN schemas s ON ot.schema_id = s.id
ORDER BY s.name ASC, ot.display_name ASC
`);
// Group by schema
const schemaMap = new Map<string, ConfiguredObjectType[]>();
for (const row of rows) {
const objectType: ConfiguredObjectType = {
id: `${row.jira_schema_id}:${row.jira_type_id}`, // Keep same format for compatibility
schemaId: row.jira_schema_id,
schemaName: row.schema_name,
objectTypeId: row.jira_type_id,
objectTypeName: row.type_name,
displayName: row.display_name,
description: row.description,
objectCount: row.object_count,
enabled: typeof row.enabled === 'boolean' ? row.enabled : row.enabled === 1,
discoveredAt: row.discovered_at,
updatedAt: row.updated_at,
};
if (!schemaMap.has(row.jira_schema_id)) {
schemaMap.set(row.jira_schema_id, []);
}
schemaMap.get(row.jira_schema_id)!.push(objectType);
}
// Convert to array
return Array.from(schemaMap.entries()).map(([schemaId, objectTypes]) => {
const firstType = objectTypes[0];
return {
schemaId,
schemaName: firstType.schemaName,
objectTypes,
};
});
}
/**
* Set enabled status for an object type
* id format: "schemaId:objectTypeId" (e.g., "6:123")
*/
async setObjectTypeEnabled(id: string, enabled: boolean): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
// Parse id: "schemaId:objectTypeId"
const [schemaIdStr, objectTypeIdStr] = id.split(':');
if (!schemaIdStr || !objectTypeIdStr) {
throw new Error(`Invalid object type id format: ${id}. Expected format: "schemaId:objectTypeId"`);
}
const objectTypeId = parseInt(objectTypeIdStr, 10);
if (isNaN(objectTypeId)) {
throw new Error(`Invalid object type id: ${objectTypeIdStr}`);
}
// Get schema_id (FK) from schemas table
const schemaRow = await db.queryOne<{ id: number }>(
`SELECT id FROM schemas WHERE jira_schema_id = ?`,
[schemaIdStr]
);
if (!schemaRow) {
throw new Error(`Schema ${schemaIdStr} not found`);
}
// Check if type_name is missing and try to fix it if enabling
const currentType = await db.queryOne<{ type_name: string | null; display_name: string }>(
`SELECT type_name, display_name FROM object_types WHERE schema_id = ? AND jira_type_id = ?`,
[schemaRow.id, objectTypeId]
);
let typeNameToSet = currentType?.type_name;
const needsTypeNameFix = enabled && (!typeNameToSet || typeNameToSet.trim() === '');
if (needsTypeNameFix && currentType?.display_name) {
// Try to generate type_name from display_name (PascalCase)
const { toPascalCase } = await import('./schemaUtils.js');
typeNameToSet = toPascalCase(currentType.display_name);
logger.warn(`SchemaConfiguration: Type ${id} has missing type_name. Auto-generating "${typeNameToSet}" from display_name "${currentType.display_name}"`);
}
const now = new Date().toISOString();
if (db.isPostgres) {
if (needsTypeNameFix && typeNameToSet) {
await db.execute(`
UPDATE object_types
SET enabled = ?, type_name = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [enabled, typeNameToSet, now, schemaRow.id, objectTypeId]);
logger.info(`SchemaConfiguration: Set object type ${id} enabled=${enabled} and fixed missing type_name to "${typeNameToSet}"`);
} else {
await db.execute(`
UPDATE object_types
SET enabled = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [enabled, now, schemaRow.id, objectTypeId]);
logger.info(`SchemaConfiguration: Set object type ${id} enabled=${enabled}`);
}
} else {
if (needsTypeNameFix && typeNameToSet) {
await db.execute(`
UPDATE object_types
SET enabled = ?, type_name = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [enabled ? 1 : 0, typeNameToSet, now, schemaRow.id, objectTypeId]);
logger.info(`SchemaConfiguration: Set object type ${id} enabled=${enabled} and fixed missing type_name to "${typeNameToSet}"`);
} else {
await db.execute(`
UPDATE object_types
SET enabled = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [enabled ? 1 : 0, now, schemaRow.id, objectTypeId]);
logger.info(`SchemaConfiguration: Set object type ${id} enabled=${enabled}`);
}
}
}
/**
* Bulk update enabled status for multiple object types
*/
async bulkSetObjectTypesEnabled(updates: Array<{ id: string; enabled: boolean }>): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const now = new Date().toISOString();
await db.transaction(async (txDb) => {
for (const update of updates) {
// Parse id: "schemaId:objectTypeId"
const [schemaIdStr, objectTypeIdStr] = update.id.split(':');
if (!schemaIdStr || !objectTypeIdStr) {
logger.warn(`SchemaConfiguration: Invalid object type id format: ${update.id}`);
continue;
}
const objectTypeId = parseInt(objectTypeIdStr, 10);
if (isNaN(objectTypeId)) {
logger.warn(`SchemaConfiguration: Invalid object type id: ${objectTypeIdStr}`);
continue;
}
// Get schema_id (FK) from schemas table
const schemaRow = await txDb.queryOne<{ id: number }>(
`SELECT id FROM schemas WHERE jira_schema_id = ?`,
[schemaIdStr]
);
if (!schemaRow) {
logger.warn(`SchemaConfiguration: Schema ${schemaIdStr} not found`);
continue;
}
// Check if type_name is missing and try to fix it if enabling
const currentType = await txDb.queryOne<{ type_name: string | null; display_name: string }>(
`SELECT type_name, display_name FROM object_types WHERE schema_id = ? AND jira_type_id = ?`,
[schemaRow.id, objectTypeId]
);
let typeNameToSet = currentType?.type_name;
const needsTypeNameFix = update.enabled && (!typeNameToSet || typeNameToSet.trim() === '');
if (needsTypeNameFix && currentType?.display_name) {
// Try to generate type_name from display_name (PascalCase)
const { toPascalCase } = await import('./schemaUtils.js');
typeNameToSet = toPascalCase(currentType.display_name);
logger.warn(`SchemaConfiguration: Type ${update.id} has missing type_name. Auto-generating "${typeNameToSet}" from display_name "${currentType.display_name}"`);
}
if (txDb.isPostgres) {
if (needsTypeNameFix && typeNameToSet) {
await txDb.execute(`
UPDATE object_types
SET enabled = ?, type_name = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [update.enabled, typeNameToSet, now, schemaRow.id, objectTypeId]);
} else {
await txDb.execute(`
UPDATE object_types
SET enabled = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [update.enabled, now, schemaRow.id, objectTypeId]);
}
} else {
if (needsTypeNameFix && typeNameToSet) {
await txDb.execute(`
UPDATE object_types
SET enabled = ?, type_name = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [update.enabled ? 1 : 0, typeNameToSet, now, schemaRow.id, objectTypeId]);
} else {
await txDb.execute(`
UPDATE object_types
SET enabled = ?, updated_at = ?
WHERE schema_id = ? AND jira_type_id = ?
`, [update.enabled ? 1 : 0, now, schemaRow.id, objectTypeId]);
}
}
}
});
logger.info(`SchemaConfiguration: Bulk updated ${updates.length} object types`);
}
/**
* Get enabled object types (for sync engine)
*/
async getEnabledObjectTypes(): Promise<Array<{
schemaId: string;
objectTypeId: number;
objectTypeName: string;
displayName: string;
}>> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
// Use parameterized query to avoid boolean/integer comparison issues
const rows = await db.query<{
jira_schema_id: string;
jira_type_id: number;
type_name: string;
display_name: string;
}>(
`SELECT s.jira_schema_id, ot.jira_type_id, ot.type_name, ot.display_name
FROM object_types ot
JOIN schemas s ON ot.schema_id = s.id
WHERE ot.enabled = ?`,
[db.isPostgres ? true : 1]
);
return rows.map(row => ({
schemaId: row.jira_schema_id,
objectTypeId: row.jira_type_id,
objectTypeName: row.type_name,
displayName: row.display_name,
}));
}
/**
* Check if configuration is complete (at least one object type enabled)
*/
async isConfigurationComplete(): Promise<boolean> {
const enabledTypes = await this.getEnabledObjectTypes();
return enabledTypes.length > 0;
}
/**
* Get configuration statistics
*/
async getConfigurationStats(): Promise<{
totalSchemas: number;
totalObjectTypes: number;
enabledObjectTypes: number;
disabledObjectTypes: number;
isConfigured: boolean;
}> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const totalRow = await db.queryOne<{ count: number }>(`
SELECT COUNT(*) as count FROM object_types
`);
// Use parameterized query to avoid boolean/integer comparison issues
const enabledRow = await db.queryOne<{ count: number }>(
`SELECT COUNT(*) as count FROM object_types WHERE enabled = ?`,
[db.isPostgres ? true : 1]
);
const schemaRow = await db.queryOne<{ count: number }>(`
SELECT COUNT(*) as count FROM schemas
`);
const total = totalRow?.count || 0;
const enabled = enabledRow?.count || 0;
const schemas = schemaRow?.count || 0;
return {
totalSchemas: schemas,
totalObjectTypes: total,
enabledObjectTypes: enabled,
disabledObjectTypes: total - enabled,
isConfigured: enabled > 0,
};
}
/**
* Get all schemas with their search enabled status
*/
async getSchemas(): Promise<Array<{
schemaId: string;
schemaName: string;
searchEnabled: boolean;
}>> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const rows = await db.query<{
jira_schema_id: string;
name: string;
search_enabled: boolean | number;
}>(`
SELECT jira_schema_id, name, search_enabled
FROM schemas
ORDER BY name ASC
`);
return rows.map(row => ({
schemaId: row.jira_schema_id,
schemaName: row.name,
searchEnabled: typeof row.search_enabled === 'boolean' ? row.search_enabled : row.search_enabled === 1,
}));
}
/**
* Set search enabled status for a schema
*/
async setSchemaSearchEnabled(schemaId: string, searchEnabled: boolean): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const now = new Date().toISOString();
if (db.isPostgres) {
await db.execute(`
UPDATE schemas
SET search_enabled = ?, updated_at = ?
WHERE jira_schema_id = ?
`, [searchEnabled, now, schemaId]);
} else {
await db.execute(`
UPDATE schemas
SET search_enabled = ?, updated_at = ?
WHERE jira_schema_id = ?
`, [searchEnabled ? 1 : 0, now, schemaId]);
}
logger.info(`SchemaConfiguration: Set schema ${schemaId} search_enabled=${searchEnabled}`);
}
}
export const schemaConfigurationService = new SchemaConfigurationService();

View File

@@ -0,0 +1,182 @@
/**
* Schema Discovery Service
*
* Provides access to discovered schema data from the database.
* Schema synchronization is handled by SchemaSyncService.
* This service provides read-only access to the discovered schema.
*/
import { logger } from './logger.js';
import { getDatabaseAdapter } from './database/singleton.js';
import type { DatabaseAdapter } from './database/interface.js';
import { schemaSyncService } from './SchemaSyncService.js';
import type { ObjectTypeDefinition, AttributeDefinition } from '../generated/jira-schema.js';
// Jira API Types (kept for reference, but not used in this service anymore)
class SchemaDiscoveryService {
  private db: DatabaseAdapter;
  private isPostgres: boolean;

  constructor() {
    // Use shared database adapter singleton
    this.db = getDatabaseAdapter();
    // Determine if PostgreSQL based on adapter type
    this.isPostgres = (this.db.isPostgres === true);
  }

  /**
   * Discover schema from Jira Assets API and populate database.
   * Delegates to SchemaSyncService for the actual synchronization.
   *
   * @param force Retained for backward compatibility; currently ignored
   *              because schemaSyncService.syncAll() always performs a
   *              full synchronization.
   */
  async discoverAndStoreSchema(force: boolean = false): Promise<void> {
    void force; // parameter kept for API compatibility
    logger.info('SchemaDiscovery: Delegating to SchemaSyncService for schema synchronization...');
    await schemaSyncService.syncAll();
  }

  /**
   * Coerce a database boolean into a JS boolean.
   * PostgreSQL returns native booleans; SQLite returns 0/1 integers.
   */
  private toBoolean(value: boolean | number): boolean {
    return typeof value === 'boolean' ? value : value === 1;
  }

  /**
   * Get attribute definition from database.
   *
   * @param typeName  PascalCase object type name (object_type_name column).
   * @param fieldName camelCase field name (field_name column).
   * @returns the attribute definition, or null when not found.
   */
  async getAttribute(typeName: string, fieldName: string): Promise<AttributeDefinition | null> {
    const row = await this.db.queryOne<{
      jira_attr_id: number;
      attr_name: string;
      field_name: string;
      attr_type: string;
      is_multiple: boolean | number;
      is_editable: boolean | number;
      is_required: boolean | number;
      is_system: boolean | number;
      reference_type_name: string | null;
      description: string | null;
    }>(`
      SELECT * FROM attributes
      WHERE object_type_name = ? AND field_name = ?
    `, [typeName, fieldName]);

    if (!row) return null;

    return {
      jiraId: row.jira_attr_id,
      name: row.attr_name,
      fieldName: row.field_name,
      type: row.attr_type as AttributeDefinition['type'],
      isMultiple: this.toBoolean(row.is_multiple),
      isEditable: this.toBoolean(row.is_editable),
      isRequired: this.toBoolean(row.is_required),
      isSystem: this.toBoolean(row.is_system),
      referenceTypeName: row.reference_type_name || undefined,
      description: row.description || undefined,
    };
  }

  /**
   * Get all attributes for a type, ordered by position (then Jira id).
   */
  async getAttributesForType(typeName: string): Promise<AttributeDefinition[]> {
    const rows = await this.db.query<{
      jira_attr_id: number;
      attr_name: string;
      field_name: string;
      attr_type: string;
      is_multiple: boolean | number;
      is_editable: boolean | number;
      is_required: boolean | number;
      is_system: boolean | number;
      reference_type_name: string | null;
      description: string | null;
      position: number | null;
    }>(`
      SELECT * FROM attributes
      WHERE object_type_name = ?
      ORDER BY COALESCE(position, 0), jira_attr_id
    `, [typeName]);

    return rows.map(row => ({
      jiraId: row.jira_attr_id,
      name: row.attr_name,
      fieldName: row.field_name,
      type: row.attr_type as AttributeDefinition['type'],
      isMultiple: this.toBoolean(row.is_multiple),
      isEditable: this.toBoolean(row.is_editable),
      isRequired: this.toBoolean(row.is_required),
      isSystem: this.toBoolean(row.is_system),
      referenceTypeName: row.reference_type_name || undefined,
      description: row.description || undefined,
      position: row.position ?? 0,
    }));
  }

  /**
   * Get object type definition (including its attributes) from database.
   * Returns null when the type is unknown.
   */
  async getObjectType(typeName: string): Promise<ObjectTypeDefinition | null> {
    const row = await this.db.queryOne<{
      jira_type_id: number;
      type_name: string;
      display_name: string;
      description: string | null;
      sync_priority: number;
      object_count: number;
    }>(`
      SELECT * FROM object_types
      WHERE type_name = ?
    `, [typeName]);

    if (!row) return null;

    const attributes = await this.getAttributesForType(typeName);

    return {
      jiraTypeId: row.jira_type_id,
      name: row.display_name,
      typeName: row.type_name,
      syncPriority: row.sync_priority,
      objectCount: row.object_count,
      attributes,
    };
  }

  /**
   * Get attribute ID by type and field name or attribute name.
   * Supports both fieldName (camelCase) and name (display name) for flexibility.
   */
  async getAttributeId(typeName: string, fieldNameOrName: string): Promise<number | null> {
    // Try field_name first (camelCase)
    let row = await this.db.queryOne<{ id: number }>(`
      SELECT id FROM attributes
      WHERE object_type_name = ? AND field_name = ?
    `, [typeName, fieldNameOrName]);

    // If not found, try attr_name (display name)
    if (!row) {
      row = await this.db.queryOne<{ id: number }>(`
        SELECT id FROM attributes
        WHERE object_type_name = ? AND attr_name = ?
      `, [typeName, fieldNameOrName]);
    }

    // ?? (not ||) so a theoretical id of 0 would not be coerced to null
    return row?.id ?? null;
  }
}

// Export singleton instance
export const schemaDiscoveryService = new SchemaDiscoveryService();

View File

@@ -0,0 +1,380 @@
/**
* Schema Mapping Service
*
* Manages mappings between object types and their Jira Assets schema IDs.
* Allows different object types to exist in different schemas.
*/
import { logger } from './logger.js';
import { normalizedCacheStore } from './normalizedCacheStore.js';
import { config } from '../config/env.js';
import type { CMDBObjectTypeName } from '../generated/jira-types.js';
/** A persisted mapping from an object type to the Jira Assets schema it lives in. */
export interface SchemaMapping {
  objectTypeName: string;
  schemaId: string;
  /** Whether the mapping is active; disabled mappings are not cached for lookups. */
  enabled: boolean;
  createdAt: string; // ISO timestamp
  updatedAt: string; // ISO timestamp
}
class SchemaMappingService {
private cache: Map<string, string> = new Map(); // objectTypeName -> schemaId
private cacheInitialized: boolean = false;
/**
* Get schema ID for an object type
* Returns the configured schema ID or the default from config
*/
async getSchemaId(objectTypeName: CMDBObjectTypeName | string): Promise<string> {
await this.ensureCacheInitialized();
// Check cache first
if (this.cache.has(objectTypeName)) {
return this.cache.get(objectTypeName)!;
}
// Try to get schema ID from database (from enabled object types)
try {
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const type = enabledTypes.find(et => et.objectTypeName === objectTypeName);
if (type) {
return type.schemaId;
}
} catch (error) {
logger.warn(`SchemaMapping: Failed to get schema ID from database for ${objectTypeName}`, error);
}
// Return empty string if not found (no default)
return '';
}
/**
* Get all schema mappings
*/
async getAllMappings(): Promise<SchemaMapping[]> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const rows = await db.query<{
object_type_name: string;
schema_id: string;
enabled: boolean | number;
created_at: string;
updated_at: string;
}>(`
SELECT object_type_name, schema_id, enabled, created_at, updated_at
FROM schema_mappings
ORDER BY object_type_name
`);
return rows.map(row => ({
objectTypeName: row.object_type_name,
schemaId: row.schema_id,
enabled: typeof row.enabled === 'boolean' ? row.enabled : row.enabled === 1,
createdAt: row.created_at,
updatedAt: row.updated_at,
}));
}
/**
* Set schema mapping for an object type
*/
async setMapping(objectTypeName: string, schemaId: string, enabled: boolean = true): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
const now = new Date().toISOString();
if (db.isPostgres) {
await db.execute(`
INSERT INTO schema_mappings (object_type_name, schema_id, enabled, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(object_type_name) DO UPDATE SET
schema_id = excluded.schema_id,
enabled = excluded.enabled,
updated_at = excluded.updated_at
`, [objectTypeName, schemaId, enabled, now, now]);
} else {
await db.execute(`
INSERT INTO schema_mappings (object_type_name, schema_id, enabled, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(object_type_name) DO UPDATE SET
schema_id = excluded.schema_id,
enabled = excluded.enabled,
updated_at = excluded.updated_at
`, [objectTypeName, schemaId, enabled ? 1 : 0, now, now]);
}
// Update cache
if (enabled) {
this.cache.set(objectTypeName, schemaId);
} else {
this.cache.delete(objectTypeName);
}
logger.info(`SchemaMappingService: Set mapping for ${objectTypeName} -> schema ${schemaId} (enabled: ${enabled})`);
}
/**
* Delete schema mapping for an object type (will use default schema)
*/
async deleteMapping(objectTypeName: string): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
await db.execute(`
DELETE FROM schema_mappings WHERE object_type_name = ?
`, [objectTypeName]);
// Remove from cache
this.cache.delete(objectTypeName);
logger.info(`SchemaMappingService: Deleted mapping for ${objectTypeName}`);
}
/**
* Check if an object type should be synced (has enabled mapping or uses default schema)
*/
async isTypeEnabled(objectTypeName: string): Promise<boolean> {
await this.ensureCacheInitialized();
// If there's a mapping, check if it's enabled
if (this.cache.has(objectTypeName)) {
// Check if it's actually enabled in the database
const db = (normalizedCacheStore as any).db;
if (db) {
await db.ensureInitialized?.();
const row = await db.queryOne<{ enabled: boolean | number }>(`
SELECT enabled FROM schema_mappings WHERE object_type_name = ?
`, [objectTypeName]);
if (row) {
return typeof row.enabled === 'boolean' ? row.enabled : row.enabled === 1;
}
}
}
// If no mapping exists, check if object type is enabled in database
try {
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
return enabledTypes.some(et => et.objectTypeName === objectTypeName);
} catch (error) {
logger.warn(`SchemaMapping: Failed to check if ${objectTypeName} is enabled`, error);
return false;
}
}
/**
* Initialize cache from database
*/
private async ensureCacheInitialized(): Promise<void> {
if (this.cacheInitialized) return;
try {
const db = (normalizedCacheStore as any).db;
if (!db) {
this.cacheInitialized = true;
return;
}
await db.ensureInitialized?.();
// Use parameterized query to avoid boolean/integer comparison issues
const rows = await db.query<{
object_type_name: string;
schema_id: string;
enabled: boolean | number;
}>(
`SELECT object_type_name, schema_id, enabled
FROM schema_mappings
WHERE enabled = ?`,
[db.isPostgres ? true : 1]
);
this.cache.clear();
for (const row of rows) {
const enabled = typeof row.enabled === 'boolean' ? row.enabled : row.enabled === 1;
if (enabled) {
this.cache.set(row.object_type_name, row.schema_id);
}
}
this.cacheInitialized = true;
logger.debug(`SchemaMappingService: Initialized cache with ${this.cache.size} mappings`);
} catch (error) {
logger.warn('SchemaMappingService: Failed to initialize cache, using defaults', error);
this.cacheInitialized = true; // Mark as initialized to prevent retry loops
}
}
/**
* Get all object types with their sync configuration
*/
async getAllObjectTypesWithConfig(): Promise<Array<{
typeName: string;
displayName: string;
description: string | null;
schemaId: string | null;
enabled: boolean;
objectCount: number;
syncPriority: number;
}>> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
try {
// Get all object types with their mappings
const rows = await db.query<{
type_name: string;
display_name: string;
description: string | null;
object_count: number;
sync_priority: number;
schema_id: string | null;
enabled: boolean | number | null;
}>(`
SELECT
ot.type_name,
ot.display_name,
ot.description,
ot.object_count,
ot.sync_priority,
sm.schema_id,
sm.enabled
FROM object_types ot
LEFT JOIN schema_mappings sm ON ot.type_name = sm.object_type_name
ORDER BY ot.sync_priority ASC, ot.display_name ASC
`);
logger.debug(`SchemaMappingService: Found ${rows.length} object types in database`);
// Get first available schema ID from database
let defaultSchemaId: string | null = null;
try {
const { normalizedCacheStore } = await import('./normalizedCacheStore.js');
const db = (normalizedCacheStore as any).db;
if (db) {
await db.ensureInitialized?.();
const schemaRow = await db.queryOne<{ jira_schema_id: string }>(
`SELECT jira_schema_id FROM schemas ORDER BY jira_schema_id LIMIT 1`
);
defaultSchemaId = schemaRow?.jira_schema_id || null;
}
} catch (error) {
logger.warn('SchemaMapping: Failed to get default schema ID from database', error);
}
return rows.map(row => ({
typeName: row.type_name,
displayName: row.display_name,
description: row.description,
schemaId: row.schema_id || defaultSchemaId,
enabled: row.enabled === null
? true // Default: enabled if no mapping exists
: (typeof row.enabled === 'boolean' ? row.enabled : row.enabled === 1),
objectCount: row.object_count,
syncPriority: row.sync_priority,
}));
} catch (error) {
logger.error('SchemaMappingService: Failed to get object types with config', error);
throw error;
}
}
/**
* Enable or disable an object type for syncing
*/
async setTypeEnabled(objectTypeName: string, enabled: boolean): Promise<void> {
const db = (normalizedCacheStore as any).db;
if (!db) {
throw new Error('Database not available');
}
await db.ensureInitialized?.();
// Check if mapping exists
const existing = await db.queryOne<{ schema_id: string }>(`
SELECT schema_id FROM schema_mappings WHERE object_type_name = ?
`, [objectTypeName]);
// Get schema ID from existing mapping or from database
let schemaId = existing?.schema_id || '';
if (!schemaId) {
// Try to get schema ID from database (from enabled object types)
try {
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const type = enabledTypes.find(et => et.objectTypeName === objectTypeName);
if (type) {
schemaId = type.schemaId;
}
} catch (error) {
logger.warn(`SchemaMapping: Failed to get schema ID from database for ${objectTypeName}`, error);
}
}
if (!schemaId) {
throw new Error(`No schema ID available for object type ${objectTypeName}. Please ensure the object type is discovered and configured.`);
}
// Create or update mapping
const now = new Date().toISOString();
if (db.isPostgres) {
await db.execute(`
INSERT INTO schema_mappings (object_type_name, schema_id, enabled, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(object_type_name) DO UPDATE SET
enabled = excluded.enabled,
updated_at = excluded.updated_at
`, [objectTypeName, schemaId, enabled, now, now]);
} else {
await db.execute(`
INSERT INTO schema_mappings (object_type_name, schema_id, enabled, created_at, updated_at)
VALUES (?, ?, ?, ?, ?)
ON CONFLICT(object_type_name) DO UPDATE SET
enabled = excluded.enabled,
updated_at = excluded.updated_at
`, [objectTypeName, schemaId, enabled ? 1 : 0, now, now]);
}
// Update cache
if (enabled) {
this.cache.set(objectTypeName, schemaId);
} else {
this.cache.delete(objectTypeName);
}
this.clearCache();
logger.info(`SchemaMappingService: Set ${objectTypeName} enabled=${enabled}`);
}
  /**
   * Clear the in-memory object-type → schema-ID cache.
   *
   * Also resets the initialization flag so the cache is treated as cold;
   * call this after any mapping update so stale entries are not served.
   */
  clearCache(): void {
    this.cache.clear();
    this.cacheInitialized = false;
  }
}
// Shared singleton instance used throughout the application
export const schemaMappingService = new SchemaMappingService();

View File

@@ -0,0 +1,149 @@
/**
 * Schema Utility Functions
 *
 * Helper functions for schema discovery and type conversion.
 */

// Jira attribute type mappings (based on the Jira Insight/Assets API).
// Keys are the numeric attribute type IDs returned by the Assets REST API;
// IDs not present here are treated as 'unknown' by mapJiraType().
const JIRA_TYPE_MAP: Record<number, 'text' | 'integer' | 'float' | 'boolean' | 'date' | 'datetime' | 'select' | 'reference' | 'url' | 'email' | 'textarea' | 'user' | 'status' | 'unknown'> = {
  0: 'text', // Default/Text
  1: 'integer', // Integer
  2: 'boolean', // Boolean
  3: 'float', // Double/Float
  4: 'date', // Date
  5: 'datetime', // DateTime
  6: 'url', // URL
  7: 'email', // Email
  8: 'textarea', // Textarea
  9: 'select', // Select
  10: 'reference', // Reference (Object)
  11: 'user', // User
  12: 'reference', // Confluence (treated as reference)
  13: 'reference', // Group (treated as reference)
  14: 'reference', // Version (treated as reference)
  15: 'reference', // Project (treated as reference)
  16: 'status', // Status
};

// Core/main object types - these always sync first (priority 1 in
// determineSyncPriority), ahead of everything else.
const PRIORITY_TYPE_NAMES = new Set([
  'Application Component',
  'Server',
  'Flows',
]);

// Reference data types - these sync with the lowest priority. Each pattern
// matches the END of the type name (e.g. "Risk Factor", "ICT Governance Model").
const REFERENCE_TYPE_PATTERNS = [
  /Factor$/,
  /Model$/,
  /Type$/,
  /Category$/,
  /Importance$/,
  /Analyse$/,
  /Organisation$/,
  /Function$/,
];
/**
 * Convert a display string to camelCase while preserving interior casing.
 *
 * Examples:
 *   "Application Function"  -> "applicationFunction"
 *   "ICT Governance Model"  -> "ictGovernanceModel"
 *   "ApplicationFunction"   -> "applicationFunction"
 */
export function toCamelCase(str: string): string {
  // Tokenize: replace non-alphanumerics with spaces, then split on whitespace
  const parts = str
    .replace(/[^a-zA-Z0-9\s]/g, ' ')
    .split(/\s+/)
    .filter(part => part.length > 0);

  if (parts.length === 0) {
    return '';
  }

  // A lone word (possibly already camelCase/PascalCase) only gets its first
  // character lowercased — acronym handling intentionally does not apply here.
  if (parts.length === 1) {
    const only = parts[0];
    return only.charAt(0).toLowerCase() + only.slice(1);
  }

  return parts
    .map((part, idx) => {
      if (idx > 0) {
        // Non-leading words: capitalize the first letter, keep the rest as-is
        return part.charAt(0).toUpperCase() + part.slice(1);
      }
      // Leading word: fully lowercase an all-caps acronym, otherwise only
      // lowercase the first character
      const isAcronym = part.length > 1 && part === part.toUpperCase();
      return isAcronym
        ? part.toLowerCase()
        : part.charAt(0).toLowerCase() + part.slice(1);
    })
    .join('');
}
/**
 * Convert a display string to PascalCase while preserving interior casing.
 *
 * Examples:
 *   "Application Function" -> "ApplicationFunction"
 *   "ICT Governance Model" -> "IctGovernanceModel"
 *   "applicationFunction"  -> "ApplicationFunction"
 */
export function toPascalCase(str: string): string {
  // Tokenize: replace non-alphanumerics with spaces, then split on whitespace
  const parts = str
    .replace(/[^a-zA-Z0-9\s]/g, ' ')
    .split(/\s+/)
    .filter(part => part.length > 0);

  if (parts.length === 0) {
    return '';
  }

  // A lone word only gets its first character capitalized; an all-caps
  // acronym stays all-caps in this case.
  if (parts.length === 1) {
    const only = parts[0];
    return only.charAt(0).toUpperCase() + only.slice(1);
  }

  // Multiple words: all-caps acronyms are normalized to "Acronym" form;
  // every other word just gets a capitalized first letter.
  return parts
    .map(part => {
      const isAcronym = part.length > 1 && part === part.toUpperCase();
      if (isAcronym) {
        return part.charAt(0).toUpperCase() + part.slice(1).toLowerCase();
      }
      return part.charAt(0).toUpperCase() + part.slice(1);
    })
    .join('');
}
/**
 * Map a Jira Insight/Assets attribute type ID to the internal type system.
 * IDs without a known mapping fall back to 'unknown'.
 */
export function mapJiraType(typeId: number): 'text' | 'integer' | 'float' | 'boolean' | 'date' | 'datetime' | 'select' | 'reference' | 'url' | 'email' | 'textarea' | 'user' | 'status' | 'unknown' {
  const mapped = JIRA_TYPE_MAP[typeId];
  return mapped ?? 'unknown';
}
/**
 * Determine the sync priority for an object type (lower value = syncs earlier).
 *
 * Ordering: hard-coded main types first, suffix-matched reference data types
 * last, and all remaining types ranked by how many objects they contain.
 */
export function determineSyncPriority(typeName: string, objectCount: number): number {
  // Application Component and the other main types always go first
  if (PRIORITY_TYPE_NAMES.has(typeName)) {
    return 1;
  }

  // Reference data (suffixes like *Factor, *Model, *Type) syncs last
  if (REFERENCE_TYPE_PATTERNS.some(pattern => pattern.test(typeName))) {
    return 10;
  }

  // Everything else: larger collections sync earlier
  if (objectCount > 100) {
    return 2;
  }
  if (objectCount > 10) {
    return 5;
  }
  return 8;
}

View File

@@ -8,10 +8,12 @@
*/
import { logger } from './logger.js';
import { cacheStore } from './cacheStore.js';
import { normalizedCacheStore as cacheStore } from './normalizedCacheStore.js';
import { jiraAssetsClient, JiraObjectNotFoundError } from './jiraAssetsClient.js';
import { OBJECT_TYPES, getObjectTypesBySyncPriority } from '../generated/jira-schema.js';
import type { CMDBObject, CMDBObjectTypeName } from '../generated/jira-types.js';
import { schemaDiscoveryService } from './schemaDiscoveryService.js';
import type { ObjectEntry } from '../domain/jiraAssetsPayload.js';
// =============================================================================
// Types
@@ -61,6 +63,7 @@ class SyncEngine {
private incrementalInterval: number;
private batchSize: number;
private lastIncrementalSync: Date | null = null;
private lastConfigCheck: number = 0; // Track last config check time to avoid log spam
constructor() {
this.incrementalInterval = parseInt(
@@ -93,7 +96,26 @@ class SyncEngine {
logger.info('SyncEngine: Sync uses service account token (JIRA_SERVICE_ACCOUNT_TOKEN) from .env');
this.isRunning = true;
// Sync can run automatically using service account token
// Check if configuration is complete before starting scheduler
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const isConfigured = await schemaConfigurationService.isConfigurationComplete();
// Start incremental sync scheduler if token is available AND configuration is complete
if (jiraAssetsClient.hasToken()) {
if (isConfigured) {
this.startIncrementalSyncScheduler();
logger.info('SyncEngine: Incremental sync scheduler started (configuration complete)');
} else {
logger.info('SyncEngine: Incremental sync scheduler NOT started - schema configuration not complete. Please configure object types in settings first.');
// Start scheduler but it will check configuration on each run
// This allows scheduler to start automatically when configuration is completed later
this.startIncrementalSyncScheduler();
logger.info('SyncEngine: Incremental sync scheduler started (will check configuration on each run)');
}
} else {
logger.info('SyncEngine: Service account token not configured, incremental sync disabled');
}
logger.info('SyncEngine: Initialized (using service account token for sync operations)');
}
@@ -163,14 +185,42 @@ class SyncEngine {
logger.info('SyncEngine: Starting full sync...');
try {
// Get object types sorted by sync priority
const objectTypes = getObjectTypesBySyncPriority();
// Check if configuration is complete
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const isConfigured = await schemaConfigurationService.isConfigurationComplete();
if (!isConfigured) {
throw new Error('Schema configuration not complete. Please configure at least one object type to be synced in the settings page.');
}
for (const typeDef of objectTypes) {
const typeStat = await this.syncObjectType(typeDef.typeName as CMDBObjectTypeName);
stats.push(typeStat);
totalObjects += typeStat.objectsProcessed;
totalRelations += typeStat.relationsExtracted;
// Get enabled object types from configuration
logger.info('SyncEngine: Fetching enabled object types from configuration...');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
logger.info(`SyncEngine: Found ${enabledTypes.length} enabled object types to sync`);
if (enabledTypes.length === 0) {
throw new Error('No object types enabled for syncing. Please enable at least one object type in the settings page.');
}
// Schema discovery will happen automatically when needed (e.g., for relation extraction)
// It's no longer required upfront - the user has already configured which object types to sync
logger.info('SyncEngine: Starting object sync for configured object types...');
// Sync each enabled object type
for (const enabledType of enabledTypes) {
try {
const typeStat = await this.syncConfiguredObjectType(enabledType);
stats.push(typeStat);
totalObjects += typeStat.objectsProcessed;
totalRelations += typeStat.relationsExtracted;
} catch (error) {
logger.error(`SyncEngine: Failed to sync ${enabledType.displayName}`, error);
stats.push({
objectType: enabledType.displayName,
objectsProcessed: 0,
relationsExtracted: 0,
duration: 0,
});
}
}
// Update sync metadata
@@ -205,81 +255,216 @@ class SyncEngine {
}
/**
* Sync a single object type
* Store an object and all its nested referenced objects recursively
* This method processes the entire object tree, storing all nested objects
* and extracting all relations, while preventing infinite loops with circular references.
*
* @param entry - The object entry to store (in ObjectEntry format from API)
* @param typeName - The type name of the object
* @param processedIds - Set of already processed object IDs (to prevent duplicates and circular refs)
* @returns Statistics about objects stored and relations extracted
*/
private async syncObjectType(typeName: CMDBObjectTypeName): Promise<SyncStats> {
private async storeObjectTree(
entry: ObjectEntry,
typeName: CMDBObjectTypeName,
processedIds: Set<string>
): Promise<{ objectsStored: number; relationsExtracted: number }> {
const entryId = String(entry.id);
// Skip if already processed (handles circular references)
if (processedIds.has(entryId)) {
logger.debug(`SyncEngine: Skipping already processed object ${entry.objectKey || entryId} of type ${typeName}`);
return { objectsStored: 0, relationsExtracted: 0 };
}
processedIds.add(entryId);
let objectsStored = 0;
let relationsExtracted = 0;
try {
logger.debug(`SyncEngine: [Recursive] Storing object tree for ${entry.objectKey || entryId} of type ${typeName} (depth: ${processedIds.size - 1})`);
// 1. Adapt and parse the object
const adapted = jiraAssetsClient.adaptObjectEntryToJiraAssetsObject(entry);
if (!adapted) {
logger.warn(`SyncEngine: Failed to adapt object ${entry.objectKey || entryId}`);
return { objectsStored: 0, relationsExtracted: 0 };
}
const parsed = await jiraAssetsClient.parseObject(adapted);
if (!parsed) {
logger.warn(`SyncEngine: Failed to parse object ${entry.objectKey || entryId}`);
return { objectsStored: 0, relationsExtracted: 0 };
}
// 2. Store the object
await cacheStore.upsertObject(typeName, parsed);
objectsStored++;
logger.debug(`SyncEngine: Stored object ${parsed.objectKey || parsed.id} of type ${typeName}`);
// 3. Schema discovery must be manually triggered via API endpoints
// No automatic discovery
// 4. Extract and store relations for this object
await cacheStore.extractAndStoreRelations(typeName, parsed);
relationsExtracted++;
logger.debug(`SyncEngine: Extracted relations for object ${parsed.objectKey || parsed.id}`);
// 5. Recursively process nested referenced objects
// Note: Lookup maps should already be initialized by getAllObjectsOfType
// Use a separate Set for extraction to avoid conflicts with storage tracking
const extractionProcessedIds = new Set<string>();
const nestedRefs = jiraAssetsClient.extractNestedReferencedObjects(
entry,
extractionProcessedIds, // Separate Set for extraction (prevents infinite loops in traversal)
5, // max depth
0 // current depth
);
if (nestedRefs.length > 0) {
logger.debug(`SyncEngine: [Recursive] Found ${nestedRefs.length} nested referenced objects for ${entry.objectKey || entryId}`);
// Group by type for better logging
const refsByType = new Map<string, number>();
for (const ref of nestedRefs) {
refsByType.set(ref.typeName, (refsByType.get(ref.typeName) || 0) + 1);
}
const typeSummary = Array.from(refsByType.entries())
.map(([type, count]) => `${count} ${type}`)
.join(', ');
logger.debug(`SyncEngine: [Recursive] Nested objects by type: ${typeSummary}`);
}
// 6. Recursively store each nested object
for (const { entry: nestedEntry, typeName: nestedTypeName } of nestedRefs) {
logger.debug(`SyncEngine: [Recursive] Processing nested object ${nestedEntry.objectKey || nestedEntry.id} of type ${nestedTypeName}`);
const nestedResult = await this.storeObjectTree(
nestedEntry,
nestedTypeName as CMDBObjectTypeName,
processedIds
);
objectsStored += nestedResult.objectsStored;
relationsExtracted += nestedResult.relationsExtracted;
}
logger.debug(`SyncEngine: [Recursive] Completed storing object tree for ${entry.objectKey || entryId}: ${objectsStored} objects, ${relationsExtracted} relations`);
return { objectsStored, relationsExtracted };
} catch (error) {
logger.error(`SyncEngine: Failed to store object tree for ${entry.objectKey || entryId}`, error);
return { objectsStored, relationsExtracted };
}
}
/**
* Sync a configured object type (from schema configuration)
*/
private async syncConfiguredObjectType(enabledType: {
schemaId: string;
objectTypeId: number;
objectTypeName: string;
displayName: string;
}): Promise<SyncStats> {
const startTime = Date.now();
let objectsProcessed = 0;
let relationsExtracted = 0;
try {
const typeDef = OBJECT_TYPES[typeName];
if (!typeDef) {
logger.warn(`SyncEngine: Unknown type ${typeName}`);
return { objectType: typeName, objectsProcessed: 0, relationsExtracted: 0, duration: 0 };
}
logger.info(`SyncEngine: Syncing ${enabledType.displayName} (${enabledType.objectTypeName}) from schema ${enabledType.schemaId}...`);
logger.debug(`SyncEngine: Syncing ${typeName}...`);
// Fetch all objects from Jira using the configured schema and object type
// This returns raw entries for recursive processing (includeAttributesDeep=2 provides nested data)
const { objects: jiraObjects, rawEntries } = await jiraAssetsClient.getAllObjectsOfType(
enabledType.displayName, // Use display name for Jira API
this.batchSize,
enabledType.schemaId
);
logger.info(`SyncEngine: Fetched ${jiraObjects.length} ${enabledType.displayName} objects from Jira (schema: ${enabledType.schemaId})`);
// Fetch all objects from Jira
const jiraObjects = await jiraAssetsClient.getAllObjectsOfType(typeName, this.batchSize);
logger.info(`SyncEngine: Fetched ${jiraObjects.length} ${typeName} objects from Jira`);
// Schema discovery must be manually triggered via API endpoints
// No automatic discovery
// Parse and cache objects
const parsedObjects: CMDBObject[] = [];
// Use objectTypeName for cache storage (PascalCase)
const typeName = enabledType.objectTypeName as CMDBObjectTypeName;
// Process each main object recursively using storeObjectTree
// This will store the object and all its nested referenced objects
const processedIds = new Set<string>(); // Track processed objects to prevent duplicates and circular refs
const failedObjects: Array<{ id: string; key: string; label: string; reason: string }> = [];
for (const jiraObj of jiraObjects) {
const parsed = jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
parsedObjects.push(parsed);
} else {
// Track objects that failed to parse
failedObjects.push({
id: jiraObj.id?.toString() || 'unknown',
key: jiraObj.objectKey || 'unknown',
label: jiraObj.label || 'unknown',
reason: 'parseObject returned null',
});
logger.warn(`SyncEngine: Failed to parse ${typeName} object: ${jiraObj.objectKey || jiraObj.id} (${jiraObj.label || 'unknown label'})`);
if (rawEntries && rawEntries.length > 0) {
logger.info(`SyncEngine: Processing ${rawEntries.length} ${enabledType.displayName} objects recursively...`);
for (const rawEntry of rawEntries) {
try {
const result = await this.storeObjectTree(rawEntry, typeName, processedIds);
objectsProcessed += result.objectsStored;
relationsExtracted += result.relationsExtracted;
} catch (error) {
const entryId = String(rawEntry.id);
failedObjects.push({
id: entryId,
key: rawEntry.objectKey || 'unknown',
label: rawEntry.label || 'unknown',
reason: error instanceof Error ? error.message : 'Unknown error',
});
logger.warn(`SyncEngine: Failed to store object tree for ${enabledType.displayName} object: ${rawEntry.objectKey || entryId} (${rawEntry.label || 'unknown label'})`, error);
}
}
} else {
// Fallback: if rawEntries not available, use adapted objects (less efficient, no recursion)
logger.warn(`SyncEngine: Raw entries not available, using fallback linear processing (no recursive nesting)`);
const parsedObjects: CMDBObject[] = [];
for (const jiraObj of jiraObjects) {
const parsed = await jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
parsedObjects.push(parsed);
} else {
failedObjects.push({
id: jiraObj.id?.toString() || 'unknown',
key: jiraObj.objectKey || 'unknown',
label: jiraObj.label || 'unknown',
reason: 'parseObject returned null',
});
logger.warn(`SyncEngine: Failed to parse ${enabledType.displayName} object: ${jiraObj.objectKey || jiraObj.id} (${jiraObj.label || 'unknown label'})`);
}
}
if (parsedObjects.length > 0) {
await cacheStore.batchUpsertObjects(typeName, parsedObjects);
objectsProcessed = parsedObjects.length;
// Extract relations
for (const obj of parsedObjects) {
await cacheStore.extractAndStoreRelations(typeName, obj);
relationsExtracted++;
}
}
}
// Log parsing statistics
if (failedObjects.length > 0) {
logger.warn(`SyncEngine: ${failedObjects.length} ${typeName} objects failed to parse:`, failedObjects.map(o => `${o.key} (${o.label})`).join(', '));
}
// Batch upsert to cache
if (parsedObjects.length > 0) {
await cacheStore.batchUpsertObjects(typeName, parsedObjects);
objectsProcessed = parsedObjects.length;
// Extract relations
for (const obj of parsedObjects) {
await cacheStore.extractAndStoreRelations(typeName, obj);
relationsExtracted++;
}
logger.warn(`SyncEngine: ${failedObjects.length} ${enabledType.displayName} objects failed to process:`, failedObjects.map(o => `${o.key} (${o.label}): ${o.reason}`).join(', '));
}
const duration = Date.now() - startTime;
const skippedCount = jiraObjects.length - objectsProcessed;
if (skippedCount > 0) {
logger.warn(`SyncEngine: Synced ${objectsProcessed}/${jiraObjects.length} ${typeName} objects in ${duration}ms (${skippedCount} skipped)`);
logger.warn(`SyncEngine: Synced ${objectsProcessed}/${jiraObjects.length} ${enabledType.displayName} objects in ${duration}ms (${skippedCount} skipped)`);
} else {
logger.debug(`SyncEngine: Synced ${objectsProcessed} ${typeName} objects in ${duration}ms`);
logger.debug(`SyncEngine: Synced ${objectsProcessed} ${enabledType.displayName} objects in ${duration}ms`);
}
return {
objectType: typeName,
objectType: enabledType.displayName,
objectsProcessed,
relationsExtracted,
duration,
};
} catch (error) {
logger.error(`SyncEngine: Failed to sync ${typeName}`, error);
logger.error(`SyncEngine: Failed to sync ${enabledType.displayName}`, error);
return {
objectType: typeName,
objectType: enabledType.displayName,
objectsProcessed,
relationsExtracted,
duration: Date.now() - startTime,
@@ -287,12 +472,27 @@ class SyncEngine {
}
}
/**
* Sync a single object type (legacy method, kept for backward compatibility)
*/
private async syncObjectType(typeName: CMDBObjectTypeName): Promise<SyncStats> {
// This method is deprecated - use syncConfiguredObjectType instead
logger.warn(`SyncEngine: syncObjectType(${typeName}) is deprecated, use configured object types instead`);
return {
objectType: typeName,
objectsProcessed: 0,
relationsExtracted: 0,
duration: 0,
};
}
// ==========================================================================
// Incremental Sync
// ==========================================================================
/**
* Start the incremental sync scheduler
* The scheduler will check configuration on each run and only sync if configuration is complete
*/
private startIncrementalSyncScheduler(): void {
if (this.incrementalTimer) {
@@ -300,9 +500,11 @@ class SyncEngine {
}
logger.info(`SyncEngine: Starting incremental sync scheduler (every ${this.incrementalInterval}ms)`);
logger.info('SyncEngine: Scheduler will only perform syncs when schema configuration is complete');
this.incrementalTimer = setInterval(() => {
if (!this.isSyncing && this.isRunning) {
// incrementalSync() will check if configuration is complete before syncing
this.incrementalSync().catch(err => {
logger.error('SyncEngine: Incremental sync failed', err);
});
@@ -324,6 +526,26 @@ class SyncEngine {
return { success: false, updatedCount: 0 };
}
// Check if configuration is complete before attempting sync
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const isConfigured = await schemaConfigurationService.isConfigurationComplete();
if (!isConfigured) {
// Don't log on every interval - only log once per minute to avoid spam
const now = Date.now();
if (!this.lastConfigCheck || now - this.lastConfigCheck > 60000) {
logger.debug('SyncEngine: Schema configuration not complete, skipping incremental sync. Please configure object types in settings.');
this.lastConfigCheck = now;
}
return { success: false, updatedCount: 0 };
}
// Get enabled object types - will be used later to filter updated objects
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
if (enabledTypes.length === 0) {
logger.debug('SyncEngine: No enabled object types, skipping incremental sync');
return { success: false, updatedCount: 0 };
}
if (this.isSyncing) {
return { success: false, updatedCount: 0 };
}
@@ -339,6 +561,15 @@ class SyncEngine {
logger.debug(`SyncEngine: Incremental sync since ${since.toISOString()}`);
// Get enabled object types to filter incremental sync
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const enabledTypeNames = new Set(enabledTypes.map(et => et.objectTypeName));
if (enabledTypeNames.size === 0) {
logger.debug('SyncEngine: No enabled object types, skipping incremental sync');
return { success: false, updatedCount: 0 };
}
// Fetch updated objects from Jira
const updatedObjects = await jiraAssetsClient.getUpdatedObjectsSince(since, this.batchSize);
@@ -368,15 +599,49 @@ class SyncEngine {
return { success: true, updatedCount: 0 };
}
let updatedCount = 0;
// Schema discovery must be manually triggered via API endpoints
// No automatic discovery
let updatedCount = 0;
const processedIds = new Set<string>(); // Track processed objects for recursive sync
// Filter updated objects to only process enabled object types
// Use recursive processing to handle nested references
for (const jiraObj of updatedObjects) {
const parsed = jiraAssetsClient.parseObject(jiraObj);
const parsed = await jiraAssetsClient.parseObject(jiraObj);
if (parsed) {
const typeName = parsed._objectType as CMDBObjectTypeName;
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
updatedCount++;
// Only sync if this object type is enabled
if (!enabledTypeNames.has(typeName)) {
logger.debug(`SyncEngine: Skipping ${typeName} in incremental sync - not enabled`);
continue;
}
// Get raw entry for recursive processing
const objectId = parsed.id;
try {
const entry = await jiraAssetsClient.getObjectEntry(objectId);
if (entry) {
// Use recursive storeObjectTree to process object and all nested references
const result = await this.storeObjectTree(entry, typeName, processedIds);
if (result.objectsStored > 0) {
updatedCount++;
logger.debug(`SyncEngine: Incremental sync processed ${objectId}: ${result.objectsStored} objects, ${result.relationsExtracted} relations`);
}
} else {
// Fallback to linear processing if raw entry not available
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
updatedCount++;
}
} catch (error) {
logger.warn(`SyncEngine: Failed to get raw entry for ${objectId}, using fallback`, error);
// Fallback to linear processing
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
updatedCount++;
}
}
}
@@ -404,6 +669,7 @@ class SyncEngine {
/**
* Trigger a sync for a specific object type
* Only syncs if the object type is enabled in configuration
* Allows concurrent syncs for different types, but blocks if:
* - A full sync is in progress
* - An incremental sync is in progress
@@ -420,10 +686,19 @@ class SyncEngine {
throw new Error(`Sync already in progress for ${typeName}`);
}
// Check if this type is enabled in configuration
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const enabledType = enabledTypes.find(et => et.objectTypeName === typeName);
if (!enabledType) {
throw new Error(`Object type ${typeName} is not enabled for syncing. Please enable it in the Schema Configuration settings page.`);
}
this.syncingTypes.add(typeName);
try {
return await this.syncObjectType(typeName);
return await this.syncConfiguredObjectType(enabledType);
} finally {
this.syncingTypes.delete(typeName);
}
@@ -431,20 +706,39 @@ class SyncEngine {
/**
* Force sync a single object
* Only syncs if the object type is enabled in configuration
* If the object was deleted from Jira, it will be removed from the local cache
* Uses recursive processing to store nested referenced objects
*/
async syncObject(typeName: CMDBObjectTypeName, objectId: string): Promise<boolean> {
try {
const jiraObj = await jiraAssetsClient.getObject(objectId);
if (!jiraObj) return false;
// Check if this type is enabled in configuration
const { schemaConfigurationService } = await import('./schemaConfigurationService.js');
const enabledTypes = await schemaConfigurationService.getEnabledObjectTypes();
const isEnabled = enabledTypes.some(et => et.objectTypeName === typeName);
const parsed = jiraAssetsClient.parseObject(jiraObj);
if (!parsed) return false;
if (!isEnabled) {
logger.warn(`SyncEngine: Cannot sync object ${objectId} - type ${typeName} is not enabled for syncing`);
return false;
}
await cacheStore.upsertObject(typeName, parsed);
await cacheStore.extractAndStoreRelations(typeName, parsed);
// Schema discovery must be manually triggered via API endpoints
// No automatic discovery
// Get raw ObjectEntry for recursive processing
const entry = await jiraAssetsClient.getObjectEntry(objectId);
if (!entry) return false;
// Use recursive storeObjectTree to process object and all nested references
const processedIds = new Set<string>();
const result = await this.storeObjectTree(entry, typeName, processedIds);
return true;
if (result.objectsStored > 0) {
logger.info(`SyncEngine: Synced object ${objectId} recursively: ${result.objectsStored} objects, ${result.relationsExtracted} relations`);
return true;
}
return false;
} catch (error) {
// If object was deleted from Jira, remove it from our cache
if (error instanceof JiraObjectNotFoundError) {