diff --git a/examples/mustache/mustache-java-r4-gen.ts b/examples/mustache/mustache-java-r4-gen.ts index 942bcac3..d96a4c6f 100644 --- a/examples/mustache/mustache-java-r4-gen.ts +++ b/examples/mustache/mustache-java-r4-gen.ts @@ -4,7 +4,7 @@ if (require.main === module) { console.log("📦 Generating FHIR R4 Core Types..."); const builder = new APIBuilder() - .setLogLevel("DEBUG") + .setLogLevel("debug") .throwException() .fromPackage("hl7.fhir.r4.core", "4.0.1") .outputTo("./examples/mustache/mustache-java-r4-output") diff --git a/examples/python/generate.ts b/examples/python/generate.ts index 3fb29250..babd9805 100644 --- a/examples/python/generate.ts +++ b/examples/python/generate.ts @@ -1,8 +1,14 @@ import { APIBuilder, prettyReport } from "../../src"; +import { type Logger, makeLogger } from "../../src/utils/logger"; console.log("📦 Generating FHIR R4 Core Types..."); -const builder = new APIBuilder() +const logger: Logger = makeLogger({ + prefix: "API", + suppressTags: ["FIELD_TYPE_NOT_FOUND", "LARGE_VALUESET"], +}); + +const builder = new APIBuilder({ logger }) .throwException() .fromPackage("hl7.fhir.r4.core", "4.0.1") .python({ diff --git a/src/api/builder.ts b/src/api/builder.ts index b3bba757..9b86355f 100644 --- a/src/api/builder.ts +++ b/src/api/builder.ts @@ -18,13 +18,7 @@ import type { IrConf, LogicalPromotionConf, TreeShakeConf } from "@root/typesche import { type Register, registerFromManager } from "@root/typeschema/register"; import { type PackageMeta, packageMetaToNpm } from "@root/typeschema/types"; import { mkTypeSchemaIndex, type TypeSchemaIndex } from "@root/typeschema/utils"; -import { - type CodegenLogger, - createLogger, - type LogLevel, - type LogLevelString, - parseLogLevel, -} from "@root/utils/codegen-logger"; +import { type Logger, type LogLevel, makeLogger } from "@root/utils/logger"; import { IntrospectionWriter, type IntrospectionWriterOptions } from "./writer-generator/introspection"; import { IrReportWriterWriter, type 
IrReportWriterWriterOptions } from "./writer-generator/ir-report"; import type { FileBasedMustacheGeneratorOptions } from "./writer-generator/mustache"; @@ -95,7 +89,7 @@ export interface LocalStructureDefinitionConfig { dependencies?: PackageMeta[]; } -const cleanup = async (opts: APIBuilderOptions, logger: CodegenLogger): Promise => { +const cleanup = async (opts: APIBuilderOptions, logger: Logger): Promise => { logger.info(`Cleaning outputs...`); try { logger.info(`Clean ${opts.outputDir}`); @@ -120,14 +114,14 @@ export class APIBuilder { localSDs: LocalPackageConfig[]; localTgzPackages: TgzPackageConfig[]; }; - private logger: CodegenLogger; + private logger: Logger; private generators: { name: string; writer: FileSystemWriter }[] = []; constructor( userOpts: Partial & { manager?: ReturnType; register?: Register; - logger?: CodegenLogger; + logger?: Logger; } = {}, ) { const defaultOpts: APIBuilderOptions = { @@ -137,7 +131,7 @@ export class APIBuilder { treeShake: undefined, promoteLogical: undefined, registry: undefined, - logLevel: parseLogLevel("INFO"), + logLevel: "info", dropCanonicalManagerCache: false, }; const opts: APIBuilderOptions = { @@ -167,7 +161,7 @@ export class APIBuilder { registry: userOpts.registry, dropCache: userOpts.dropCanonicalManagerCache, }); - this.logger = userOpts.logger ?? createLogger({ prefix: "API", level: opts.logLevel }); + this.logger = userOpts.logger ?? makeLogger({ prefix: "API", level: opts.logLevel }); this.options = opts; } @@ -328,8 +322,8 @@ export class APIBuilder { return this; } - setLogLevel(level: LogLevel | LogLevelString): APIBuilder { - this.logger?.setLevel(typeof level === "string" ? 
parseLogLevel(level) : level); + setLogLevel(level: LogLevel): APIBuilder { + this.logger?.setLevel(level); return this; } @@ -437,7 +431,7 @@ export class APIBuilder { this.logger.debug(`Generation completed: ${result.filesGenerated.length} files`); } catch (error) { - this.logger.error("Code generation failed", error instanceof Error ? error : new Error(String(error))); + this.logger.error(`Code generation failed: ${error instanceof Error ? error.message : String(error)}`); result.errors.push(error instanceof Error ? error.message : String(error)); if (this.options.throwException) throw error; } diff --git a/src/api/index.ts b/src/api/index.ts index 673f2ea4..12fddd7f 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -8,7 +8,7 @@ */ export type { IrConf, LogicalPromotionConf, TreeShakeConf } from "../typeschema/ir/types"; -export { LogLevel } from "../utils/codegen-logger"; +export type { LogLevel } from "../utils/logger"; export type { APIBuilderOptions, LocalStructureDefinitionConfig } from "./builder"; export { APIBuilder, prettyReport } from "./builder"; export type { CSharpGeneratorOptions } from "./writer-generator/csharp/csharp"; diff --git a/src/api/writer-generator/mustache.ts b/src/api/writer-generator/mustache.ts index cf270e68..754db89a 100644 --- a/src/api/writer-generator/mustache.ts +++ b/src/api/writer-generator/mustache.ts @@ -22,7 +22,7 @@ import type { ViewModel, } from "@mustache/types"; import type { TypeSchemaIndex } from "@root/typeschema/utils"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import { default as Mustache } from "mustache"; import { FileSystemWriter, type FileSystemWriterOptions } from "./writer"; @@ -57,7 +57,7 @@ export type MustacheGeneratorOptions = FileSystemWriterOptions & export function loadMustacheGeneratorConfig( templatePath: string, - logger?: CodegenLogger, + logger?: Logger, ): Partial { const filePath = Path.resolve(templatePath, 
"config.json"); try { diff --git a/src/api/writer-generator/writer.ts b/src/api/writer-generator/writer.ts index 33595498..0a6ffd07 100644 --- a/src/api/writer-generator/writer.ts +++ b/src/api/writer-generator/writer.ts @@ -2,12 +2,12 @@ import * as fs from "node:fs"; import * as fsPromises from "node:fs/promises"; import * as Path from "node:path"; import type { TypeSchemaIndex } from "@root/typeschema/utils"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; export type FileSystemWriterOptions = { outputDir: string; inMemoryOnly?: boolean; - logger?: CodegenLogger; + logger?: Logger; resolveAssets?: (fn: string) => string; }; @@ -36,7 +36,7 @@ export abstract class FileSystemWriter, G }, }, handler: async (argv) => { - const logger = createLogger({ + const logger = makeLogger({ prefix: "TypeSchema", }); try { - logger.step("Generating TypeSchema from FHIR packages"); + logger.info("Generating TypeSchema from FHIR packages"); logger.info(`Packages: ${argv.packages.join(", ")}`); logger.info(`Output: ${argv.output}`); @@ -113,7 +114,7 @@ export const generateTypeschemaCommand: CommandModule, G return { name: packageSpec, version: "latest" }; }); - logger.progress(`Processing packages: ${packageMetas.map((p) => `${p.name}@${p.version}`).join(", ")}`); + logger.info(`Processing packages: ${packageMetas.map((p) => `${p.name}@${p.version}`).join(", ")}`); // Create register from packages const register = await registerFromPackageMetas(packageMetas, { @@ -149,7 +150,7 @@ export const generateTypeschemaCommand: CommandModule, G const duration = Date.now() - startTime; complete(`Generated ${allSchemas.length} TypeSchema definitions`, duration, { schemas: allSchemas.length }); - logger.dim(`Output: ${outputPath}`); + logger.info(`Output: ${outputPath}`); if (argv.verbose) { logger.debug("Generated schemas:"); @@ -160,7 +161,7 @@ export const generateTypeschemaCommand: CommandModule, G list(schemaNames); } 
} catch (error) { - logger.error("Failed to generate TypeSchema", error instanceof Error ? error : new Error(String(error))); + logger.error(`Failed to generate TypeSchema: ${error instanceof Error ? error.message : String(error)}`); process.exit(1); } }, diff --git a/src/typeschema/core/binding.ts b/src/typeschema/core/binding.ts index e8bbb139..daa7de19 100644 --- a/src/typeschema/core/binding.ts +++ b/src/typeschema/core/binding.ts @@ -7,7 +7,7 @@ import assert from "node:assert"; import type { FHIRSchemaElement } from "@atomic-ehr/fhirschema"; import type { CodeSystem, CodeSystemConcept } from "@root/fhir-types/hl7-fhir-r4-core"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import type { Register } from "@typeschema/register"; import type { BindingTypeSchema, @@ -24,7 +24,7 @@ export function extractValueSetConceptsByUrl( register: Register, pkg: PackageMeta, valueSetUrl: CanonicalUrl, - logger?: CodegenLogger, + logger?: Logger, ): Concept[] | undefined { const cleanUrl = dropVersionFromUrl(valueSetUrl) || valueSetUrl; const valueSet = register.resolveVs(pkg, cleanUrl as CanonicalUrl); @@ -32,11 +32,7 @@ export function extractValueSetConceptsByUrl( return extractValueSetConcepts(register, valueSet, logger); } -function extractValueSetConcepts( - register: Register, - valueSet: RichValueSet, - _logger?: CodegenLogger, -): Concept[] | undefined { +function extractValueSetConcepts(register: Register, valueSet: RichValueSet, _logger?: Logger): Concept[] | undefined { if (valueSet.expansion?.contains) { return valueSet.expansion.contains .filter((item) => item.code !== undefined) @@ -106,7 +102,7 @@ export function buildEnum( register: Register, fhirSchema: RichFHIRSchema, element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ): EnumDefinition | undefined { if (!element.binding) return undefined; @@ -115,7 +111,7 @@ export function buildEnum( if (!valueSetUrl) return 
undefined; if (!BINDABLE_TYPES.has(element.type ?? "")) { - logger?.dryWarn(`eld-11: Binding on non-bindable type '${element.type}' (valueSet: ${valueSetUrl})`); + logger?.dryWarn("BINDING", `eld-11: Binding on non-bindable type '${element.type}' (valueSet: ${valueSetUrl})`); return undefined; } @@ -132,6 +128,7 @@ export function buildEnum( if (codes.length > MAX_ENUM_LENGTH) { logger?.dryWarn( + "LARGE_VALUESET", `Value set ${valueSetUrl} has ${codes.length} which is more than ${MAX_ENUM_LENGTH} codes, which may cause issues with code generation.`, ); return undefined; @@ -146,7 +143,7 @@ function generateBindingSchema( fhirSchema: RichFHIRSchema, path: string[], element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ): BindingTypeSchema | undefined { if (!element.binding?.valueSet) return undefined; @@ -171,7 +168,7 @@ function generateBindingSchema( export function collectBindingSchemas( register: Register, fhirSchema: RichFHIRSchema, - logger?: CodegenLogger, + logger?: Logger, ): BindingTypeSchema[] { const processedPaths = new Set(); if (!fhirSchema.elements) return []; diff --git a/src/typeschema/core/field-builder.ts b/src/typeschema/core/field-builder.ts index 0ba8ed5a..ad8d447a 100644 --- a/src/typeschema/core/field-builder.ts +++ b/src/typeschema/core/field-builder.ts @@ -6,7 +6,7 @@ import type { FHIRSchemaElement } from "@atomic-ehr/fhirschema"; import type { Register } from "@root/typeschema/register"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import { packageMetaToFhir } from "@typeschema/types"; import type { BindingIdentifier, @@ -126,7 +126,7 @@ export function buildFieldType( fhirSchema: RichFHIRSchema, path: string[], element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ): Identifier | undefined { if (element.elementReference) { const refPath = element.elementReference @@ -149,6 +149,7 @@ export function buildFieldType( // Some 
packages (e.g., simplifier.core.r4.*) have incomplete element definitions // Log a warning but continue processing instead of throwing logger?.dryWarn( + "FIELD_TYPE_NOT_FOUND", `Can't recognize element type: <${fhirSchema.url}>.${path.join(".")} (pkg: '${packageMetaToFhir(fhirSchema.package_meta)}'): missing type info`, ); return undefined; @@ -160,7 +161,7 @@ export const mkField = ( fhirSchema: RichFHIRSchema, path: string[], element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ): Field => { let binding: BindingIdentifier | undefined; let enumResult: EnumDefinition | undefined; @@ -175,7 +176,10 @@ export const mkField = ( const fieldType = buildFieldType(register, fhirSchema, path, element, logger); // TODO: should be an exception if (!fieldType) - logger?.dryWarn(`Field type not found for '${fhirSchema.url}#${path.join(".")}' (${fhirSchema.derivation})`); + logger?.dryWarn( + "FIELD_TYPE_NOT_FOUND", + `Field type not found for '${fhirSchema.url}#${path.join(".")}' (${fhirSchema.derivation})`, + ); return { type: fieldType as Identifier, required: isRequired(register, fhirSchema, path), @@ -221,7 +225,7 @@ export function mkNestedField( fhirSchema: RichFHIRSchema, path: string[], element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ): RegularField { const nestedIdentifier = mkNestedIdentifier(register, fhirSchema, path, logger); return { diff --git a/src/typeschema/core/nested-types.ts b/src/typeschema/core/nested-types.ts index 1a432665..ca52dacc 100644 --- a/src/typeschema/core/nested-types.ts +++ b/src/typeschema/core/nested-types.ts @@ -6,7 +6,7 @@ import type { FHIRSchema, FHIRSchemaElement } from "@atomic-ehr/fhirschema"; import type { Register } from "@root/typeschema/register"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import type { CanonicalUrl, Field, Identifier, Name, NestedIdentifier, NestedType, RichFHIRSchema } from "../types"; import 
{ isNestedElement, mkField, mkNestedField } from "./field-builder"; @@ -14,7 +14,7 @@ export function mkNestedIdentifier( register: Register, fhirSchema: RichFHIRSchema, path: string[], - logger?: CodegenLogger, + logger?: Logger, ): NestedIdentifier { // NOTE: profiles should no redefine types, they should reuse already defined in previous specializations const nestedTypeOrigins = {} as Record; @@ -69,7 +69,7 @@ function transformNestedElements( fhirSchema: RichFHIRSchema, parentPath: string[], elements: Record, - logger?: CodegenLogger, + logger?: Logger, ): Record { const fields: Record = {}; @@ -90,7 +90,7 @@ function transformNestedElements( export function mkNestedTypes( register: Register, fhirSchema: RichFHIRSchema, - logger?: CodegenLogger, + logger?: Logger, ): NestedType[] | undefined { if (!fhirSchema.elements) return undefined; diff --git a/src/typeschema/core/transformer.ts b/src/typeschema/core/transformer.ts index 4ca95440..4f09400f 100644 --- a/src/typeschema/core/transformer.ts +++ b/src/typeschema/core/transformer.ts @@ -6,7 +6,7 @@ import type { FHIRSchemaElement } from "@atomic-ehr/fhirschema"; import { shouldSkipCanonical } from "@root/typeschema/skip-hack"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import type { Register } from "@typeschema/register"; import { type CanonicalUrl, @@ -33,7 +33,7 @@ export function mkFields( fhirSchema: RichFHIRSchema, parentPath: string[], elements: Record | undefined, - logger?: CodegenLogger, + logger?: Logger, ): Record | undefined { if (!elements) return undefined; @@ -44,6 +44,7 @@ export function mkFields( const fcurl = elemSnapshot.type ? 
register.ensureSpecializationCanonicalUrl(elemSnapshot.type) : undefined; if (fcurl && shouldSkipCanonical(fhirSchema.package_meta, fcurl).shouldSkip) { logger?.warn( + "SKIP_CANONICAL", `Skipping field ${path} for ${fcurl} due to skip hack ${shouldSkipCanonical(fhirSchema.package_meta, fcurl).reason}`, ); continue; @@ -76,7 +77,7 @@ function extractFieldDependencies(fields: Record): Identifier[] { export async function transformValueSet( register: Register, valueSet: RichValueSet, - logger?: CodegenLogger, + logger?: Logger, ): Promise { if (!valueSet.url) throw new Error("ValueSet URL is required"); @@ -120,11 +121,7 @@ export function extractDependencies( return result.length > 0 ? result : undefined; } -function transformFhirSchemaResource( - register: Register, - fhirSchema: RichFHIRSchema, - logger?: CodegenLogger, -): TypeSchema[] { +function transformFhirSchemaResource(register: Register, fhirSchema: RichFHIRSchema, logger?: Logger): TypeSchema[] { const identifier = mkIdentifier(fhirSchema); let base: Identifier | undefined; @@ -173,7 +170,7 @@ function extractExtensionValueTypes( register: Register, fhirSchema: RichFHIRSchema, extensionUrl: CanonicalUrl, - logger?: CodegenLogger, + logger?: Logger, ): Identifier[] | undefined { const extensionSchema = register.resolveFs(fhirSchema.package_meta, extensionUrl); if (!extensionSchema?.elements) return undefined; @@ -193,7 +190,7 @@ function extractExtensionValueTypes( const extractLegacySubExtensions = ( register: Register, extensionSchema: RichFHIRSchema, - logger?: CodegenLogger, + logger?: Logger, ): ExtensionSubField[] => { const subExtensions: ExtensionSubField[] = []; if (!extensionSchema.elements) return subExtensions; @@ -265,7 +262,7 @@ const extractSubExtensions = ( register: Register, fhirSchema: RichFHIRSchema, extensionUrl: CanonicalUrl, - logger?: CodegenLogger, + logger?: Logger, ): ExtensionSubField[] | undefined => { const extensionSchema = register.resolveFs(fhirSchema.package_meta, 
extensionUrl); if (!extensionSchema?.elements) return undefined; @@ -277,11 +274,7 @@ const extractSubExtensions = ( return subExtensions.length > 0 ? subExtensions : undefined; }; -function extractProfileExtensions( - register: Register, - fhirSchema: RichFHIRSchema, - logger?: CodegenLogger, -): ProfileExtension[] { +function extractProfileExtensions(register: Register, fhirSchema: RichFHIRSchema, logger?: Logger): ProfileExtension[] { const extensions: ProfileExtension[] = []; const addExtensionEntry = (path: string[], name: string, schema: FHIRSchemaElement) => { @@ -333,7 +326,7 @@ function extractProfileExtensions( export async function transformFhirSchema( register: Register, fhirSchema: RichFHIRSchema, - logger?: CodegenLogger, + logger?: Logger, ): Promise { return transformFhirSchemaResource(register, fhirSchema, logger); } diff --git a/src/typeschema/index.ts b/src/typeschema/index.ts index 9aa625a6..46a1d32b 100644 --- a/src/typeschema/index.ts +++ b/src/typeschema/index.ts @@ -10,7 +10,7 @@ * - Validating TypeSchema documents */ -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import { transformFhirSchema, transformValueSet } from "./core/transformer"; import type { TypeSchemaCollisions } from "./ir/types"; import type { Register } from "./register"; @@ -33,10 +33,7 @@ type SchemaWithSource = { sourceCanonical: CanonicalUrl; }; -const deduplicateSchemas = ( - schemasWithSources: SchemaWithSource[], - logger?: CodegenLogger, -): GenerateTypeSchemasResult => { +const deduplicateSchemas = (schemasWithSources: SchemaWithSource[], logger?: Logger): GenerateTypeSchemasResult => { // key -> hash const groups: Record> = {}; @@ -62,7 +59,7 @@ const deduplicateSchemas = ( if (sorted.length > 1) { const pkg = best.typeSchema.identifier.package; const url = best.typeSchema.identifier.url; - logger?.dryWarn(`'${url}' from '${pkg}'' has ${sorted.length} versions`); + 
logger?.dryWarn("DUPLICATE_SCHEMA", `'${url}' from '${pkg}'' has ${sorted.length} versions`); collisions[pkg] ??= {}; collisions[pkg][url] = sorted.flatMap((v) => v.sources.map((s) => ({ @@ -77,10 +74,7 @@ const deduplicateSchemas = ( return { schemas, collisions }; }; -export const generateTypeSchemas = async ( - register: Register, - logger?: CodegenLogger, -): Promise => { +export const generateTypeSchemas = async (register: Register, logger?: Logger): Promise => { const schemasWithSources: { schema: TypeSchema; sourcePackage: PkgName; sourceCanonical: CanonicalUrl }[] = []; for (const fhirSchema of register.allFs()) { @@ -88,7 +82,7 @@ export const generateTypeSchemas = async ( const skipCheck = shouldSkipCanonical(fhirSchema.package_meta, fhirSchema.url); if (skipCheck.shouldSkip) { - logger?.dryWarn(`Skip ${fhirSchema.url} from ${pkgId}. Reason: ${skipCheck.reason}`); + logger?.dryWarn("SKIP_CANONICAL", `Skip ${fhirSchema.url} from ${pkgId}. Reason: ${skipCheck.reason}`); continue; } diff --git a/src/typeschema/ir/tree-shake.ts b/src/typeschema/ir/tree-shake.ts index fad2f2d5..b13229af 100644 --- a/src/typeschema/ir/tree-shake.ts +++ b/src/typeschema/ir/tree-shake.ts @@ -1,5 +1,5 @@ import assert from "node:assert"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import { extractDependencies } from "../core/transformer"; import { type CanonicalUrl, @@ -174,7 +174,7 @@ const mutableFillReport = (report: TreeShakeReport, tsIndex: TypeSchemaIndex, sh } }; -export const treeShakeTypeSchema = (schema: TypeSchema, rule: TreeShakeRule, _logger?: CodegenLogger): TypeSchema => { +export const treeShakeTypeSchema = (schema: TypeSchema, rule: TreeShakeRule, _logger?: Logger): TypeSchema => { schema = JSON.parse(JSON.stringify(schema)); if (isPrimitiveTypeSchema(schema) || isValueSetTypeSchema(schema) || isBindingSchema(schema)) return schema; diff --git a/src/typeschema/register.ts 
b/src/typeschema/register.ts index 166ccfbc..4176a9cf 100644 --- a/src/typeschema/register.ts +++ b/src/typeschema/register.ts @@ -7,7 +7,7 @@ import { type StructureDefinition, } from "@atomic-ehr/fhirschema"; import { type CodeSystem, isCodeSystem, isValueSet, type ValueSet } from "@root/fhir-types/hl7-fhir-r4-core"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import type { CanonicalUrl, Name, @@ -87,7 +87,7 @@ const mkPackageAwareResolver = async ( pkg: PackageMeta, deep: number, acc: PackageAwareResolver, - logger?: CodegenLogger, + logger?: Logger, ): Promise => { const pkgId = packageMetaToFhir(pkg); logger?.info(`${" ".repeat(deep * 2)}+ ${pkgId}`); @@ -99,7 +99,8 @@ const mkPackageAwareResolver = async ( if (!rawUrl) continue; if (!(isStructureDefinition(resource) || isValueSet(resource) || isCodeSystem(resource))) continue; const url = rawUrl as CanonicalUrl; - if (index.canonicalResolution[url]) logger?.dryWarn(`Duplicate canonical URL: ${url} at ${pkgId}.`); + if (index.canonicalResolution[url]) + logger?.dryWarn("DUPLICATE_CANONICAL", `Duplicate canonical URL: ${url} at ${pkgId}.`); index.canonicalResolution[url] = [{ deep, pkg: pkg, pkgId, resource: resource as FocusedResource }]; } @@ -119,7 +120,7 @@ const mkPackageAwareResolver = async ( return index; }; -const enrichResolver = (resolver: PackageAwareResolver, logger?: CodegenLogger) => { +const enrichResolver = (resolver: PackageAwareResolver, logger?: Logger) => { for (const { pkg, canonicalResolution } of Object.values(resolver)) { const pkgId = packageMetaToFhir(pkg); if (!resolver[pkgId]) throw new Error(`Package ${pkgId} not found`); @@ -145,11 +146,7 @@ const enrichResolver = (resolver: PackageAwareResolver, logger?: CodegenLogger) } }; -const packageAgnosticResolveCanonical = ( - resolver: PackageAwareResolver, - url: CanonicalUrl, - _logger?: CodegenLogger, -) => { +const packageAgnosticResolveCanonical = (resolver: 
PackageAwareResolver, url: CanonicalUrl, _logger?: Logger) => { const options = Object.values(resolver).flatMap((pkg) => pkg.canonicalResolution[url]); if (!options) throw new Error(`No canonical resolution found for ${url} in any package`); // if (options.length > 1) @@ -164,7 +161,7 @@ const packageAgnosticResolveCanonical = ( }; export type RegisterConfig = { - logger?: CodegenLogger; + logger?: Logger; focusedPackages?: PackageMeta[]; /** Custom FHIR package registry URL */ registry?: string; @@ -361,7 +358,7 @@ export const registerFromPackageMetas = async ( conf: RegisterConfig, ): Promise => { const packageNames = packageMetas.map(packageMetaToNpm); - conf?.logger?.step(`Loading FHIR packages: ${packageNames.join(", ")}`); + conf?.logger?.info(`Loading FHIR packages: ${packageNames.join(", ")}`); const manager = CanonicalManager({ packages: packageNames, workingDir: "tmp/fhir", diff --git a/src/typeschema/types.ts b/src/typeschema/types.ts index 09143c34..3edce3cf 100644 --- a/src/typeschema/types.ts +++ b/src/typeschema/types.ts @@ -388,7 +388,7 @@ export const enrichValueSet = (vs: ValueSet, packageMeta: PackageMeta): RichValu /////////////////////////////////////////////////////////// export interface TypeschemaGeneratorOptions { - logger?: import("../utils/codegen-logger").CodegenLogger; + logger?: import("../utils/logger").Logger; treeshake?: string[]; manager: ReturnType; /** Custom FHIR package registry URL */ diff --git a/src/typeschema/utils.ts b/src/typeschema/utils.ts index 4e9b6e1a..479fa768 100644 --- a/src/typeschema/utils.ts +++ b/src/typeschema/utils.ts @@ -1,6 +1,6 @@ import * as afs from "node:fs/promises"; import * as Path from "node:path"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import * as YAML from "yaml"; import type { IrReport } from "./ir/types"; import type { Register } from "./register"; @@ -199,7 +199,7 @@ export const mkTypeSchemaIndex = ( irReport = {}, 
}: { register?: Register; - logger?: CodegenLogger; + logger?: Logger; irReport?: IrReport; }, ): TypeSchemaIndex => { @@ -263,6 +263,7 @@ export const mkTypeSchemaIndex = ( const resolved = resolve(base); if (!resolved) { logger?.warn( + "RESOLVE_BASE", `Failed to resolve base type: ${res.map((e) => `${e.identifier.url} (${e.identifier.kind})`).join(", ")}`, ); return undefined; diff --git a/src/utils/logger.ts b/src/utils/logger.ts new file mode 100644 index 00000000..75e24744 --- /dev/null +++ b/src/utils/logger.ts @@ -0,0 +1,160 @@ +type TagsOf = L extends Logger ? T : never; + +export type ExtendLogger> = Logger | Extra>; + +export type LogLevel = "info" | "warn" | "error" | "debug"; + +export type LogEntry = { + level: LogLevel; + tag?: T; + message: string; + suppressed: boolean; + prefix: string; + timestamp: number; +}; + +export type LoggerOptions = { + prefix?: string; + suppressTags?: T[]; + level?: LogLevel; +}; + +export type TaggedLogFn = { + (msg: string): void; + (tag: T, msg: string): void; +}; + +export type Logger = { + warn: TaggedLogFn; + dryWarn: TaggedLogFn; + info: TaggedLogFn; + error: TaggedLogFn; + debug: TaggedLogFn; + + fork(prefix: string, opts?: Partial>): Logger; + as(): Logger; + + suppress(...tags: T[]): void; + setLevel(level: LogLevel): void; + tagCounts(): ReadonlyMap; + printSuppressedSummary(): void; + + buffer(): readonly LogEntry[]; + bufferClear(): void; +}; + +const LEVEL_PRIORITY: Record = { debug: 0, info: 1, warn: 2, error: 3 }; + +export function makeLogger(opts: LoggerOptions = {}): Logger { + const prefix = opts.prefix ?? ""; + const suppressedSet = new Set(opts.suppressTags ?? []); + const tagCountsMap = new Map(); + const entries: LogEntry[] = []; + const drySet = new Set(); + let currentLevel: LogLevel = opts.level ?? 
"info"; + + const shouldLog = (level: LogLevel): boolean => LEVEL_PRIORITY[level] >= LEVEL_PRIORITY[currentLevel]; + + const yellow = (s: string) => `\x1b[33m${s}\x1b[0m`; + const red = (s: string) => `\x1b[31m${s}\x1b[0m`; + const colorize: Record string> = { + debug: (s) => s, + info: (s) => s, + warn: yellow, + error: red, + }; + + const fmt = (level: LogLevel, icon: string, msg: string, tag?: string) => { + const pfx = prefix ? `[${prefix}] ` : ""; + const tagStr = tag ? `[${tag}] ` : ""; + return colorize[level](`${icon} ${pfx}${tagStr}${msg}`); + }; + + const pushEntry = (level: LogLevel, msg: string, tag?: T, suppressed = false) => { + entries.push({ level, tag, message: msg, suppressed, prefix, timestamp: Date.now() }); + }; + + const parseArgs = (a: string, b?: string): { tag?: T; msg: string } => { + if (b !== undefined) return { tag: a as T, msg: b }; + return { msg: a }; + }; + + const mkLogFn = (level: LogLevel, icon: string, consoleFn: (...args: any[]) => void): TaggedLogFn => { + return ((a: string, b?: string) => { + const { tag, msg } = parseArgs(a, b); + if (tag) tagCountsMap.set(tag, (tagCountsMap.get(tag) ?? 0) + 1); + const isSuppressed = tag !== undefined && suppressedSet.has(tag); + pushEntry(level, msg, tag, isSuppressed); + if (isSuppressed) return; + if (!shouldLog(level)) return; + consoleFn(fmt(level, icon, msg, tag)); + }) as TaggedLogFn; + }; + + const mkDryLogFn = (level: LogLevel, icon: string, consoleFn: (...args: any[]) => void): TaggedLogFn => { + return ((a: string, b?: string) => { + const { tag, msg } = parseArgs(a, b); + if (tag) tagCountsMap.set(tag, (tagCountsMap.get(tag) ?? 0) + 1); + const isSuppressed = tag !== undefined && suppressedSet.has(tag); + pushEntry(level, msg, tag, isSuppressed); + if (isSuppressed) return; + if (!shouldLog(level)) return; + const dedupeKey = `${level}::${tag ?? 
""}::${msg}`; + if (drySet.has(dedupeKey)) return; + drySet.add(dedupeKey); + consoleFn(fmt(level, icon, msg, tag)); + }) as TaggedLogFn; + }; + + const logger: Logger = { + warn: mkLogFn("warn", "!", console.warn), + dryWarn: mkDryLogFn("warn", "!", console.warn), + info: mkLogFn("info", "i", console.log), + error: mkLogFn("error", "X", console.error), + debug: mkLogFn("debug", "D", console.log), + + fork(childPrefix: string, childOpts?: Partial>): Logger { + const fullPrefix = prefix ? `${prefix}:${childPrefix}` : childPrefix; + return makeLogger({ + prefix: fullPrefix, + suppressTags: [...((opts.suppressTags ?? []) as unknown as C[]), ...(childOpts?.suppressTags ?? [])], + level: childOpts?.level ?? currentLevel, + }); + }, + + as(): Logger { + return logger as unknown as Logger; + }, + + suppress(...tags: T[]) { + for (const tag of tags) suppressedSet.add(tag); + }, + + setLevel(level: LogLevel) { + currentLevel = level; + }, + + tagCounts(): ReadonlyMap { + return tagCountsMap; + }, + + printSuppressedSummary() { + const suppressed = [...tagCountsMap.entries()] + .filter(([tag]) => suppressedSet.has(tag)) + .map(([tag, count]) => `${tag}: ${count}`); + if (suppressed.length > 0) { + logger.info(`Suppressed: ${suppressed.join(", ")}`); + } + }, + + buffer(): readonly LogEntry[] { + return entries; + }, + + bufferClear() { + entries.length = 0; + }, + }; + + return logger; +} diff --git a/test/api/mustache.test.ts b/test/api/mustache.test.ts index 02a42c5c..b1548ed2 100644 --- a/test/api/mustache.test.ts +++ b/test/api/mustache.test.ts @@ -4,7 +4,7 @@ import { r4Manager } from "@typeschema-test/utils"; describe("Mustache Template Based Generation", async () => { const report = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .mustache("./examples/mustache/java", { debug: "COMPACT", inMemoryOnly: true, diff --git a/test/api/write-generator/csharp.test.ts b/test/api/write-generator/csharp.test.ts index 
c4031e3a..9964d197 100644 --- a/test/api/write-generator/csharp.test.ts +++ b/test/api/write-generator/csharp.test.ts @@ -4,7 +4,7 @@ import { r4Manager } from "@typeschema-test/utils"; describe("C# Writer Generator", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .csharp({ inMemoryOnly: true, }) diff --git a/test/api/write-generator/introspection.test.ts b/test/api/write-generator/introspection.test.ts index 70779a7e..f68cae92 100644 --- a/test/api/write-generator/introspection.test.ts +++ b/test/api/write-generator/introspection.test.ts @@ -4,7 +4,7 @@ import { r4Manager } from "@typeschema-test/utils"; describe("IntrospectionWriter - Fhir Schema Output", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .introspection({ fhirSchemas: "introspection" }) .introspection({ fhirSchemas: "introspection.ndjson" }) .generate(); @@ -28,7 +28,7 @@ describe("IntrospectionWriter - Fhir Schema Output", async () => { describe("IntrospectionWriter - TypeSchema output", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .typeSchema({ treeShake: { "hl7.fhir.r4.core": { @@ -68,7 +68,7 @@ describe("IntrospectionWriter - TypeSchema output", async () => { describe("IntrospectionWriter - typeTree", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .typeSchema({ treeShake: { "hl7.fhir.r4.core": { @@ -94,7 +94,7 @@ describe("IntrospectionWriter - typeTree", async () => { describe("IntrospectionWriter - StructureDefinition output", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .typeSchema({ treeShake: { "hl7.fhir.r4.core": { diff --git a/test/api/write-generator/python.test.ts b/test/api/write-generator/python.test.ts 
index 265c9530..074df501 100644 --- a/test/api/write-generator/python.test.ts +++ b/test/api/write-generator/python.test.ts @@ -4,7 +4,7 @@ import { r4Manager } from "@typeschema-test/utils"; describe("Python Writer Generator", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .python({ inMemoryOnly: true, }) diff --git a/test/api/write-generator/typescript.test.ts b/test/api/write-generator/typescript.test.ts index f84c98cd..778ea7a9 100644 --- a/test/api/write-generator/typescript.test.ts +++ b/test/api/write-generator/typescript.test.ts @@ -1,24 +1,12 @@ import { describe, expect, it } from "bun:test"; import { APIBuilder } from "@root/api/builder"; import type { CanonicalUrl } from "@root/typeschema/types"; -import { CodegenLogger, LogLevel } from "@root/utils/codegen-logger"; +import { makeLogger } from "@root/utils/logger"; import { ccdaManager, r4Manager } from "@typeschema-test/utils"; -/** Creates a logger that captures all warnings for testing */ -const createCapturingLogger = () => { - const warnings: string[] = []; - const logger = new CodegenLogger({ level: LogLevel.WARN }); - const originalWarn = logger.warn.bind(logger); - logger.warn = (message: string) => { - warnings.push(message); - originalWarn(message); - }; - return { logger, warnings }; -}; - describe("TypeScript Writer Generator", async () => { const result = await new APIBuilder({ register: r4Manager }) - .setLogLevel("SILENT") + .setLogLevel("error") .typescript({ inMemoryOnly: true, }) @@ -42,7 +30,7 @@ describe("TypeScript Writer Generator", async () => { describe("TypeScript CDA with Logical Model Promotion to Resource", async () => { const result = await new APIBuilder({ register: ccdaManager }) - .setLogLevel("SILENT") + .setLogLevel("error") .typeSchema({ promoteLogical: { "hl7.cda.uv.core": ["http://hl7.org/cda/stds/core/StructureDefinition/Material" as CanonicalUrl], @@ -64,10 +52,9 @@ describe("TypeScript 
CDA with Logical Model Promotion to Resource", async () => }); describe("TypeScript R4 Example (with generateProfile)", async () => { - const { logger, warnings } = createCapturingLogger(); + const logger = makeLogger({ level: "error" }); const result = await new APIBuilder({ register: r4Manager, logger }) - .setLogLevel("SILENT") .typescript({ inMemoryOnly: true, withDebugComment: false, @@ -81,7 +68,9 @@ describe("TypeScript R4 Example (with generateProfile)", async () => { }); it("has no file rewrite warnings", () => { - const rewriteWarnings = warnings.filter((w) => w.includes("File will be rewritten")); + const rewriteWarnings = logger + .buffer() + .filter((e) => e.level === "warn" && e.message.includes("File will be rewritten")); expect(rewriteWarnings).toEqual([]); }); }); diff --git a/test/unit/typeschema/field-builder.test.ts b/test/unit/typeschema/field-builder.test.ts index ab33c93d..de4db42d 100644 --- a/test/unit/typeschema/field-builder.test.ts +++ b/test/unit/typeschema/field-builder.test.ts @@ -1,7 +1,7 @@ import { describe, expect, it } from "bun:test"; import type { FHIRSchemaElement } from "@atomic-ehr/fhirschema"; import type { Register } from "@root/typeschema/register"; -import type { CodegenLogger } from "@root/utils/codegen-logger"; +import type { Logger } from "@root/utils/logger"; import { isNestedElement, mkField, mkNestedField } from "@typeschema/core/field-builder"; import type { ChoiceFieldDeclaration, Name, PackageMeta, RegularField } from "@typeschema/types"; import { mkR4Register, type PFS, registerFs } from "@typeschema-test/utils"; @@ -16,7 +16,7 @@ const registerAndMkNestedField = ( fhirSchema: PFS, path: string[], element: FHIRSchemaElement, - logger?: CodegenLogger, + logger?: Logger, ) => { const rfs = registerFs(register, fhirSchema); return mkNestedField(register, rfs, path, element, logger); diff --git a/test/unit/typeschema/utils.ts b/test/unit/typeschema/utils.ts index ba4bcc08..69f3de1b 100644 --- 
a/test/unit/typeschema/utils.ts +++ b/test/unit/typeschema/utils.ts @@ -2,7 +2,7 @@ import type { FHIRSchema } from "@atomic-ehr/fhirschema"; import type { ValueSet } from "@root/fhir-types/hl7-fhir-r4-core"; import { generateTypeSchemas } from "@root/typeschema"; import { mkTypeSchemaIndex } from "@root/typeschema/utils"; -import { type CodegenLogger, createLogger } from "@root/utils/codegen-logger"; +import { type Logger, makeLogger } from "@root/utils/logger"; import { transformFhirSchema, transformValueSet } from "@typeschema/core/transformer"; import { type Register, registerFromPackageMetas } from "@typeschema/register"; import { type CanonicalUrl, enrichFHIRSchema, enrichValueSet, type PackageMeta } from "@typeschema/types"; @@ -10,9 +10,9 @@ import { type CanonicalUrl, enrichFHIRSchema, enrichValueSet, type PackageMeta } export type PFS = Partial; export type PVS = Partial; -const logger = createLogger({ prefix: "TEST" }); +const logger = makeLogger({ prefix: "TEST" }); -export const mkIndex = async (register: Register, logger?: CodegenLogger) => { +export const mkIndex = async (register: Register, logger?: Logger) => { const { schemas } = await generateTypeSchemas(register, logger); return mkTypeSchemaIndex(schemas, { register, logger }); }; diff --git a/test/unit/utils/logger.test.ts b/test/unit/utils/logger.test.ts new file mode 100644 index 00000000..8b0e1f9b --- /dev/null +++ b/test/unit/utils/logger.test.ts @@ -0,0 +1,433 @@ +import { beforeEach, describe, expect, it, mock } from "bun:test"; +import { type ExtendLogger, type LogEntry, type Logger, makeLogger } from "@root/utils/logger"; + +type BufferFilter = { level?: string; tag?: T; suppressed?: boolean }; + +const bufferFilter = (logger: Logger, filter: BufferFilter): LogEntry[] => + logger.buffer().filter((e) => { + if (filter.level !== undefined && e.level !== filter.level) return false; + if (filter.tag !== undefined && e.tag !== filter.tag) return false; + if (filter.suppressed !== undefined 
&& e.suppressed !== filter.suppressed) return false; + return true; + }); + +type TestTags = "TAG_A" | "TAG_B" | "TAG_C"; + +describe("makeLogger", () => { + let logger: Logger; + + beforeEach(() => { + logger = makeLogger({ prefix: "test" }); + mock.module("console", () => ({})); // silence console in tests + }); + + describe("untagged logging", () => { + it("buffers info messages", () => { + logger.info("hello"); + const entry = logger.buffer()[0]; + expect(entry).toBeDefined(); + expect(entry?.level).toBe("info"); + expect(entry?.message).toBe("hello"); + expect(entry?.tag).toBeUndefined(); + expect(entry?.suppressed).toBe(false); + expect(entry?.prefix).toBe("test"); + }); + + it("untagged messages are never suppressed", () => { + const l = makeLogger({ suppressTags: ["TAG_A", "TAG_B", "TAG_C"] }); + l.info("still visible"); + l.warn("still visible"); + l.error("still visible"); + l.debug("still visible"); + expect(bufferFilter(l, { suppressed: true })).toHaveLength(0); + expect(l.buffer()).toHaveLength(4); + }); + }); + + describe("tagged logging", () => { + it("buffers tagged messages with tag field set", () => { + logger.info("TAG_A", "tagged info"); + const entry = logger.buffer()[0]; + expect(entry).toBeDefined(); + expect(entry?.tag).toBe("TAG_A"); + expect(entry?.message).toBe("tagged info"); + expect(entry?.level).toBe("info"); + }); + + it("works for all log levels", () => { + logger.info("TAG_A", "i"); + logger.warn("TAG_B", "w"); + logger.error("TAG_C", "e"); + logger.debug("TAG_A", "d"); + expect(logger.buffer().map((e) => e.level)).toEqual(["info", "warn", "error", "debug"]); + expect(logger.buffer().every((e) => e.tag !== undefined)).toBe(true); + }); + + it("increments tag counts", () => { + logger.warn("TAG_A", "one"); + logger.warn("TAG_A", "two"); + logger.info("TAG_B", "three"); + expect(logger.tagCounts().get("TAG_A")).toBe(2); + expect(logger.tagCounts().get("TAG_B")).toBe(1); + expect(logger.tagCounts().has("TAG_C")).toBe(false); + }); + + 
it("does not increment tag counts for untagged messages", () => { + logger.info("no tag"); + expect(logger.tagCounts().size).toBe(0); + }); + }); + + describe("suppression", () => { + it("suppresses tagged messages matching suppressTags", () => { + const l = makeLogger({ suppressTags: ["TAG_A"] }); + l.warn("TAG_A", "suppressed"); + l.warn("TAG_B", "visible"); + + expect(l.buffer()).toHaveLength(2); + expect(l.buffer()[0]?.suppressed).toBe(true); + expect(l.buffer()[1]?.suppressed).toBe(false); + }); + + it("still counts suppressed tags", () => { + const l = makeLogger({ suppressTags: ["TAG_A"] }); + l.warn("TAG_A", "one"); + l.warn("TAG_A", "two"); + expect(l.tagCounts().get("TAG_A")).toBe(2); + }); + + it("suppress() adds tags at runtime", () => { + logger.warn("TAG_B", "before"); + expect(logger.buffer()[0]?.suppressed).toBe(false); + + logger.suppress("TAG_B"); + logger.warn("TAG_B", "after"); + expect(logger.buffer()[1]?.suppressed).toBe(true); + }); + }); + + describe("dryWarn deduplication", () => { + it("deduplicates identical tag+message pairs", () => { + logger.dryWarn("TAG_A", "same"); + logger.dryWarn("TAG_A", "same"); + logger.dryWarn("TAG_A", "same"); + // all 3 buffered + expect(logger.buffer()).toHaveLength(3); + // only the first reaches the console; repeats are deduped at console-output level + // all 3 buffer entries are marked suppressed=false since TAG_A is not in suppressTags + expect(bufferFilter(logger, { suppressed: false })).toHaveLength(3); + expect(logger.tagCounts().get("TAG_A")).toBe(3); + }); + + it("different messages are not deduped", () => { + logger.dryWarn("TAG_A", "msg1"); + logger.dryWarn("TAG_A", "msg2"); + expect(logger.buffer()).toHaveLength(2); + }); + + it("same message with different tags are not deduped", () => { + logger.dryWarn("TAG_A", "same"); + logger.dryWarn("TAG_B", "same"); + expect(logger.buffer()).toHaveLength(2); + }); + + it("untagged dryWarn deduplicates by message", () => { + logger.dryWarn("same msg"); + 
logger.dryWarn("same msg"); + logger.dryWarn("different msg"); + expect(logger.buffer()).toHaveLength(3); + }); + }); + + describe("fork", () => { + it("creates child with combined prefix", () => { + const child = logger.fork("child"); + child.info("hello"); + expect(child.buffer()[0]?.prefix).toBe("test:child"); + }); + + it("creates child from root without parent prefix", () => { + const root = makeLogger({}); + const child = root.fork("child"); + child.info("hello"); + expect(child.buffer()[0]?.prefix).toBe("child"); + }); + + it("inherits parent suppressTags", () => { + const parent = makeLogger({ suppressTags: ["TAG_A"] }); + const child = parent.fork("child"); + child.warn("TAG_A", "inherited suppression"); + expect(child.buffer()[0]?.suppressed).toBe(true); + }); + + it("adds child-specific suppressTags", () => { + const parent = makeLogger({ suppressTags: ["TAG_A"] }); + const child = parent.fork("child", { suppressTags: ["TAG_B"] }); + child.warn("TAG_A", "from parent"); + child.warn("TAG_B", "from child"); + child.warn("TAG_C", "not suppressed"); + expect(bufferFilter(child, { suppressed: true })).toHaveLength(2); + expect(child.buffer()[2]?.suppressed).toBe(false); + }); + + it("child has independent buffer", () => { + const child = logger.fork("child"); + logger.info("parent"); + child.info("child"); + expect(logger.buffer()).toHaveLength(1); + expect(child.buffer()).toHaveLength(1); + expect(logger.buffer()[0]?.message).toBe("parent"); + expect(child.buffer()[0]?.message).toBe("child"); + }); + + it("child has independent tag counts", () => { + const child = logger.fork("child"); + logger.warn("TAG_A", "parent"); + child.warn("TAG_A", "child"); + child.warn("TAG_A", "child2"); + expect(logger.tagCounts().get("TAG_A")).toBe(1); + expect(child.tagCounts().get("TAG_A")).toBe(2); + }); + + it("narrows tag set on fork", () => { + type Narrow = "TAG_A"; + const child = logger.fork("narrow"); + child.warn("TAG_A", "valid"); + 
expect(child.buffer()[0]?.tag).toBe("TAG_A"); + }); + }); + + describe("as (narrowing)", () => { + it("returns the same logger instance with narrowed type", () => { + type Narrow = "TAG_A" | "TAG_B"; + const narrow = logger.as(); + narrow.warn("TAG_A", "works"); + expect(logger.buffer()).toHaveLength(1); + expect(narrow.buffer()).toHaveLength(1); + }); + + it("narrowed logger inherits suppression from original", () => { + const parent = makeLogger({ suppressTags: ["TAG_A"] }); + type Narrow = "TAG_A"; + const narrow = parent.as(); + narrow.warn("TAG_A", "suppressed via parent"); + expect(narrow.buffer()[0]?.suppressed).toBe(true); + }); + + it("suppress on narrowed logger affects original", () => { + type Narrow = "TAG_A" | "TAG_B"; + const narrow = logger.as(); + narrow.suppress("TAG_A"); + logger.warn("TAG_A", "should be suppressed"); + expect(logger.buffer()[0]?.suppressed).toBe(true); + }); + }); + + describe("ExtendLogger (extending)", () => { + type BaseTags = "BASE_A" | "BASE_B"; + type ExtraTags = "EXTRA_X" | "EXTRA_Y"; + type Combined = ExtendLogger>; + + it("extended logger accepts both base and extra tags", () => { + const l: Combined = makeLogger({}); + l.warn("BASE_A", "base tag"); + l.warn("EXTRA_X", "extra tag"); + expect(l.buffer()).toHaveLength(2); + expect(l.buffer()[0]?.tag).toBe("BASE_A"); + expect(l.buffer()[1]?.tag).toBe("EXTRA_X"); + }); + + it("extended logger suppresses both base and extra tags", () => { + const l: Combined = makeLogger({ + suppressTags: ["BASE_A", "EXTRA_X"], + }); + l.warn("BASE_A", "suppressed base"); + l.warn("BASE_B", "visible base"); + l.warn("EXTRA_X", "suppressed extra"); + l.warn("EXTRA_Y", "visible extra"); + expect(bufferFilter(l, { suppressed: true })).toHaveLength(2); + expect(bufferFilter(l, { suppressed: false })).toHaveLength(2); + }); + + it("base logger can be passed where extended is expected via as()", () => { + const base = makeLogger({}); + const extended = base.as(); + extended.warn("EXTRA_X", "works 
at runtime"); + expect(base.buffer()).toHaveLength(1); + expect(extended.buffer()[0]?.tag).toBe("EXTRA_X"); + }); + + it("fork from extended logger can narrow to base tags", () => { + const extended: Combined = makeLogger({ + prefix: "root", + suppressTags: ["BASE_A"], + }); + const child = extended.fork("child"); + child.warn("BASE_A", "suppressed from parent"); + child.warn("BASE_B", "visible"); + expect(bufferFilter(child, { suppressed: true })).toHaveLength(1); + expect(child.buffer()[0]?.tag).toBe("BASE_A"); + expect(child.buffer()[1]?.prefix).toBe("root:child"); + }); + }); + + describe("buffer", () => { + it("returns entries in insertion order", () => { + logger.info("first"); + logger.warn("second"); + logger.error("third"); + expect(logger.buffer().map((e) => e.message)).toEqual(["first", "second", "third"]); + }); + + it("includes timestamp", () => { + const before = Date.now(); + logger.info("timed"); + const after = Date.now(); + const ts = logger.buffer()[0]?.timestamp; + expect(ts).toBeGreaterThanOrEqual(before); + expect(ts).toBeLessThanOrEqual(after); + }); + }); + + describe("bufferFilter", () => { + beforeEach(() => { + const l = makeLogger({ prefix: "f", suppressTags: ["TAG_C"] }); + l.info("untagged info"); + l.warn("TAG_A", "tagged warn"); + l.error("TAG_B", "tagged error"); + l.debug("untagged debug"); + l.info("TAG_C", "suppressed info"); + logger = l; + }); + + it("filters by level", () => { + expect(bufferFilter(logger, { level: "info" })).toHaveLength(2); + expect(bufferFilter(logger, { level: "warn" })).toHaveLength(1); + expect(bufferFilter(logger, { level: "error" })).toHaveLength(1); + expect(bufferFilter(logger, { level: "debug" })).toHaveLength(1); + }); + + it("filters by tag", () => { + expect(bufferFilter(logger, { tag: "TAG_A" })).toHaveLength(1); + expect(bufferFilter(logger, { tag: "TAG_B" })).toHaveLength(1); + expect(bufferFilter(logger, { tag: "TAG_C" })).toHaveLength(1); + }); + + it("filters by suppressed", () => { + 
expect(bufferFilter(logger, { suppressed: true })).toHaveLength(1); + expect(bufferFilter(logger, { suppressed: false })).toHaveLength(4); + }); + + it("combines filters", () => { + expect(bufferFilter(logger, { level: "info", suppressed: true })).toHaveLength(1); + expect(bufferFilter(logger, { level: "info", suppressed: false })).toHaveLength(1); + expect(bufferFilter(logger, { level: "warn", tag: "TAG_A" })).toHaveLength(1); + expect(bufferFilter(logger, { level: "warn", tag: "TAG_B" })).toHaveLength(0); + }); + }); + + describe("bufferClear", () => { + it("empties the buffer", () => { + logger.info("a"); + logger.warn("b"); + expect(logger.buffer()).toHaveLength(2); + logger.bufferClear(); + expect(logger.buffer()).toHaveLength(0); + }); + + it("does not reset tag counts", () => { + logger.warn("TAG_A", "msg"); + logger.bufferClear(); + expect(logger.tagCounts().get("TAG_A")).toBe(1); + }); + }); + + describe("printSuppressedSummary", () => { + it("emits an info entry with suppressed counts", () => { + const l = makeLogger({ suppressTags: ["TAG_A", "TAG_B"] }); + l.warn("TAG_A", "a1"); + l.warn("TAG_A", "a2"); + l.warn("TAG_B", "b1"); + l.printSuppressedSummary(); + + const summaryEntries = bufferFilter(l, { level: "info" }); + expect(summaryEntries).toHaveLength(1); + expect(summaryEntries[0]?.message).toContain("TAG_A: 2"); + expect(summaryEntries[0]?.message).toContain("TAG_B: 1"); + }); + + it("does nothing when no tags are suppressed", () => { + logger.warn("TAG_A", "visible"); + const countBefore = logger.buffer().length; + logger.printSuppressedSummary(); + expect(logger.buffer()).toHaveLength(countBefore); + }); + }); + + describe("prefix", () => { + it("uses empty prefix by default", () => { + const l = makeLogger({}); + l.info("msg"); + expect(l.buffer()[0]?.prefix).toBe(""); + }); + + it("nests prefixes through multiple forks", () => { + const child = logger.fork("a").fork("b"); + child.info("deep"); + 
expect(child.buffer()[0]?.prefix).toBe("test:a:b"); + }); + }); + + describe("log level filtering", () => { + it("defaults to info level (debug messages not printed but buffered)", () => { + const l = makeLogger({}); + l.debug("hidden"); + l.info("visible"); + expect(l.buffer()).toHaveLength(2); + }); + + it("filters messages below configured level", () => { + const l = makeLogger({ level: "warn" }); + l.debug("d"); + l.info("i"); + l.warn("w"); + l.error("e"); + // all 4 buffered + expect(l.buffer()).toHaveLength(4); + }); + + it("setLevel changes level at runtime", () => { + const l = makeLogger({ level: "info" }); + l.debug("before"); + l.setLevel("debug"); + l.debug("after"); + // both buffered regardless + expect(l.buffer()).toHaveLength(2); + }); + + it("fork inherits parent level", () => { + const parent = makeLogger({ level: "warn" }); + const child = parent.fork("child"); + child.debug("d"); + child.info("i"); + child.warn("w"); + expect(child.buffer()).toHaveLength(3); + }); + + it("fork can override parent level", () => { + const parent = makeLogger({ level: "warn" }); + const child = parent.fork("child", { level: "debug" }); + child.debug("d"); + expect(child.buffer()).toHaveLength(1); + }); + + it("level filtering works alongside tag suppression", () => { + const l = makeLogger({ level: "warn", suppressTags: ["TAG_A"] }); + l.info("TAG_A", "suppressed + below level"); + l.warn("TAG_A", "suppressed at level"); + l.warn("TAG_B", "visible"); + expect(l.buffer()).toHaveLength(3); + expect(bufferFilter(l, { suppressed: true })).toHaveLength(2); + }); + }); +});