Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
cache: yarn
- run: yarn
Expand All @@ -16,7 +16,7 @@ jobs:
test:
strategy:
matrix:
node-version: [18, 20, 21]
node-version: [18, 20, 22, 24]
runs-on: ubuntu-latest

services:
Expand All @@ -37,8 +37,8 @@ jobs:
- 27017:27017

steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
node-version: ${{matrix.node-version}}
cache: yarn
Expand All @@ -52,8 +52,8 @@ jobs:
windows-test:
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
cache: yarn
- run: yarn
Expand All @@ -64,14 +64,14 @@ jobs:
pack:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
cache: yarn
- run: yarn
- run: echo "SHORT_SHA=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
- run: yarn pack -o appmap-node-${{ env.SHORT_SHA }}.tgz
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@v6
with:
name: appmap-node-${{ env.SHORT_SHA }}
path: appmap-node-${{ env.SHORT_SHA }}.tgz
Expand All @@ -88,11 +88,11 @@ jobs:
with:
app-id: ${{ secrets.RELEASE_BOT_APP_ID }}
private-key: ${{ secrets.RELEASE_BOT_PRIVATE_KEY }}
- uses: actions/checkout@v4
- uses: actions/checkout@v6
with:
fetch-depth: 0
token: ${{ steps.app-token.outputs.token }}
- uses: actions/setup-node@v4
- uses: actions/setup-node@v6
with:
cache: yarn
- run: yarn
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@ jobs:
id-token: write
contents: read
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
with:
node-version: 24
cache: yarn
Expand Down
34 changes: 20 additions & 14 deletions src/hooks/mysql.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import assert from "node:assert";

import type mysql from "mysql";

import Recording from "../Recording";
import { getActiveRecordings, isActive } from "../recorder";
import { getTime } from "../util/getTime";

Expand Down Expand Up @@ -46,27 +47,32 @@ function createQueryProxy(query: mysql.QueryFunction) {
const recordings = getActiveRecordings();
const callEvents = recordings.map((recording) => recording.sqlQuery("mysql", sql));
const startTime = getTime();
// Capture the current async context so the callback can emit return events
// in the correct buffer position. See sqlite.ts for the same pattern.
const asyncContext = Recording.getContext();

const originalCallback =
typeof argArray[argArray.length - 1] === "function"
? (argArray.pop() as mysql.queryCallback)
: undefined;

const newCallback: mysql.queryCallback = (err, results, fields) => {
if (err)
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionException(callEvents[idx].id, err, startTime),
);
else
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionReturn(callEvents[idx].id, undefined, startTime),
);

originalCallback?.call(this, err, results, fields);
// Restore the captured context to keep events in causal order.
Recording.run(asyncContext, () => {
if (err)
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionException(callEvents[idx].id, err, startTime),
);
else
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionReturn(callEvents[idx].id, undefined, startTime),
);
originalCallback?.call(this, err, results, fields);
});
};

argArray.push(newCallback);
Expand Down
28 changes: 19 additions & 9 deletions src/hooks/sqlite.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
/* eslint-disable @typescript-eslint/unbound-method */
import type sqlite from "sqlite3";

import Recording from "../Recording";
import { getActiveRecordings, isActive } from "../recorder";
import { getTime } from "../util/getTime";

Expand Down Expand Up @@ -54,6 +55,11 @@ function createRecordingProxy<T extends RecordingProxyTarget>(
const recordings = getActiveRecordings();
const callEvents = recordings.map((recording) => recording.sqlQuery("sqlite", sql));
const startTime = getTime();
// Capture the current async context so the completion callback can emit
// return events in the correct buffer position. Native sqlite callbacks
// fire outside any AsyncLocalStorage context (especially in Node 24+),
// which would otherwise place return events after all buffered call events.
const asyncContext = Recording.getContext();

// Extract callback argument(s) to functionArgs
const functionArgs = [];
Expand Down Expand Up @@ -87,15 +93,19 @@ function createRecordingProxy<T extends RecordingProxyTarget>(
| ((...args: unknown[]) => unknown);

const newCompletionCallback = (...args: unknown[]) => {
const isError = args.length > 0 && args[0] != undefined;
if (!isError) {
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionReturn(callEvents[idx].id, undefined, startTime),
);
}
originalCompletionCallback?.apply(this, args);
// Restore the captured context to keep events in causal order, and run
// the original callback inside it so any instrumented work it does is
// also recorded in the correct position.
Recording.run(asyncContext, () => {
const isError = args.length > 0 && args[0] != undefined;
if (!isError)
recordings.forEach(
(recording, idx) =>
isActive(recording) &&
recording.functionReturn(callEvents[idx].id, undefined, startTime),
);
originalCompletionCallback?.apply(this, args);
});
};
newFunctionArgs.push(newCompletionCallback);

Expand Down
24 changes: 0 additions & 24 deletions src/loader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,28 +3,9 @@ import { URL, fileURLToPath } from "node:url";

import type { NodeLoaderHooksAPI2 } from "ts-node";

import config from "./config";
import { warn } from "./message";
import transform, { findHook, getSourceMap, shouldMatchOriginalSourcePaths } from "./transform";
import { readPkgUp } from "./util/readPkgUp";
import { forceRequire } from "./register";

export const resolve: NodeLoaderHooksAPI2["resolve"] = async function resolve(
url,
context,
nextResolve,
) {
const result = await nextResolve(url, context, nextResolve);

// For libraries, we preempt import with CommonJS require here, instead
// of load function, because for third party libraries we can catch
// their import name here (i.e. url: "json5"). Then it gets resolved
// to a path (i.e. result.path: ".../node_modules/json5/lib/index.js")
// and passed to the load function.
if (config().getPackage(url, true) != undefined) forceRequire(url);

return result;
};

export const load: NodeLoaderHooksAPI2["load"] = async function load(url, context, defaultLoad) {
const urlObj = new URL(url);
Expand Down Expand Up @@ -62,10 +43,5 @@ export const load: NodeLoaderHooksAPI2["load"] = async function load(url, contex
return original;
}

// For Prisma client modules, we preempt import with CommonJS require to allow our hooks to
// modify the loaded module in cache. http/https are handled in register.ts instead, where
// they are pre-patched in the main thread before any ESM imports can race against them.
if (config().prismaClientModuleIds.includes(url)) forceRequire(url);

return original;
};
67 changes: 31 additions & 36 deletions src/register.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import Module from "node:module";
import { pathToFileURL } from "node:url";
import worker from "node:worker_threads";

import config from "./config.js";
import { warn } from "./message.js";
import { record } from "./recorder.js";
import requireHook from "./requireHook.js";
import transform from "./transform.js";
Expand Down Expand Up @@ -32,42 +33,36 @@ declare global {
}
global.AppMapRecordHook = record;

// A side channel to allow requiring a module when requested from
// the loader. This is a bit of a hack to allow patching modules like
// http and https in ESM; import hooks don't have a way to modify
// the module after it's loaded (like require hooks), but they share
// module cache for the built-in modules. By catching an import
// and requiring it before the import happens we pre-populate the
// cache with our patched version.
export function forceRequire(specifier: string): void {
cjsRequire(specifier);
// We also broadcast to other threads so it works when the
// loader is in a separate thread (as in newer nodes) with
// a separate module cache.
esmRequireChannel.postMessage(specifier);
}

const cjsRequire = Module.createRequire(__filename);

// Pre-patch http/https in the main thread so they're already instrumented before any ESM module
// imports them. Without this there is a race: when an ESM module does `import http from "http"`,
// the loader thread calls forceRequire() and sends a BroadcastChannel message to patch http in the
// main thread. But the main thread evaluates the ESM module body synchronously and can call into
// http (e.g. new http.ClientRequest()) before the BroadcastChannel message is ever processed.
cjsRequire("node:http");
cjsRequire("node:https");
// Pre-patch library modules in the main thread so they're already instrumented before any ESM
// module imports them. The ESM load() hook cannot transform built-in modules (node:http etc.)
// since they have no source, and for CJS-format third-party libraries the CJS cache is the only
// shared state between the loader thread and the main thread. register.ts runs via --require
// before any ESM evaluation, so these require() calls are race-free.
// Best-effort require() of `id` so the patched module is already in the CJS
// cache before any ESM import can resolve to it. A failed require is only
// surfaced when `warnOnFailure` is set: built-in patches stay silent, while
// user-requested modules emit a warning so the user knows instrumentation
// was skipped. The module may simply not be installed, or be installed but
// broken (bad transitive dep, syntax error, etc.); either way, crashing here
// would make appmap look like the culprit, so we never throw.
function prePatch(id: string, { warnOnFailure = false } = {}) {
  let failed = false;
  let failure: unknown;
  try {
    cjsRequire(id);
  } catch (err: unknown) {
    failed = true;
    failure = err;
  }
  if (!failed || !warnOnFailure) return;
  const detail = failure instanceof Error ? failure.message : String(failure);
  warn(`could not pre-patch '${id}' for instrumentation: ${detail}`);
}

const esmRequireChannel = new worker.BroadcastChannel("appmap-node/register/esm-require").unref();
// Built-in patches: always attempted, silently skipped if not present.
prePatch("node:http");
prePatch("node:https");
for (const id of config().prismaClientModuleIds) prePatch(id);

esmRequireChannel.onmessage = (message: unknown) => {
if (
!(
message != null &&
typeof message == "object" &&
"data" in message &&
typeof message.data == "string"
)
)
return;
cjsRequire(message.data);
};
// User-configured library modules: warn if pre-patching fails, since the
// user explicitly asked for instrumentation of these.
for (const pkg of config().packages) {
if (pkg.module) prePatch(pkg.module, { warnOnFailure: true });
}
6 changes: 3 additions & 3 deletions test/__snapshots__/mongo.test.ts.snap
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ exports[`mapping MongoDB tests 1`] = `
"object_id": 5,
"value": "Promise { {
acknowledged: true,
insertedId: ObjectId { [Symbol(id)]: [Buffer [Uint8Array]] }
insertedId: ObjectId('...')
} }",
},
"thread_id": 0,
Expand Down Expand Up @@ -193,7 +193,7 @@ exports[`mapping MongoDB tests 1`] = `
"object_id": 7,
"value": "Promise { {
acknowledged: true,
insertedId: ObjectId { [Symbol(id)]: [Buffer [Uint8Array]] }
insertedId: ObjectId('...')
} }",
},
"thread_id": 0,
Expand Down Expand Up @@ -348,7 +348,7 @@ exports[`mapping MongoDB tests 1`] = `
"return_value": {
"class": "Promise<Object>",
"object_id": 14,
"value": "Promise { { _id: ObjectId { [Symbol(id)]: [Buffer [Uint8Array]] }, a: 3 } }",
"value": "Promise { { _id: ObjectId('...'), a: 3 } }",
},
"thread_id": 0,
},
Expand Down
Loading
Loading