From e8addfd43a612e3ff56dfbed16f2fd61f633c9e7 Mon Sep 17 00:00:00 2001 From: stefanrammo Date: Tue, 7 Apr 2026 14:21:43 +0300 Subject: [PATCH] Integrate logger client into cdp-client with service-based discovery Bundle cdplogger-client as studio.logger and add client.logger() method that discovers loggers via ServicesNotification and connects through proxy transport via makeServiceTransport (CDP 5.1+). - client.logger(name, options): auto-discovers logserver proxy service, creates service transport, passes to LoggerClient constructor - client.loggers(): returns all discovered logger services as [{name, metadata}] after first ServicesNotification - LoggerClient constructor extended to accept service transport object in addition to endpoint string (autoReconnect forced false) - Caching by name with eviction on transport close - close() disconnects cached loggers and rejects pending promises - Logger transports registered in serviceConnections for proper cleanup on primary connection drop - Rejects immediately on CDP < 5.1 (no proxy protocol support) - Proto files use raw .proto.js format matching studioapi.proto.js, with Node/browser parity via require() and window globals - Unit tests adapted from cdplogger-client test suite - README.rst updated with Logger Integration API documentation --- README.rst | 164 +++++ index.js | 230 ++++++- logger/container.proto.js | 64 ++ logger/database.proto.js | 170 ++++++ logger/logger-client.js | 1180 ++++++++++++++++++++++++++++++++++++ logger/variant.proto.js | 57 ++ test/logger-client.test.js | 455 ++++++++++++++ test/loggerFakeData.js | 180 ++++++ 8 files changed, 2499 insertions(+), 1 deletion(-) create mode 100644 logger/container.proto.js create mode 100644 logger/database.proto.js create mode 100644 logger/logger-client.js create mode 100644 logger/variant.proto.js create mode 100644 test/logger-client.test.js create mode 100644 test/loggerFakeData.js diff --git a/README.rst b/README.rst index cb3a328..662cac3 100644 --- 
a/README.rst +++ b/README.rst @@ -801,3 +801,167 @@ node.removeChild(name) Remove child Node from this Node. +Logger Integration (CDP 5.1+) +----------------------------- + +The CDP Logger client is bundled into ``cdp-client`` as ``studio.logger``. Logger +discovery and connection are handled automatically via the StudioAPI proxy — no need +to manually discover the logger port or manage a separate WebSocket connection. +For more information about CDP Logger, see https://cdpstudio.com/manual/cdp/cdplogger/cdplogger-index.html. + +Logger data is served behind StudioAPI authentication and tunneled through the +existing proxy connection. + +client.logger(name, options) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Arguments + + name (optional) - Logger service name (node path) to filter by, e.g. ``"App.CDPLogger"`` + + options (optional) - ``{ timeout: number }`` Timeout in milliseconds. Rejects if no matching service appears in time. + +- Returns + + Promise.<Client> - Resolves with a connected logger client instance. + +- Usage + + Auto-discovers the first available CDPLogger or LogServer via service discovery, + creates a proxy transport, and returns a ready-to-use logger client. Repeated calls + return the same cached instance. A cached logger whose transport has closed is evicted + on the next call. + +- Example + + .. code:: javascript + + const studio = require("cdp-client"); + const client = new studio.api.Client("127.0.0.1:7689"); + + // Auto-discover any available logger + client.logger().then(function(logger) { + return logger.requestLogLimits().then(function(limits) { + return logger.requestDataPoints( + ["Temperature", "Pressure"], + limits.startS, limits.endS, 200, 0 + ); + }).then(function(points) { + points.forEach(function(p) { + var temp = p.value["Temperature"]; + console.log(new Date(p.timestamp * 1000), "min:", temp.min, "max:", temp.max); + }); + }); + }); + + ..
code:: javascript + + // Discover a specific logger by name + client.logger("App.CDPLogger").then(function(logger) { + return logger.requestLoggedNodes().then(function(nodes) { + console.log("Logged nodes:", nodes.map(function(n) { return n.name; })); + }); + }); + + .. code:: javascript + + // Query events + client.logger().then(function(logger) { + return logger.requestEvents({ + limit: 100, + flags: studio.logger.Client.EventQueryFlags.NewestFirst + }).then(function(events) { + events.forEach(function(e) { + console.log(e.sender, e.data); + }); + }); + }); + + .. code:: javascript + + // With timeout — rejects if no logger found within 5 seconds + client.logger("App.CDPLogger", { timeout: 5000 }).catch(function(err) { + console.log(err); // "Timeout: no logger service found for 'App.CDPLogger'" + }); + +client.loggers() +^^^^^^^^^^^^^^^^ + +- Returns + + Promise.<Array.<Object>> - All available logger services. Waits for + service discovery if none have been received yet. + +- Usage + + Returns the list of all discovered logger services. Each entry has ``name`` + (the logger's node path) and ``metadata`` (service metadata including ``proxy_type``). + +- Example + + .. code:: javascript + + client.loggers().then(function(loggers) { + loggers.forEach(function(l) { + console.log(l.name, l.metadata.proxy_type); // "App.CDPLogger" "logserver" + }); + }); + +studio.logger.Client +~~~~~~~~~~~~~~~~~~~~ + +The logger client class is also available as ``studio.logger.Client`` for standalone +usage when the logger endpoint is already known. + +Node.js +""""""" + +In Node.js, the logger client is loaded automatically: + + .. code:: javascript + + const studio = require("cdp-client"); + var loggerClient = new studio.logger.Client("127.0.0.1:17000"); + +Browser +""""""" + +In the browser, load the proto files and logger client before ``index.js``: + + .. code:: html + + <script src="https://cdn.jsdelivr.net/npm/protobufjs/dist/protobuf.min.js"></script> + <script src="logger/variant.proto.js"></script> + <script src="logger/database.proto.js"></script> + <script src="logger/container.proto.js"></script> + <script src="logger/logger-client.js"></script> + <script src="index.js"></script> + +Then use via ``studio.logger.Client``: + + ..
code:: javascript + + var loggerClient = new studio.logger.Client(window.location.hostname + ":17000"); + +Static properties: + +- ``studio.logger.Client.EventQueryFlags`` - ``{ None: 0, NewestFirst: 1, TimeRangeBeginExclusive: 2, TimeRangeEndExclusive: 4, UseLogStampForTimeRange: 8 }`` +- ``studio.logger.Client.MatchType`` - ``{ Exact: 0, Wildcard: 1 }`` + +LoggerClient API +~~~~~~~~~~~~~~~~ + +The logger client returned by ``client.logger()`` provides these methods: + +- ``requestApiVersion()`` - Returns Promise with the logger API version string. +- ``requestLoggedNodes()`` - Returns Promise with array of ``{ name, routing, tags }`` for each logged signal. +- ``requestLogLimits()`` - Returns Promise with ``{ startS, endS }`` — the earliest and latest logged timestamps in seconds. +- ``requestDataPoints(nodeNames, startS, endS, noOfDataPoints, limit)`` - Returns Promise with array of data points. Each point has ``{ timestamp, value }`` where value maps signal names to ``{ min, max, last }``. +- ``requestEvents(query)`` - Returns Promise with array of events. Query supports ``limit``, ``offset``, ``flags``, ``timeRangeBegin``, ``timeRangeEnd``, ``codeMask``, ``senderConditions``, ``dataConditions``. +- ``countEvents(query)`` - Returns Promise with the count of matching events. +- ``getEventCodeDescription(code)`` - Returns human-readable description for an event code (e.g. ``"AlarmSet + SourceObjectUnavailable"``). +- ``getEventCodeString(code)`` - Returns compact event code string (e.g. ``"RepriseAlarmSet"``, ``"Ack"``). +- ``getSenderTags(sender)`` - Returns Promise with tags for an event sender. +- ``disconnect()`` - Closes the connection and disables auto-reconnect. +- ``setEnableTimeSync(enable)`` - Enable or disable automatic time synchronization with the server. 
+ diff --git a/index.js b/index.js index c89d6ac..aaf567c 100644 --- a/index.js +++ b/index.js @@ -1560,6 +1560,49 @@ studio.internal = (function(proto) { return { transport: transport, instanceKey: instanceKey }; } + this.createServiceTransport = function(serviceId) { + var result = makeServiceTransport(serviceId); + // Register in serviceConnections so cleanupPrimaryConnectionState tears it down + serviceConnections.set(result.instanceKey, result.transport); + return result; + }; + + function isLoggerService(service) { + return service.type === 'websocketproxy' && + service.metadata && + service.metadata.proxy_type === 'logserver'; + } + + this.findLoggerService = function(name) { + for (var service of availableServices.values()) { + if (isLoggerService(service) && (!name || service.name === name)) { + return service; + } + } + return null; + }; + + this.findAllLoggerServices = function() { + var result = []; + availableServices.forEach(function(service) { + if (isLoggerService(service)) { + result.push({ name: service.name, metadata: service.metadata }); + } + }); + return result; + }; + + var serviceUpdateListeners = []; + + this.addServiceUpdateListener = function(fn) { + serviceUpdateListeners.push(fn); + }; + + this.removeServiceUpdateListener = function(fn) { + var idx = serviceUpdateListeners.indexOf(fn); + if (idx !== -1) serviceUpdateListeners.splice(idx, 1); + }; + function resendServicesRequest() { if (currentMetadata && currentMetadata.compatVersion >= PROXY_MIN_COMPAT_VERSION) { console.log("Did not receive services notification within expected interval. 
Re-requesting services."); @@ -1664,6 +1707,9 @@ studio.internal = (function(proto) { if (appConnection.onServicesUpdated) { appConnection.onServicesUpdated(); } + serviceUpdateListeners.slice().forEach(function(fn) { + try { fn(); } catch (e) { console.error("Service update listener threw:", e); } + }); resetServicesTimeout(); }; @@ -2634,7 +2680,7 @@ studio.api = (function(internal) { return pathParts.reduce(findNode, self.root()); } - // timeout: 0 means immediate fail (old behavior) + // timeout: 0 means fail immediately if the app is not currently available if (options && options.timeout === 0) { return doFind(); } @@ -2656,12 +2702,194 @@ studio.api = (function(internal) { return system._getAppConnections(); }; + // --- Logger integration --- + + var LoggerClient = studio.logger ? studio.logger.Client : null; + var loggerClients = {}; + var loggerPromises = {}; + var pendingLoggerCleanups = []; + + function getPrimaryConnection() { + return system._getAppConnections()[0] || null; + } + + function createLoggerFromService(primary, service, cacheKey) { + var result = primary.createServiceTransport(Number(service.serviceId)); + var loggerClient = new LoggerClient(result.transport, false); + loggerClients[cacheKey] = loggerClient; + delete loggerPromises[cacheKey]; + return loggerClient; + } + + /** + * Get a logger client via service-based discovery and proxy transport (CDP 5.1+). + * @param {string} [name] - Logger service name (node path) to filter by. + * @param {Object} [options] - Options. + * @param {number} [options.timeout] - Timeout in milliseconds. Rejects if no matching service appears in time. 
+ * @returns {Promise.} + */ + this.logger = function(name, options) { + var timeout = options && options.timeout; + if (!LoggerClient) { + return Promise.reject("Logger client not available"); + } + var cacheKey = name || '__default__'; + if (loggerClients[cacheKey] && !loggerClients[cacheKey].disconnected) { + return Promise.resolve(loggerClients[cacheKey]); + } + delete loggerClients[cacheKey]; + if (loggerPromises[cacheKey]) { + return loggerPromises[cacheKey]; + } + + var promise = new Promise(function(resolve, reject) { + system.onConnect(function() { + var primary = getPrimaryConnection(); + if (!primary) { + reject("No primary connection"); + return; + } + if (!primary.supportsProxyProtocol()) { + reject("Logger service discovery requires CDP 5.1+"); + return; + } + var service = primary.findLoggerService(name); + if (service) { + resolve(createLoggerFromService(primary, service, cacheKey)); + return; + } + // Wait for matching service to appear + var settled = false; + var timer = null; + + function settle() { + if (settled) return false; + settled = true; + clearTimeout(timer); + primary.removeServiceUpdateListener(listener); + var idx = pendingLoggerCleanups.indexOf(cleanup); + if (idx !== -1) pendingLoggerCleanups.splice(idx, 1); + return true; + } + + if (timeout) { + timer = setTimeout(function() { + if (settle()) { + delete loggerPromises[cacheKey]; + reject("Timeout: no logger service found" + (name ? 
" for '" + name + "'" : "")); + } + }, timeout); + } + + function listener() { + var svc = primary.findLoggerService(name); + if (svc && settle()) { + resolve(createLoggerFromService(primary, svc, cacheKey)); + } + } + + var cleanup = { + reject: function() { + if (settle()) { + delete loggerPromises[cacheKey]; + reject("Connection closed"); + } + } + }; + pendingLoggerCleanups.push(cleanup); + primary.addServiceUpdateListener(listener); + }, reject, autoConnect); + }); + + loggerPromises[cacheKey] = promise; + promise.catch(function() { delete loggerPromises[cacheKey]; }); + return promise; + }; + + /** + * Get all available logger services. Resolves immediately if services + * have already been discovered, otherwise waits for the first update. + * @returns {Promise.>} + */ + this.loggers = function() { + return new Promise(function(resolve, reject) { + system.onConnect(function() { + var primary = getPrimaryConnection(); + if (!primary) { + reject("No primary connection"); + return; + } + if (!primary.supportsProxyProtocol()) { + reject("Logger service discovery requires CDP 5.1+"); + return; + } + if (primary.services().size > 0) { + resolve(primary.findAllLoggerServices()); + return; + } + var settled = false; + function settle() { + if (settled) return false; + settled = true; + primary.removeServiceUpdateListener(listener); + var idx = pendingLoggerCleanups.indexOf(cleanup); + if (idx !== -1) pendingLoggerCleanups.splice(idx, 1); + return true; + } + function listener() { + if (settle()) { + resolve(primary.findAllLoggerServices()); + } + } + var cleanup = { + reject: function() { + if (settle()) { + reject("Connection closed"); + } + } + }; + pendingLoggerCleanups.push(cleanup); + primary.addServiceUpdateListener(listener); + }, reject, autoConnect); + }); + }; + + // Wrap close() to clean up logger clients + var originalClose = this.close; + this.close = function() { + Object.keys(loggerClients).forEach(function(key) { + try { 
loggerClients[key].disconnect(); } catch (e) { + console.error("Error disconnecting logger client:", e); + } + }); + loggerClients = {}; + loggerPromises = {}; + // Reject all pending logger/loggers promises and remove their listeners + var cleanups = pendingLoggerCleanups.slice(); + pendingLoggerCleanups = []; + cleanups.forEach(function(c) { c.reject(); }); + originalClose(); + }; }; return obj; })(studio.internal); +/* -------------------------------------------------------------------------- + * Logger client bundle + * ------------------------------------------------------------------------ */ +studio.logger = (function() { + var isNode = (typeof process !== 'undefined') && process.versions && process.versions.node && (typeof window === 'undefined'); + if (isNode) { + return require('./logger/logger-client.js'); + } + if (typeof window !== 'undefined' && window.cdplogger) { + return window.cdplogger; + } + return null; +})(); + /* -------------------------------------------------------------------------- * Module export (CommonJS/ES Module hybrid) * ------------------------------------------------------------------------ */ diff --git a/logger/container.proto.js b/logger/container.proto.js new file mode 100644 index 0000000..1be43b0 --- /dev/null +++ b/logger/container.proto.js @@ -0,0 +1,64 @@ +const containerProtoText = ` +// This is the .proto file in Google Protocol Buffers format. +// When this file is compiled with Google Protocol Buffers compiler +// (https://code.google.com/p/protobuf/downloads/list), then Java/Python/C++ +// code is generated which contains methods for serializing and deserializing +// the messages contained in this .proto file. + +syntax = "proto2"; + +package DBMessaging.Protobuf; + +option optimize_for = LITE_RUNTIME; +option java_package = "no.icd.dbmessaging.protobuf"; + +import "database.proto"; + +/** Common union-style base type for all Protobuf messages in DB. 
*/ +message Container { + enum Type { + eSignalInfoRequest = 1; + eSignalInfoResponse = 2; + eSignalDataRequest = 3; + eSignalDataResponse = 4; + eCriterionLimitsRequest = 5; + eCriterionLimitsResponse = 6; + eVersionRequest = 7; + eVersionResponse = 8; + eError = 9; + eTimeRequest = 10; + eTimeResponse = 11; + eEventSenderTagsRequest = 12; + eEventSenderTagsResponse = 13; + eCountEventsRequest = 14; + eCountEventsResponse = 15; + eEventsRequest = 16; + eEventsResponse = 17; + } + optional Type message_type = 1; + optional SignalInfoRequest signal_info_request = 2; + optional SignalInfoResponse signal_info_response = 3; + optional SignalDataRequest signal_data_request = 4; + optional SignalDataResponse signal_data_response = 5; + optional CriterionLimitsRequest criterion_limits_request = 6; + optional CriterionLimitsResponse criterion_limits_response = 7; + optional VersionRequest version_request = 8; + optional VersionResponse version_response = 9; + optional Error error = 10; + optional TimeRequest time_request = 11; + optional TimeResponse time_response = 12; + optional EventSenderTagsRequest event_sender_tags_request = 13; + optional EventSenderTagsResponse event_sender_tags_response = 14; + optional CountEventsRequest count_events_request = 15; + optional CountEventsResponse count_events_response = 16; + optional EventsRequest events_request = 17; + optional EventsResponse events_response = 18; + extensions 100 to max; +} +`; + +if (typeof module !== 'undefined' && module.exports) { + module.exports = containerProtoText; +} else if (typeof window !== 'undefined') { + window.containerProto = containerProtoText; +} diff --git a/logger/database.proto.js b/logger/database.proto.js new file mode 100644 index 0000000..96bd08f --- /dev/null +++ b/logger/database.proto.js @@ -0,0 +1,170 @@ +const databaseProtoText = ` +// This is the .proto file in Google Protocol Buffers format. 
+// When this file is compiled with Google Protocol Buffers compiler +// (https://code.google.com/p/protobuf/downloads/list), then Java/Python/C++ +// code is generated which contains methods for serializing and deserializing +// the messages contained in this .proto file. + +syntax = "proto2"; + +package DBMessaging.Protobuf; + +option optimize_for = LITE_RUNTIME; +option java_package = "no.icd.dbmessaging.protobuf"; + +import "variant.proto"; + +// Signal queries + +message SignalInfoRequest { + optional uint32 request_id = 1; +} + +message SignalInfoResponse { + optional uint32 request_id = 1; + repeated string name = 2; + repeated uint32 id = 3; + repeated ICD.Protobuf.CDPValueType type = 4; + repeated string path = 5; + repeated TagMap tagMap = 6; +} + +message TagInfo { + optional string value = 1; + optional string source = 2; +} + +message TagMap { + map<string, TagInfo> tags = 1; +} + +message SignalDataRequest { + optional uint32 request_id = 1; + repeated uint32 signal_id = 2; + optional double criterion_min = 3; + optional double criterion_max = 4; + optional uint32 num_of_datapoints = 5; // requested resolution + optional uint32 limit = 6; // Return the first 'n' rows of the query result +} + +message SignalDataResponse { + optional uint32 request_id = 1; // corresponds to SignalDataRequest::request_id + repeated double criterion = 2; + repeated SignalDataRow row = 3; +} + +message SignalDataRow { + repeated uint32 signal_id = 1; + repeated ICD.Protobuf.VariantValue min_values = 2; + repeated ICD.Protobuf.VariantValue max_values = 3; + repeated ICD.Protobuf.VariantValue last_values = 4; +} + +message CriterionLimitsRequest { + optional uint32 request_id = 1; +} + +message CriterionLimitsResponse { + optional uint32 request_id = 1; // corresponds to CriterionLimitsRequest::request_id + optional double criterion_min = 2; + optional double criterion_max = 3; +} + +// Event queries + +message Event { + optional string sender = 1; + map<string, string> data = 2; + optional double
timestamp_sec = 3; + optional uint64 id = 4; + optional uint32 code = 5; + optional uint32 status = 6; + optional double logstamp_sec = 7; +} + +message EventQuery { + enum MatchType { + Exact = 0; + Wildcard = 1; + } + + message Condition { + optional string value = 1; + optional MatchType type = 2; + } + + message ConditionList { + repeated Condition conditions = 1; + } + + optional double time_range_begin = 1; + optional double time_range_end = 2; + optional uint32 code_mask = 3; + optional uint32 limit = 4; + optional uint32 offset = 5; + optional uint32 flags = 6; + optional ConditionList sender_conditions = 7; + map<string, ConditionList> data_conditions = 8; +} + +message EventSenderTagsRequest { + optional uint32 request_id = 1; +} + +message EventSenderTagsResponse { + optional uint32 request_id = 1; + map<string, TagMap> sender_tags = 2; +} + +message CountEventsRequest { + optional uint32 request_id = 1; + optional EventQuery query = 2; +} + +message CountEventsResponse { + optional uint32 request_id = 1; + optional int64 count = 2; +} + +message EventsRequest { + optional uint32 request_id = 1; + optional EventQuery query = 2; +} + +message EventsResponse { + optional uint32 request_id = 1; + repeated Event events = 2; +} + +// Server info queries + +message VersionRequest { + optional uint32 request_id = 1; +} + +message VersionResponse { + optional uint32 request_id = 1; + optional string version = 2; +} + +message Error { + optional uint32 request_id = 1; + optional string errorMessage = 2; + optional int32 errorCode = 3; +} + +message TimeRequest { + optional uint32 request_id = 1; +} + +message TimeResponse { + optional uint32 request_id = 1; + optional fixed64 timestamp = 2; // nanoseconds +} +`; + +if (typeof module !== 'undefined' && module.exports) { + module.exports = databaseProtoText; +} else if (typeof window !== 'undefined') { + window.databaseProto = databaseProtoText; +} diff --git a/logger/logger-client.js b/logger/logger-client.js new file mode 100644 index 0000000..1770f56
--- /dev/null +++ b/logger/logger-client.js @@ -0,0 +1,1180 @@ +// Environment detection and dependency loading +let WS; // WebSocket constructor +let root; // protobuf root namespace + +if (typeof window === 'undefined') { + // ---- Node / CommonJS ---- + WS = global.WebSocket || require('ws'); + global.WebSocket = WS; + var protobuf = require('protobufjs'); + root = protobuf.parse(require('./variant.proto.js')).root; + protobuf.parse(require('./database.proto.js'), root); + protobuf.parse(require('./container.proto.js'), root); +} else { + // ---- Browser ---- + WS = window.WebSocket; + var protobuf = window.protobuf; + root = protobuf.parse(window.variantProto).root; + protobuf.parse(window.databaseProto, root); + protobuf.parse(window.containerProto, root); +} + +const Container = root.DBMessaging.Protobuf.Container; +const CDPValueType = root.ICD.Protobuf.CDPValueType; +const EventQuery = root.DBMessaging.Protobuf.EventQuery; + + +/** + * A client for interacting with a CDP Logger or LogServer via WebSocket. + * + * This client handles: + * - Automatic reconnection (if enabled) + * - Requesting and parsing responses for version, logged nodes, log limits, data points, and events + * - Time synchronization between the client and the server + */ +class Client { + // Defined property names to use instead of ambiguous numbers. + static EventQueryFlags = Object.freeze({ + None: 0, + NewestFirst: 1, + TimeRangeBeginExclusive: 2, + TimeRangeEndExclusive: 4, + UseLogStampForTimeRange: 8 + }); + + static MatchType = Object.freeze({ + Exact: 0, + Wildcard: 1 + }); + + /** + * Create a new Client instance to communicate with the logger. + * + * @param {string|object} endpointOrTransport - The logger endpoint (e.g. "127.0.0.1:17000") + * or a service transport object with send/close/onopen/onmessage/onclose/onerror. + * @param {boolean} [autoReconnect=true] - Whether to automatically reconnect if the connection is lost. 
+ * Forced to false when a transport object is provided. + */ + constructor(endpointOrTransport, autoReconnect = true) { + this.reqId = -1; + this.enableTimeSync = true; + this.disconnected = false; + this.isOpen = false; + this.queuedRequests = {}; + this.storedPromises = {}; + this.nameToId = {}; + this.idToName = {}; + this.nameToType = {}; + this.timeDiff = 0; + this.timeReceived = null; + this.lastTimeRequest = Date.now() / 1000; + this.haveSentQueuedReq = false; + this.roundTripTimes = {}; + this.senderTags = {}; + this.pendingSenderTags = {}; + + if (typeof endpointOrTransport === 'object' && typeof endpointOrTransport.send === 'function') { + // Service transport mode — tunnel through StudioAPI proxy + this.autoReconnect = false; + var self = this; + var transport = endpointOrTransport; + transport.onopen = function() { self._onOpen(transport); }; + transport.onmessage = function(event) { self._handleMessage(transport, event.data); }; + transport.onerror = function(error) { self._onError(transport, error); }; + transport.onclose = function() { self._onClose(transport); }; + this.ws = transport; + } else { + // Endpoint string mode — direct WebSocket connection + this.autoReconnect = autoReconnect; + var url = endpointOrTransport; + if (!/^wss?:\/\//.test(url)) { + url = 'ws://' + url; + } + this.ws = this._connect(url); + } + } + + + /** + * Enable or disable time synchronization with the server. + * + * When enabled, the client automatically requests and calculates the time offset + * (`timeDiff`) between the client and server to align timestamps. This can help + * ensure data queries (e.g., requestDataPoints, requestEvents) are aligned with + * the server's notion of time. Re-enabling time sync triggers a new offset + * calculation on the next request or after a timeout. For an immediate sync, + * call `_updateTimeDiff()` explicitly. + * + * @param {boolean} enable - True to enable, false to disable time sync. 
+ */ + setEnableTimeSync(enable) { + this.enableTimeSync = enable; + if (!enable) { + // Cancel all pending requests so they won't resolve after time sync is disabled. + for (const key in this.storedPromises) { + this.storedPromises[key].reject(new Error("Time sync disabled")); + } + this.storedPromises = {}; + } + } + + /** + * Disconnect from the server, closing the WebSocket connection. + * + * This also disables auto-reconnect and clears any queued or pending requests. + * After calling `disconnect()`, you can create a new Client instance to + * re-establish a connection. + */ + disconnect() { + this.autoReconnect = false; + this.disconnected = true; + this._cleanupQueuedRequests(); + this.isOpen = false; + if (this.ws) { + this.ws.close(); + } + } + + // --- Public API methods --- + + /** + * Request the API version from the connected CDP Logger or LogServer. + * + * In CDP Studio, this corresponds to the version of the CDP runtime + * or the logger server that you are connecting to. The version can be used + * to ensure compatibility with certain features. + * + * Version History: + * - 3.0 (2017-08, CDP 4.3): Minimum supported version. + * - 3.1 (2020-08, CDP 4.9): + * - Support for reading full resolution data by setting noOfDataPoints to 0. + * - Added a limit argument to data point requests (behaves like SQL LIMIT, where 0 means no limit). + * - The server now notifies of dropped queries by returning a TooManyRequests error + * when too many pending requests exist. + * - 3.2 (2022-11, CDP 4.11): Limits queries to 50,000 rows to avoid overloading the logger app; + * larger data sets should be downloaded in patches. + * - 4.0 (2024-01, CDP 4.12): + * - Added NodeTag support to save custom tags for logged values (e.g. Unit or Description), + * accessible via the client's API. + * - Reduced network usage by having data responses only include changes instead of repeating unchanged values. + * - Added support for string values and events. 
+ * + * @returns {Promise} A promise that resolves with the version string + * (e.g., "4.5.2"). If the version is below 3.0, the promise is rejected with + * an error indicating an incompatible version. + */ + _rejectIfDisconnected() { + if (this.disconnected) { + return Promise.reject(new Error("Client is disconnected")); + } + return null; + } + + requestApiVersion() { + const rejected = this._rejectIfDisconnected(); + if (rejected) return rejected; + this._timeRequest(); + const requestId = this._getRequestId(); + if (!this.isOpen) { + this.queuedRequests[requestId] = "api_version"; + } else { + this._sendApiVersionRequest(requestId); + } + return new Promise((resolve, reject) => { + this.storedPromises[requestId] = { resolve, reject }; + }); + } + + /** + * Request the list of logged nodes. + * + * In CDP Studio, this corresponds to the "LoggedValues" table of the + * CDPLogger component. The returned list includes node + * names, paths, and any associated tags that might be assigned to + * those nodes. + * + * @returns {Promise} A promise that resolves with an array of + * node objects. Each object includes: + * - `name` (string): The node name + * - `routing` (string): The node path + * - `tags` (object): Optional key/value pairs providing additional + * node metadata + */ + requestLoggedNodes() { + const rejected = this._rejectIfDisconnected(); + if (rejected) return rejected; + this._timeRequest(); + const requestId = this._getRequestId(); + if (!this.isOpen) { + this.queuedRequests[requestId] = "logged_nodes"; + } else { + this._sendLoggedNodesRequest(requestId); + } + return new Promise((resolve, reject) => { + this.storedPromises[requestId] = { resolve, reject }; + }); + } + + /** + * Request the log limits (start and end times of available data). + * + * In CDP Studio, this corresponds to the earliest and latest times + * for which log data is available in the CDPLogger (or LogServer). 
+ * + * @returns {Promise} A promise that resolves with an object + * containing: + * - `startS` (number): The earliest available timestamp (in seconds). + * - `endS` (number): The latest available timestamp (in seconds). + */ + requestLogLimits() { + const rejected = this._rejectIfDisconnected(); + if (rejected) return rejected; + this._timeRequest(); + const requestId = this._getRequestId(); + if (!this.isOpen) { + this.queuedRequests[requestId] = "log_limits"; + } else { + this._sendLogLimitsRequest(requestId); + } + return new Promise((resolve, reject) => { + this.storedPromises[requestId] = { resolve, reject }; + }); + } + + /** + * Request data points for the specified node names over a given time range. + * + * This retrieves time-series data from the logged nodes + * (CDP signals, arguments, properties and other value nodes) in the specified range. The number of data points is + * adjustable, allowing for either raw or decimated data. + * + * @param {Array} nodeNames - The names of the nodes/signals to retrieve. + * @param {number} startS - The start time (in seconds since epoch). + * @param {number} endS - The end time (in seconds since epoch). + * @param {number} noOfDataPoints - The maximum number of data points to retrieve. + * - If you specify a nonzero value, the server will decimate or downsample + * the data to roughly that many points across [startS..endS]. + * - If you set it to 0, the server returns the data at full resolution + * (i.e., no decimation). + * @param {number} limit - Similar to SQL LIMIT. It allows you to request data + * in batches by setting the maximum batch size (the number of samples). + * Note, reading data in larger batches will improve performance but also allocate more memory. + * @returns {Promise} A promise that resolves with an array of objects, + * where each object has: + * - `timestamp` (number): The time (in seconds) for the data row. 
+ * - `value` (object): A key-value mapping of node names to an object with + * `min`, `max`, and `last` properties representing the node's values + * at that timestamp. + */ + requestDataPoints(nodeNames, startS, endS, noOfDataPoints, limit) { + const rejected = this._rejectIfDisconnected(); + if (rejected) return rejected; + this._timeRequest(); + const requestId = this._getRequestId(); + const promise = new Promise((resolve, reject) => { + this.storedPromises[requestId] = { resolve, reject }; + }); + if (!this.isOpen) { + this.queuedRequests[requestId] = ["node_values", nodeNames, startS, endS, noOfDataPoints, limit]; + } else { + this._reqDataPoints(nodeNames, startS, endS, noOfDataPoints, limit, requestId); + } + return promise; + } + + /** + * Request events based on the provided query parameters. + * + * In CDP Studio, this corresponds to event log queries for the + * CDPLogger (or LogServer). The query parameters allow filtering by + * sender, data fields, code masks, and time ranges, among others. + * + * The `query.flags` field uses bitmask values similar to an enum: + * 0 = None + * 1 = NewestFirst + * 2 = TimeRangeBeginExclusive + * 4 = TimeRangeEndExclusive + * 8 = UseLogStampForTimeRange + * + * For additional information: + * https://cdpstudio.com/manual/cdp/cdp2sql/logmanager-eventquery.html#Flags-enum + * https://cdpstudio.com/manual/cdp/cdplogger/eventlogreader.html#cdp-event-code-flags + * + * Allowed query keys: + * - timeRangeBegin (number) + * - timeRangeEnd (number) + * - limit (number) + * - offset (number) + * - codeMask (number) + * - flags (number) + * - senderConditions (array) + * - dataConditions (object) + * + * Each event object typically includes the following fields: + * - `sender` (string): The event sender. + * - `data` (object): An object containing event-specific details: + * - `Text` (string): The event text message. + * - `Level` (string): The event level (e.g., "ERROR"). 
     *   - `Description` (string): A detailed description of the event.
     *   - `Group` (string): A group identifier for the event.
     * - `timestampSec` (number): The timestamp (in seconds) when the event occurred.
     * - `id` (string): A unique identifier for the event.
     * - `code` (number): The raw event code returned by the server.
     * - `status` (number): The status code associated with the event.
     * - `logstampSec` (number): The log timestamp (in seconds) when the event was logged.
     *
     * Example usage:
     *   client.requestEvents({
     *     timeRangeBegin: 1609459200,
     *     timeRangeEnd: 1609545600,
     *     senderConditions: ["CDPLoggerDemoApp.InvalidLicense"],
     *     dataConditions: {
     *       Text: ["Invalid or missing feature license detected."],
     *       // Multiple data conditions can be specified:
     *       Level: { value: "ERROR", matchType: cdplogger.Client.MatchType.Exact }
     *     },
     *     limit: 100,
     *     offset: 0,
     *     flags: cdplogger.Client.EventQueryFlags.NewestFirst
     *   });
     *
     * @param {Object} query - A simple plain object representing the EventQuery.
     * @returns {Promise} Resolves with an array of event objects.
     */
    requestEvents(query) {
        const rejected = this._rejectIfDisconnected();
        if (rejected) return rejected;
        this._timeRequest();
        const requestId = this._getRequestId();
        // Validate and normalize the query up-front; throws synchronously on
        // malformed input before anything is queued or sent.
        const eventQuery = this._buildEventQuery(query);
        if (!this.isOpen) {
            this.queuedRequests[requestId] = { type: "events", query: eventQuery };
        } else {
            this._sendEventsRequest(requestId, eventQuery);
        }
        return new Promise((resolve, reject) => {
            this.storedPromises[requestId] = { resolve, reject };
        })
        .then(events => {
            // Second round-trip: collect the unique sender names from events
            // that lack cached tags, so every resolved event carries its tags.
            const missingSenders = Array.from(new Set(
                events
                    .filter(evt => !this.senderTags[evt.sender])
                    .map(evt => evt.sender)
            ));

            if (missingSenders.length === 0) {
                return events;
            }
            // Request tag info for all missing senders.
            return Promise.all(
                missingSenders.map(sender => this.getSenderTags(sender))
            ).then(() => {
                // Attach tags to events after tag info is available.
                events.forEach(evt => {
                    evt.tags = this.senderTags[evt.sender];
                });
                return events;
            });
        });
    }

    /**
     * Request a count of events that match the given query.
     *
     * The query object accepts the same keys as in requestEvents().
     *
     * @param {Object} query - The event query object.
     * @returns {Promise} A promise that resolves with the count of events.
     */
    countEvents(query) {
        const rejected = this._rejectIfDisconnected();
        if (rejected) return rejected;
        this._timeRequest();
        const requestId = this._getRequestId();
        const eventQuery = this._buildEventQuery(query);
        if (!this.isOpen) {
            this.queuedRequests[requestId] = { type: "countEvents", query: eventQuery };
        } else {
            this._sendCountEventsRequest(requestId, eventQuery);
        }
        return new Promise((resolve, reject) => {
            this.storedPromises[requestId] = { resolve, reject };
        });
    }

    /**
     * Converts a numeric CDP event code into a descriptive string,
     * combining multiple flags if needed.
     *
     * Common codes (from the docs):
     *   0x1 = AlarmSet
     *   0x2 = AlarmClr
     *   0x4 = AlarmAck
     *   0x40 = AlarmReprise
     *   0x100 = SourceObjectUnavailable
     *   0x40000000 = NodeBoot
     *
     * @param {number} code - The event code from an events response.
     * @returns {string} - A human-readable combination of flags,
     * such as "AlarmSet + SourceObjectUnavailable".
+ */ + getEventCodeDescription(code) { + const flags = []; + if (code & 0x1) flags.push("AlarmSet"); + if (code & 0x2) flags.push("AlarmClr"); + if (code & 0x4) flags.push("AlarmAck"); + if (code & 0x40) flags.push("AlarmReprise"); + if (code & 0x100) flags.push("SourceObjectUnavailable"); + if (code & 0x40000000) flags.push("NodeBoot"); + + if (flags.length === 0) { + flags.push("None"); + } + return flags.join(" + "); + } + + /** + * Returns a human‐readable string for a given event code. + * If multiple flags are set, it attempts to identify known + * combinations; otherwise, it combines them with a plus sign. + * + * @param {number} code - The numeric event code. + * @returns {string} - The corresponding event code string. + */ + getEventCodeString(code) { + if (code === 0) return ""; + const EventCodeFlags = { + AlarmSet: 0x1, + AlarmClr: 0x2, + AlarmAck: 0x4, + AlarmReprise: 0x40, + SourceObjectUnavailable: 0x100, + NodeBoot: 0x40000000 + }; + + // Check for specific single-flag codes or two-flag combos + if (code === EventCodeFlags.AlarmSet) return "AlarmSet"; + if (code === EventCodeFlags.AlarmClr) return "AlarmClear"; + if (code === EventCodeFlags.AlarmAck) return "Ack"; + if (code === EventCodeFlags.AlarmReprise) return "Reprise"; + if (code === (EventCodeFlags.AlarmReprise | EventCodeFlags.AlarmSet)) + return "RepriseAlarmSet"; + if (code === (EventCodeFlags.AlarmReprise | EventCodeFlags.AlarmClr)) + return "RepriseAlarmClear"; + if (code === (EventCodeFlags.AlarmReprise | EventCodeFlags.AlarmAck)) + return "RepriseAck"; + + // Otherwise, combine the flag strings based on which bits are set + let s = ""; + if (code & EventCodeFlags.AlarmReprise) + s += (s ? "+" : "") + "Reprise"; + if (code & EventCodeFlags.AlarmSet) + s += (s ? "+" : "") + "AlarmSet"; + if (code & EventCodeFlags.AlarmClr) + s += (s ? "+" : "") + "AlarmClear"; + if (code & EventCodeFlags.AlarmAck) + s += (s ? "+" : "") + "Ack"; + if (code & EventCodeFlags.NodeBoot) + s += (s ? 
"+" : "") + "EventNodeBoot"; + if (code & EventCodeFlags.SourceObjectUnavailable) + s += (s ? "+" : "") + "SourceObjectUnavailable"; + + return s; + } + + /** + * Retrieves the tags associated with a given sender. + * + * This method checks if the tags for the specified sender are already cached. If so, it returns a + * resolved promise with the cached tags. Otherwise, it initializes a pending promise for the sender, + * sends a request for the sender's tags using `_sendEventSenderTagsRequest`, and returns a promise that + * resolves when the tags are received. + * + * @param {string} sender - The identifier of the event sender. + * @returns {Promise} A promise that resolves with an object representing the tags for the sender. + */ + getSenderTags(sender) { + const rejected = this._rejectIfDisconnected(); + if (rejected) return rejected; + if (this.senderTags && this.senderTags[sender]) { + return Promise.resolve(this.senderTags[sender]); + } + // If no pending promise for this sender, initialize one and trigger a request. + if (!this.pendingSenderTags[sender]) { + this.pendingSenderTags[sender] = []; + this._sendEventSenderTagsRequest(sender); + } + return new Promise((resolve, reject) => { + this.pendingSenderTags[sender].push({ resolve, reject }); + }); + } + + + // --- Internal methods --- + + _connect(url) { + const ws = new WS(url); + ws._url = url; + ws.binaryType = 'arraybuffer'; + ws.onopen = () => this._onOpen(ws); + ws.onmessage = (event) => this._handleMessage(ws, event.data); + ws.onerror = (error) => this._onError(ws, error); + ws.onclose = () => this._onClose(ws); + return ws; + } + + _onOpen(ws) { + this.isOpen = true; + if (this.enableTimeSync) { + this._updateTimeDiff(); + } + this.lastTimeRequest = Date.now() / 1000; + } + + _onError(ws, error) { + if (!error) { + error = new Error("Something went wrong"); + } + // Reject all stored promises. 
+ for (const key in this.storedPromises) { + this.storedPromises[key].reject(error); + } + this.storedPromises = {}; + this.queuedRequests = {}; + + // Reject any pending sender tag promises. + for (const sender in this.pendingSenderTags) { + this.pendingSenderTags[sender].forEach(promiseObj => promiseObj.reject(error)); + delete this.pendingSenderTags[sender]; + } + } + + + _onClose(ws) { + this.isOpen = false; + if (!this.autoReconnect) { + this.disconnected = true; + this._onError(ws, new Error("Connection was closed")); + } else { + // Try to reconnect after a delay + setTimeout(() => { + this.ws = this._connect(ws._url); + }, 1000); + } + } + + _cleanupQueuedRequests() { + for (const key in this.storedPromises) { + this.storedPromises[key].reject(new Error("Connection was closed")); + } + this.storedPromises = {}; + this.queuedRequests = {}; + } + + _handleMessage(ws, message) { + const data = Container.decode(new Uint8Array(message)); + this._parseMessage(data); + } + + _parseMessage(data) { + switch (data.messageType) { + case Container.Type.eError: + if (this.storedPromises[data.error.requestId]) { + const { reject } = this.storedPromises[data.error.requestId]; + delete this.storedPromises[data.error.requestId]; + reject(new Error(data.error.errorMessage)); + } + break; + + case Container.Type.eTimeResponse: + this.timeReceived = Date.now() / 1000; + if (this.storedPromises[data.timeResponse.requestId]) { + const { resolve } = this.storedPromises[data.timeResponse.requestId]; + delete this.storedPromises[data.timeResponse.requestId]; + resolve(data.timeResponse.timestamp); + } + break; + + case Container.Type.eSignalInfoResponse: { + const nodes = []; + this.nameToId = {}; + this.idToName = {}; + for (let i = 0; i < data.signalInfoResponse.name.length; i++) { + const node = { + name: data.signalInfoResponse.name[i], + routing: data.signalInfoResponse.path[i] + }; + if (data.signalInfoResponse.tagMap && data.signalInfoResponse.tagMap[i]) { + node.tags = 
 this._convertTagMap(data.signalInfoResponse.tagMap[i]);
                    }
                    this.nameToId[data.signalInfoResponse.name[i]] = data.signalInfoResponse.id[i];
                    this.idToName[data.signalInfoResponse.id[i]] = data.signalInfoResponse.name[i];
                    nodes.push(node);
                }
                if (this.storedPromises[data.signalInfoResponse.requestId]) {
                    const { resolve } = this.storedPromises[data.signalInfoResponse.requestId];
                    delete this.storedPromises[data.signalInfoResponse.requestId];
                    resolve(nodes);
                }
                break;
            }

            case Container.Type.eCriterionLimitsResponse:
                // Shift server timestamps into client time when syncing.
                if (this.enableTimeSync) {
                    data.criterionLimitsResponse.criterionMin += this.timeDiff;
                    data.criterionLimitsResponse.criterionMax += this.timeDiff;
                }
                {
                    const limits = {
                        startS: data.criterionLimitsResponse.criterionMin,
                        endS: data.criterionLimitsResponse.criterionMax
                    };
                    if (this.storedPromises[data.criterionLimitsResponse.requestId]) {
                        const { resolve } = this.storedPromises[data.criterionLimitsResponse.requestId];
                        delete this.storedPromises[data.criterionLimitsResponse.requestId];
                        resolve(limits);
                    }
                }
                break;

            case Container.Type.eVersionResponse: {
                const version = parseFloat(data.versionResponse.version);
                // NOTE(review): the code rejects for version < 3.0 but the
                // message says "4.3 or newer" — confirm which threshold is
                // intended and align the two.
                if (version < 3.0) {
                    if (this.storedPromises[data.versionResponse.requestId]) {
                        const { reject } = this.storedPromises[data.versionResponse.requestId];
                        delete this.storedPromises[data.versionResponse.requestId];
                        reject(new Error("CDP version needs to be 4.3 or newer."));
                    }
                } else {
                    if (this.storedPromises[data.versionResponse.requestId]) {
                        const { resolve } = this.storedPromises[data.versionResponse.requestId];
                        delete this.storedPromises[data.versionResponse.requestId];
                        resolve(data.versionResponse.version);
                    }
                }
                break;
            }

            case Container.Type.eSignalDataResponse: {
                // Each row carries per-signal min/max/last variants; criterion
                // holds the row timestamps (adjusted into client time when
                // syncing).
                const dataPoints = [];
                let index = 0;
                for (const row of data.signalDataResponse.row) {
                    if (this.enableTimeSync) {
                        data.signalDataResponse.criterion[index] += this.timeDiff;
                    }
                    const signalNames = [];
                    for (const signalId of row.signalId) {
                        signalNames.push(this.idToName[signalId]);
                    }
                    const value = this._createValue(
                        signalNames,
                        row.minValues,
                        row.maxValues,
                        row.lastValues
                    );
                    dataPoints.push({
                        timestamp: data.signalDataResponse.criterion[index],
                        value
                    });
                    index++;
                }
                if (this.storedPromises[data.signalDataResponse.requestId]) {
                    const { resolve } = this.storedPromises[data.signalDataResponse.requestId];
                    delete this.storedPromises[data.signalDataResponse.requestId];
                    resolve(dataPoints);
                }
                break;
            }

            case Container.Type.eEventsResponse: {
                // Enrich events with a human-readable code description.
                if (data.eventsResponse.events && data.eventsResponse.events.length > 0) {
                    data.eventsResponse.events.forEach(evt => {
                        evt.codeDescription = this.getEventCodeDescription(evt.code);
                        // If we already have cached tags for this sender, attach them;
                        // otherwise, request them.
                        if (this.senderTags && this.senderTags[evt.sender]) {
                            evt.tags = this.senderTags[evt.sender];
                        } else {
                            // Request sender tags asynchronously.
                            // (Best-effort: the promise resolves with the raw
                            // events regardless; requestEvents() does the
                            // blocking tag attachment.)
                            this._sendEventSenderTagsRequest(evt.sender);
                        }
                    });
                }
                if (this.storedPromises[data.eventsResponse.requestId]) {
                    const { resolve } = this.storedPromises[data.eventsResponse.requestId];
                    delete this.storedPromises[data.eventsResponse.requestId];
                    resolve(data.eventsResponse.events);
                }
                break;
            }


            case Container.Type.eCountEventsResponse: {
                if (this.storedPromises[data.countEventsResponse.requestId]) {
                    const { resolve } = this.storedPromises[data.countEventsResponse.requestId];
                    delete this.storedPromises[data.countEventsResponse.requestId];
                    resolve(data.countEventsResponse.count);
                }
                break;
            }

            case Container.Type.eEventSenderTagsResponse: {
                // Get the mapping of sender names to TagMap objects.
                const tagsMapping = data.eventSenderTagsResponse.senderTags;
                // Iterate over each sender in the mapping.
                for (const sender in tagsMapping) {
                    const tags = this._convertTagMap(tagsMapping[sender]);
                    this.senderTags[sender] = tags;
                    // Resolve any pending promises waiting for tags for this sender.
                    if (this.pendingSenderTags[sender]) {
                        this.pendingSenderTags[sender].forEach(promiseObj => promiseObj.resolve(tags));
                        delete this.pendingSenderTags[sender];
                    }
                }
                break;
            }


            default:
                console.error("Unknown message type", data.messageType);
        }
    }

    // Flatten a protobuf TagMap into a plain { key: { value, source } } object.
    _convertTagMap(tagMapObj) {
        const result = {};
        if (!tagMapObj) return result;
        // If the tag map is nested under 'tags', use that; otherwise, use tagMapObj directly.
        const entries = tagMapObj.tags || tagMapObj;
        for (const [tagKey, tagInfo] of Object.entries(entries)) {
            result[tagKey] = {
                value: tagInfo.value,
                source: tagInfo.source
            };
        }
        return result;
    }

    // Build the per-timestamp { name: { min, max, last } } value mapping from
    // the raw variant arrays of one response row.
    _createValue(signalNames, minValues, maxValues, lastValues) {
        const value = {};
        for (let i = 0; i < signalNames.length; i++) {
            // Fall back to double when the signal type is unknown.
            const signalType = this.nameToType[signalNames[i]] || CDPValueType.eDOUBLE;
            if (minValues.length === 0 || maxValues.length === 0) {
                // Server does not send min and max when they are equal to last
                const last = this._valueFromVariant(lastValues[i], signalType);
                value[signalNames[i]] = {
                    min: last,
                    max: last,
                    last: last
                };
            } else {
                value[signalNames[i]] = {
                    min: this._valueFromVariant(minValues[i], signalType),
                    max: this._valueFromVariant(maxValues[i], signalType),
                    last: this._valueFromVariant(lastValues[i], signalType)
                };
            }
        }
        return value;
    }

    // Extract the concrete JS value from a VariantValue message according to
    // its declared CDP value type; null when the variant is absent or the
    // type is unknown.
    _valueFromVariant(variant, type) {
        if (!variant) return null;
        switch (type) {
            case CDPValueType.eDOUBLE:
                return variant.dValue;
            case CDPValueType.eFLOAT:
                return variant.fValue;
            case CDPValueType.eUINT64:
                return variant.ui64Value;
            case CDPValueType.eINT64:
                return variant.i64Value;
            case CDPValueType.eUINT:
                return variant.uiValue;
            case CDPValueType.eINT:
                return variant.iValue;
            case
 CDPValueType.eUSHORT:
                return variant.usValue;
            case CDPValueType.eSHORT:
                return variant.sValue;
            case CDPValueType.eUCHAR:
                return variant.ucValue;
            case CDPValueType.eCHAR:
                return variant.cValue;
            case CDPValueType.eBOOL:
                return variant.bValue;
            case CDPValueType.eSTRING:
                return variant.strValue;
            default:
                return null;
        }
    }

    // Replay every request that was queued while the socket was closed (or
    // while time sync was still settling), then clear the queue. The queued
    // entry's shape encodes the request kind (string tag, array, or object).
    _sendQueuedRequests() {
        for (const requestId in this.queuedRequests) {
            const req = this.queuedRequests[requestId];
            if (req === "logged_nodes") {
                this._sendLoggedNodesRequest(requestId);
            } else if (req === "log_limits") {
                this._sendLogLimitsRequest(requestId);
            } else if (Array.isArray(req) && req[0] === "node_values") {
                this._reqDataPoints(req[1], req[2], req[3], req[4], req[5], requestId);
            } else if (req === "api_version") {
                this._sendApiVersionRequest(requestId);
            } else if (req && req.type === "events") {
                this._sendEventsRequest(requestId, req.query);
            } else if (req && req.type === "countEvents") {
                this._sendCountEventsRequest(requestId, req.query);
            }
        }
        this.queuedRequests = {};
    }

    // Monotonically increasing id correlating requests with responses.
    _getRequestId() {
        this.reqId += 1;
        return this.reqId;
    }

    // Refresh the client/server time offset at most every 10 seconds.
    _timeRequest() {
        if (!this.enableTimeSync) return;
        if ((Date.now() / 1000) > this.lastTimeRequest + 10) {
            this._updateTimeDiff();
        }
    }

    // Issue one time request and fold the answer into the offset estimate.
    _updateTimeDiff() {
        if (!this.enableTimeSync) return;
        const requestId = this._getRequestId();
        const timeSent = Date.now() / 1000;
        this._requestTime(requestId)
            .then(timestamp => this._setTimeDiff(timestamp, timeSent))
            .catch(err => {
                if (this.storedPromises[requestId]) {
                    this.storedPromises[requestId].reject(err);
                }
            });
    }

    // Send a time request and return a promise for the server timestamp
    // (resolved in _parseMessage's eTimeResponse case).
    _requestTime(reqId) {
        if (!this.enableTimeSync) {
            return Promise.resolve(0);
        }
        const requestId = reqId;
        this.lastTimeRequest = Date.now() / 1000;
        this._sendTimeRequest(requestId);
        const promise = new Promise((resolve, reject) => {
            this.storedPromises[requestId] = { resolve, reject };
        });
        return promise;
    }


    _sendTimeRequest(requestId) {
        const container = Container.create();
        container.messageType = Container.Type.eTimeRequest;
        container.timeRequest = { requestId };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    // NTP-style offset estimation: collect three (roundTrip -> offset)
    // samples keyed by round-trip time, then keep the offset of the sample
    // with the smallest round trip. Once settled, flush requests queued
    // behind the sync (first settle only).
    // NOTE(review): two samples with an identical round-trip key overwrite
    // each other, which forces an extra measurement round — presumably
    // harmless; confirm.
    _setTimeDiff(timestamp, timeSent) {
        if (!this.enableTimeSync) return;
        const clientTime = this.timeReceived;
        const roundTripTime = clientTime - timeSent;
        // Server timestamp is in nanoseconds; convert and add half the round
        // trip to approximate the server clock at arrival time.
        const serverTime = (timestamp / 1e9) + roundTripTime / 2;
        const timeDiff = clientTime - serverTime;
        this.roundTripTimes[roundTripTime] = timeDiff;
        if (Object.keys(this.roundTripTimes).length !== 3) {
            this._updateTimeDiff();
        } else {
            const minRoundTrip = Math.min(...Object.keys(this.roundTripTimes).map(Number));
            this.timeDiff = this.roundTripTimes[minRoundTrip];
            this.roundTripTimes = {};
            if (!this.haveSentQueuedReq) {
                this._sendQueuedRequests();
                this.haveSentQueuedReq = true;
            }
        }
    }

    _sendLoggedNodesRequest(requestId) {
        const container = Container.create();
        container.messageType = Container.Type.eSignalInfoRequest;
        container.signalInfoRequest = { requestId };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    _sendLogLimitsRequest(requestId) {
        const container = Container.create();
        container.messageType = Container.Type.eCriterionLimitsRequest;
        container.criterionLimitsRequest = { requestId };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    // Resolve node names to server ids (fetching the signal list if needed),
    // then send the actual data-point request. Rejects through the caller's
    // storedPromises entry on an invalid range or unknown name.
    _reqDataPoints(nodeNames, startS, endS, noOfDataPoints, limit, requestId) {
        const _getDataPoints = (nodeIds) => {
            this._sendDataPointsRequest(nodeIds, startS, endS, requestId, noOfDataPoints, limit);
        };

        const rejectRequest = (error) => {
            if (this.storedPromises[requestId]) {
                const { reject } = this.storedPromises[requestId];
                delete this.storedPromises[requestId];
                reject(error);
            }
        };

        // Written as !(endS < startS) so equal endpoints are accepted.
        if (!(endS < startS)) {
            this._requestNodeIds(nodeNames)
                .then(nodeIds => _getDataPoints(nodeIds))
 .catch(rejectRequest);
        } else {
            rejectRequest(new Error("InvalidRequestError on node values request: endS cannot be smaller than startS"));
        }
    }

    // Map node names to ids from the cached lookup table, refreshing the
    // table via requestLoggedNodes() first when any name is unknown.
    _requestNodeIds(nodeNames) {
        return new Promise((resolve, reject) => {
            const parseIds = () => {
                for (const name of nodeNames) {
                    if (!(name in this.nameToId)) {
                        reject(new Error("Node with name " + name + " does not exist."));
                        return;
                    }
                }
                resolve(nodeNames.map(name => this.nameToId[name]));
            };

            if (nodeNames.every(name => name in this.nameToId)) {
                parseIds();
            } else {
                this.requestLoggedNodes()
                    .then(() => parseIds())
                    .catch(reject);
            }
        });
    }

    // Wire format note: criterionMin/Max are sent in server time, so the
    // client offset is subtracted when time sync is active.
    _sendDataPointsRequest(nodeIds, startS, endS, requestId, noOfDataPoints, limit) {
        const container = Container.create();
        container.messageType = Container.Type.eSignalDataRequest;
        container.signalDataRequest = {
            requestId,
            signalId: nodeIds,
            limit,
            numOfDatapoints: noOfDataPoints,
            criterionMin: this.enableTimeSync ? (startS - this.timeDiff) : startS,
            criterionMax: this.enableTimeSync ? (endS - this.timeDiff) : endS
        };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    _sendApiVersionRequest(requestId) {
        const container = Container.create();
        container.messageType = Container.Type.eVersionRequest;
        container.versionRequest = { requestId };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    _sendEventsRequest(requestId, query) {
        const container = Container.create();
        container.messageType = Container.Type.eEventsRequest;
        container.eventsRequest = { requestId, query };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    _sendCountEventsRequest(requestId, query) {
        const container = Container.create();
        container.messageType = Container.Type.eCountEventsRequest;
        container.countEventsRequest = { requestId, query };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    _sendEventSenderTagsRequest(sender) {
        const container = Container.create();
        container.messageType = Container.Type.eEventSenderTagsRequest;
        // Use a new requestId so the server can reply with a proper EventSenderTagsResponse.
        container.eventSenderTagsRequest = { requestId: this._getRequestId(), sender };
        const buffer = Container.encode(container).finish();
        this.ws.send(buffer);
    }

    /**
     * Helper method to validate the event query object.
     *
     * Allowed keys:
     * - timeRangeBegin (number)
     * - timeRangeEnd (number)
     * - limit (number)
     * - offset (number)
     * - codeMask (number)
     * - flags (number)
     * - senderConditions (array)
     * - dataConditions (object)
     *
     * @param {Object} query - The event query object provided by the user.
     * @throws {Error} If the query contains invalid property names or incorrect types.
+ */ + _validateEventQuery(query) { + const allowedKeys = { + timeRangeBegin: 'number', + timeRangeEnd: 'number', + limit: 'number', + offset: 'number', + codeMask: 'number', + flags: 'number', + senderConditions: 'array', + dataConditions: 'object' + }; + + Object.keys(query).forEach(key => { + if (!allowedKeys.hasOwnProperty(key)) { + throw new Error( + `Invalid property "${key}" in event query. Allowed properties are: ${Object.keys(allowedKeys).join(', ')}.` + ); + } + const expectedType = allowedKeys[key]; + if (expectedType === 'number' && typeof query[key] !== 'number') { + throw new Error(`Property "${key}" must be a number.`); + } + if (expectedType === 'array' && !Array.isArray(query[key])) { + throw new Error(`Property "${key}" must be an array.`); + } + if (expectedType === 'object' && (typeof query[key] !== 'object' || query[key] === null || Array.isArray(query[key]))) { + throw new Error(`Property "${key}" must be an object.`); + } + }); + } + + /** + * Helper method to build a proper EventQuery object from a simple plain object. + * + * The returned query object is used by `requestEvents()` to query + * the CDPLogger or LogServer for matching events. + * + * @param {Object} query - The simple plain object query. + * @returns {DBMessaging.Protobuf.EventQuery} - The structured EventQuery. + * @throws {Error} If a condition object is missing required properties. + */ + _buildEventQuery(query) { + // Validate the query object before building the EventQuery. 
        this._validateEventQuery(query);

        // Conditionally include these fields only if the user has set them
        const optionalFields = [
            "timeRangeBegin",
            "timeRangeEnd",
            "codeMask",
            "limit",
            "offset",
            "flags"
        ];

        // Build a base query object that includes only the fields provided
        // (an explicit !== undefined check, so falsy values like 0 are kept).
        const baseQuery = {};
        optionalFields.forEach(field => {
            if (query[field] !== undefined) {
                baseQuery[field] = query[field];
            }
        });

        // Build senderConditions if present
        if (query.senderConditions && query.senderConditions.length > 0) {
            baseQuery.senderConditions = {
                conditions: query.senderConditions.map(condition => {
                    if (typeof condition === 'object' && condition !== null) {
                        if (!('value' in condition)) {
                            throw new Error(
                                `Sender condition object must include a 'value' property. Received: ${JSON.stringify(condition)}`
                            );
                        }
                        return {
                            value: String(condition.value),
                            type: condition.matchType !== undefined
                                ? condition.matchType
                                : Client.MatchType.Wildcard
                        };
                    } else {
                        // Bare string/primitive: treat it as a wildcard match.
                        return {
                            value: condition,
                            type: Client.MatchType.Wildcard
                        };
                    }
                })
            };
        }

        // Build data conditions if present
        if (query.dataConditions) {
            const dataConds = {};
            for (const key in query.dataConditions) {
                const val = query.dataConditions[key];
                const conditions = [];

                // Each value may be an array of conditions, a single
                // { value, matchType } object, or a bare primitive.
                if (Array.isArray(val)) {
                    for (const item of val) {
                        if (typeof item === 'object' && item !== null) {
                            if (!('value' in item)) {
                                throw new Error(
                                    `Data condition for key "${key}" must include a 'value' property. Received: ${JSON.stringify(item)}`
                                );
                            }
                            conditions.push({
                                value: String(item.value),
                                type: item.matchType !== undefined
                                    ? item.matchType
                                    : Client.MatchType.Wildcard
                            });
                        } else {
                            conditions.push({
                                value: String(item),
                                type: Client.MatchType.Wildcard
                            });
                        }
                    }
                } else if (typeof val === 'object' && val !== null) {
                    if (!('value' in val)) {
                        throw new Error(
                            `Data condition for key "${key}" must include a 'value' property. Received: ${JSON.stringify(val)}`
                        );
                    }
                    conditions.push({
                        value: String(val.value),
                        type: val.matchType !== undefined
                            ? val.matchType
                            : Client.MatchType.Wildcard
                    });
                } else {
                    conditions.push({
                        value: String(val),
                        type: Client.MatchType.Wildcard
                    });
                }

                dataConds[key] = { conditions };
            }
            baseQuery.dataConditions = dataConds;
        }

        return EventQuery.create(baseQuery);
    }
}

// Export the module
// (UMD-style footer: CommonJS under Node, `window.cdplogger` global in browsers.)
const cdplogger = {};
cdplogger.Client = Client;

// For Node.js
if (typeof module !== 'undefined' && module.exports) {
    module.exports = cdplogger;
}
// For Browser
else if (typeof window !== 'undefined') {
    window.cdplogger = cdplogger;
}
diff --git a/logger/variant.proto.js b/logger/variant.proto.js
new file mode 100644
index 0000000..4ba4e20
--- /dev/null
+++ b/logger/variant.proto.js
@@ -0,0 +1,57 @@
// Raw .proto source shipped as a JS string (parsed at runtime); the text
// below is data, not JavaScript — keep it byte-identical to the server's
// variant.proto.
const variantProtoText = `
// This is the .proto file in Google Protocol Buffers format.
// When this file is compiled with Google Protocol Buffers compiler
// (https://code.google.com/p/protobuf/downloads/list), then Java/Python/C++
// code is generated which contains methods for serializing and deserializing
// the messages contained in this .proto file.

syntax = "proto2";

package ICD.Protobuf;

option optimize_for = LITE_RUNTIME;
option java_package = "no.icd.dbmessaging";

/** CDP value type identifier. */
enum CDPValueType {
  eUNDEFINED = 0;
  eDOUBLE = 1;
  eUINT64 = 2;
  eINT64 = 3;
  eFLOAT = 4;
  eUINT = 5;
  eINT = 6;
  eUSHORT = 7;
  eSHORT = 8;
  eUCHAR = 9;
  eCHAR = 10;
  eBOOL = 11;
  eSTRING = 12;
  eUSERTYPE = 100;
}

/** Common Variant value type for a remote node.
*/ +message VariantValue { + optional uint32 node_id = 1; + optional double d_value = 2; + optional float f_value = 3; + optional uint64 ui64_value = 4; + optional sint64 i64_value = 5; + optional uint32 ui_value = 6; + optional sint32 i_value = 7; + optional uint32 us_value = 8; // uint used as ushort (which protobuf doesnt have) + optional sint32 s_value = 9; // int used as short + optional uint32 uc_value = 10; // uint used as uchar + optional sint32 c_value = 11; // int used as char + optional bool b_value = 12; + optional string str_value = 13; + optional double timestamp = 14; // Source may provide timestamp for sent value + extensions 100 to max; +} +`; + +if (typeof module !== 'undefined' && module.exports) { + module.exports = variantProtoText; +} else if (typeof window !== 'undefined') { + window.variantProto = variantProtoText; +} diff --git a/test/logger-client.test.js b/test/logger-client.test.js new file mode 100644 index 0000000..55785d8 --- /dev/null +++ b/test/logger-client.test.js @@ -0,0 +1,455 @@ +/*global WebSocket*/ +global.WebSocket = require('ws'); +const studio = require('../index'); +const cdplogger = studio.logger; +const fakeData = require('./loggerFakeData'); + +describe('ClientTester', () => { + let client; + beforeEach(() => { + // Override _connect to return a fake ws object that doesn't actually connect. + cdplogger.Client.prototype._connect = function(url) { + return { + _url: url, + close: jest.fn(), + send: jest.fn() + }; + }; + // Create a new client instance. + client = new cdplogger.Client('127.0.0.1:17000', true); + // By default, disable time sync for most tests. + client.setEnableTimeSync(false); + // Adjust lastTimeRequest so that a new time request would normally be triggered. + client.lastTimeRequest = Date.now() / 1000 - 11; + // Prepopulate lookup maps. + client.idToName = { 0: "Output", 1: "CPULoad" }; + client.nameToId = { "Output": 0, "CPULoad": 1 }; + // Reset reqId so expected request IDs are predictable. 
+    client.reqId = 0;
+  });
+
+  afterEach(() => {
+    client = null; // drop the shared fixture; beforeEach builds a fresh client per test
+  });
+
+  // disconnect() must close the websocket and flush any queued requests.
+  test('test_disconnect', () => {
+    client.ws = { close: jest.fn() };
+    client._cleanupQueuedRequests = jest.fn();
+    client.disconnect();
+    expect(client.ws.close).toHaveBeenCalled();
+    expect(client._cleanupQueuedRequests).toHaveBeenCalled();
+  });
+
+  test('test_time_request', () => {
+    // Enable time sync for this test.
+    client.setEnableTimeSync(true);
+    client.isOpen = true;
+    client._sendTimeRequest = jest.fn();
+    client._timeRequest();
+    expect(client._sendTimeRequest).toHaveBeenCalledWith(expect.any(Number));
+  });
+
+  test('test_version_request_also_sends_time_request', () => {
+    client.reqId = 0;
+    client.setEnableTimeSync(false);
+    client.isOpen = true;
+    client._sendApiVersionRequest = jest.fn();
+    client.requestApiVersion();
+    // With time sync disabled, only _sendApiVersionRequest is called.
+    expect(client._sendApiVersionRequest).toHaveBeenCalledWith(1);
+  });
+
+  test('test_log_limits_request_also_sends_time_request', () => {
+    client.reqId = 0;
+    client.setEnableTimeSync(false);
+    client.isOpen = true;
+    client._sendLogLimitsRequest = jest.fn();
+    client.requestLogLimits();
+    expect(client._sendLogLimitsRequest).toHaveBeenCalledWith(1);
+  });
+
+  test('test_logged_nodes_request_also_sends_time_request', () => {
+    client.reqId = 0;
+    client.setEnableTimeSync(false);
+    client.isOpen = true;
+    client._sendLoggedNodesRequest = jest.fn();
+    client.requestLoggedNodes();
+    expect(client._sendLoggedNodesRequest).toHaveBeenCalledWith(1);
+  });
+
+  // With time sync enabled, a data-points request first issues a time request
+  // (id 1), then the data-points request itself (id 2).
+  test('test_data_points_request_also_sends_time_request', done => {
+    // Enable time sync for this test.
+    client.setEnableTimeSync(true);
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendTimeRequest = jest.fn();
+    client._sendDataPointsRequest = jest.fn();
+    // Call with five explicit parameters: nodeNames, startS, endS, noOfDataPoints, limit.
+    client.requestDataPoints(["Output", "CPULoad"], 1530613239.0, 1530613270.0, 0, 500);
+    // Simulate a time response for the time request.
+    const timeResponse = {
+      messageType: fakeData.Container.Type.eTimeResponse,
+      timeResponse: { requestId: 1, timestamp: 1e9 }
+    };
+    client._parseMessage(timeResponse);
+    // Simulate a data points response.
+    client._parseMessage(fakeData.createDataPointResponse());
+    setImmediate(() => {
+      expect(client._sendTimeRequest).toHaveBeenCalledWith(1);
+      expect(client._sendDataPointsRequest).toHaveBeenCalledWith(
+        [0, 1], // nodeIds
+        1530613239.0, // startS
+        1530613270.0, // endS
+        2, // requestId
+        0, // noOfDataPoints
+        500 // limit
+      );
+      done();
+    });
+  });
+
+  test('test_version_request', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendApiVersionRequest = jest.fn();
+    client.requestApiVersion()
+      .then(version => {
+        expect(version).not.toBeNull();
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createApiVersionResponse();
+    client._parseMessage(response);
+  });
+
+  // A version response of "1.0" is treated as an unsupported server and rejects.
+  test('test_version_request_error', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendApiVersionRequest = jest.fn();
+    client.requestApiVersion()
+      .then(() => done.fail("Promise should not resolve"))
+      .catch(err => {
+        expect(err).toBeInstanceOf(Error);
+        done();
+      });
+    const response = fakeData.createApiVersionErrorResponse();
+    client._parseMessage(response);
+  });
+
+  test('test_log_limits_request', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendLogLimitsRequest = jest.fn();
+    client.requestLogLimits()
+      .then(limits => {
+        expect(limits.startS).toBeCloseTo(1529497537.61);
+        expect(limits.endS).toBeCloseTo(1531389483.02);
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createLogLimitsResponse();
+    client._parseMessage(response);
+  });
+
+  // timeDiff is added to both limits when time sync is active.
+  test('test_log_limits_request_with_time_diff', done => {
+    // Enable time sync and override _timeRequest to avoid triggering an extra time request.
+    client.setEnableTimeSync(true);
+    client._timeRequest = jest.fn();
+    client.timeDiff = 10;
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendLogLimitsRequest = jest.fn();
+    client.requestLogLimits()
+      .then(limits => {
+        expect(limits.startS).toBeCloseTo(1529497537.61 + 10);
+        expect(limits.endS).toBeCloseTo(1531389483.02 + 10);
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createLogLimitsResponse();
+    client._parseMessage(response);
+  });
+
+  test('test_logged_nodes_request', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendLoggedNodesRequest = jest.fn();
+    client.requestLoggedNodes()
+      .then(nodes => {
+        expect(nodes[0].name).toBe("Output");
+        expect(nodes[0].routing).toBe("loggerApp.Sine.Output");
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createLoggedNodesResponse(1);
+    client._parseMessage(response);
+  });
+
+  test('test_data_points_request', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendDataPointsRequest = jest.fn();
+    client.requestDataPoints(["Output", "CPULoad"], 1531313250.0, 1531461231.0, 500, 0)
+      .then(dataPoints => {
+        expect(dataPoints[0].timestamp).toBeCloseTo(1531313250.0);
+        expect(dataPoints[0].value["Output"].min).toBeCloseTo(0.638855091434);
+        expect(dataPoints[0].value["Output"].max).toBeCloseTo(0.639955091434);
+        expect(dataPoints[0].value["Output"].last).toBeCloseTo(0.638855091434);
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createDataPointResponse();
+    client._parseMessage(response);
+  });
+
+  // Removing "Output" from both lookup maps must make the request reject with
+  // an Error naming the unknown node.
+  test('test_data_points_request_error_on_names', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    delete client.nameToId["Output"];
+    for (const id in client.idToName) {
+      if (client.idToName[id] === "Output") {
+        delete client.idToName[id];
+      }
+    }
+    client.requestDataPoints(["Output", "CPULoad"], 1531313250.0, 1531461231.0, 500, 0)
+      .catch(error => {
+        expect(error).toBeInstanceOf(Error);
+        expect(error.message).toMatch(/Output/);
+        done();
+      });
+    const response = {
+      messageType: fakeData.Container.Type.eSignalInfoResponse,
+      signalInfoResponse: {
+        requestId: 2,
+        name: ["CPULoad", "MemUsed", "CDPSignal"],
+        id: [1, 2, 3],
+        type: [],
+        path: [
+          "loggerApp.CPULoad",
+          "loggerApp.MemUsed",
+          "loggerApp.CDPSignal"
+        ]
+      }
+    };
+    client._parseMessage(response);
+  });
+
+  test('test_error_response_on_log_limits_request', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client.requestLogLimits()
+      .catch(error => {
+        expect(error).toBeInstanceOf(Error);
+        expect(error.message).toBe("Error message");
+        done();
+      });
+    const response = fakeData.createErrorResponse();
+    client._parseMessage(response);
+  });
+
+  test('test_events_request_with_conditions', () => {
+    client.reqId = 0;
+    client.isOpen = true;
+    client._sendEventsRequest = jest.fn();
+    const queryWithConditions = {
+      timeRangeBegin: 1000,
+      timeRangeEnd: 2000,
+      codeMask: 0,
+      limit: 10,
+      offset: 0,
+      flags: 0,
+      senderConditions: ["*CDPLoggerDemoApp.Sine*"],
+      dataConditions: {
+        "Text": "Component was suspended!"
+      }
+    };
+    client.requestEvents(queryWithConditions);
+    expect(client._sendEventsRequest).toHaveBeenCalledWith(1, client._buildEventQuery(queryWithConditions));
+  });
+
+  test('test_event_code_description_none', done => {
+    client.reqId = 0;
+    client.requestEvents({})
+      .then(events => {
+        expect(events).toHaveLength(1);
+        expect(events[0].code).toBe(0);
+        expect(events[0].codeDescription).toBe("None");
+        done();
+      })
+      .catch(done.fail);
+    const response = {
+      messageType: fakeData.Container.Type.eEventsResponse,
+      eventsResponse: {
+        requestId: 1,
+        events: [
+          {
+            sender: "Test",
+            data: {},
+            timestampSec: 1234,
+            id: 999,
+            code: 0,
+            status: 0,
+            logstampSec: 1234
+          }
+        ]
+      }
+    };
+    client._parseMessage(response);
+    const tagResponse = fakeData.createEventSenderTagsResponse("Test", { tags: {} });
+    client._parseMessage(tagResponse);
+  });
+
+  // code 0x5 sets two flag bits, so the description joins both names with " + ".
+  test('test_event_code_description_multiple_flags', done => {
+    client.reqId = 0;
+    client.requestEvents({ timeRangeBegin: 1000, timeRangeEnd: 2000, codeMask: 0, limit: 10, offset: 0, flags: 0 })
+      .then(events => {
+        expect(events).toHaveLength(1);
+        expect(events[0].code).toBe(0x5);
+        expect(events[0].codeDescription).toBe("AlarmSet + AlarmAck");
+        done();
+      })
+      .catch(done.fail);
+    const response = {
+      messageType: fakeData.Container.Type.eEventsResponse,
+      eventsResponse: {
+        requestId: 1,
+        events: [
+          { sender: "MultiFlagSensor", data: { key: "value" }, timestampSec: 1500, id: 42, code: 0x5, status: 1, logstampSec: 1500 }
+        ]
+      }
+    };
+    client._parseMessage(response);
+    const tagResponse = fakeData.createEventSenderTagsResponse("MultiFlagSensor", { tags: {} });
+    client._parseMessage(tagResponse);
+  });
+
+  test('test_realistic_events', done => {
+    client.reqId = 0;
+    client.isOpen = true;
+    // Prepopulate senderTags to avoid waiting for tag lookups.
+    // NOTE(review): sender names mix "CPD" and "CDP" spellings; they deliberately
+    // match the fixture in createRealisticEventsResponse byte-for-byte, so do not
+    // "correct" one side without the other — confirm intent before renaming.
+    client.senderTags["CPDLoggerDemoApp.InvalidLicense"] = {};
+    client.senderTags["CDPLoggerDemoApp.CPDEventNotification"] = {};
+    client.senderTags["CPDLoggerDemoApp"] = {};
+    client.requestEvents({
+      timeRangeBegin: 1740284000,
+      timeRangeEnd: 1740284300,
+      codeMask: 0xFFFFFFFF,
+      limit: 10,
+      offset: 0,
+      flags: 0
+    })
+      .then(events => {
+        expect(events).toHaveLength(4);
+        expect(events[0].sender).toBe("CPDLoggerDemoApp.InvalidLicense");
+        expect(events[0].data["Text"]).toBe("Invalid or missing feature license detected.");
+        expect(events[0].codeDescription).toBe("AlarmSet");
+        expect(events[0].status).toBe(1);
+        expect(events[1].sender).toBe("CDPLoggerDemoApp.CPDEventNotification");
+        expect(events[1].data["Text"]).toBe("CDP event notice");
+        expect(events[1].codeDescription).toBe("None");
+        expect(events[1].status).toBe(3);
+        expect(events[2].sender).toBe("CPDLoggerDemoApp");
+        expect(events[2].data["Text"]).toContain("A component is suspended");
+        expect(events[2].codeDescription).toBe("AlarmSet");
+        expect(events[2].status).toBe(1);
+        expect(events[3].sender).toBe("CPDLoggerDemoApp");
+        expect(events[3].data["Text"]).toBe("Component was suspended");
+        expect(events[3].codeDescription).toBe("None");
+        expect(events[3].status).toBe(2);
+        done();
+      })
+      .catch(done.fail);
+    const response = fakeData.createRealisticEventsResponse(1);
+    client._parseMessage(response);
+  });
+
+  test('test_getSenderTags_success', done => {
+    client.isOpen = true;
+    client._sendEventSenderTagsRequest = jest.fn();
+    const sender = "TestSender";
+    const tagPromise = client.getSenderTags(sender);
+    expect(client._sendEventSenderTagsRequest).toHaveBeenCalledWith(sender);
+    const response = fakeData.createEventSenderTagsResponse(sender, { tags: { Tag1: { value: "Value1", source: "Source1" } } });
+    client._parseMessage(response);
+    tagPromise.then(tags => {
+      expect(tags).toEqual({ Tag1: { value: "Value1", source: "Source1" } });
+      done();
+    }).catch(done.fail);
+  });
+
+  test('test_getSenderTags_rejects_on_ws_error', done => {
+    client.isOpen = true;
+    const sender = "TestSender";
+    const tagPromise = client.getSenderTags(sender);
+    const error = new Error("WS error");
+    // Simulate a websocket-level failure: pending tag promises must reject with
+    // the exact error object passed to _onError.
+    client._onError(client.ws, error);
+    tagPromise.then(() => done.fail("Promise should not resolve"))
+      .catch(err => {
+        expect(err).toBe(error);
+        done();
+      });
+  });
+
+  test('test_events_request_attaches_sender_tags', done => {
+    client.reqId = 0;
+    // Disable time sync interference.
+    client._timeRequest = jest.fn();
+    client.isOpen = true;
+    // Override _buildEventQuery to bypass generated code dependency.
+    client._buildEventQuery = query => query;
+
+    // Capture the client reference locally.
+    const localClient = client;
+
+    // Override _sendEventsRequest to simulate an asynchronous events response.
+    localClient._sendEventsRequest = (requestId, query) => {
+      setImmediate(() => {
+        const response = {
+          messageType: fakeData.Container.Type.eEventsResponse,
+          eventsResponse: {
+            requestId,
+            events: [
+              {
+                sender: "TestSender",
+                data: { Text: "Test event" },
+                timestampSec: 1234,
+                id: 1,
+                code: 0,
+                status: 0,
+                logstampSec: 1234
+              }
+            ]
+          }
+        };
+        localClient._parseMessage(response);
+      });
+    };
+
+    // Clear any cached sender tags and pending promises.
+    localClient.senderTags = {};
+    localClient.pendingSenderTags = {};
+
+    // Override _sendEventSenderTagsRequest to simulate an asynchronous immediate tag response.
+    localClient._sendEventSenderTagsRequest = sender => {
+      setImmediate(() => {
+        const tagResponse = fakeData.createEventSenderTagsResponse(
+          sender,
+          { tags: { Tag1: { value: "Value1", source: "Source1" } } }
+        );
+        localClient._parseMessage(tagResponse);
+      });
+    };
+
+    localClient.requestEvents({})
+      .then(events => {
+        try {
+          expect(events).toHaveLength(1);
+          expect(events[0].tags).toEqual({ Tag1: { value: "Value1", source: "Source1" } });
+          done();
+        } catch (err) {
+          done(err);
+        }
+      })
+      .catch(err => done(err));
+  });
+});
diff --git a/test/loggerFakeData.js b/test/loggerFakeData.js
new file mode 100644
index 0000000..551a8b8
--- /dev/null
+++ b/test/loggerFakeData.js
@@ -0,0 +1,180 @@
+// fakeData.js
+
+const Container = {
+  Type: { // message-type ids; presumably mirror the logger container .proto enum — confirm against logger/container.proto.js
+    eSignalInfoRequest: 1,
+    eSignalInfoResponse: 2,
+    eSignalDataRequest: 3,
+    eSignalDataResponse: 4,
+    eCriterionLimitsRequest: 5,
+    eCriterionLimitsResponse: 6,
+    eVersionRequest: 7,
+    eVersionResponse: 8,
+    eError: 9,
+    eTimeRequest: 10,
+    eTimeResponse: 11,
+    eEventSenderTagsRequest: 12,
+    eEventSenderTagsResponse: 13,
+    eCountEventsRequest: 14,
+    eCountEventsResponse: 15,
+    eEventsRequest: 16,
+    eEventsResponse: 17
+  }
+};
+
+function createApiVersionResponse() { // well-formed version response accepted by the client
+  return {
+    messageType: Container.Type.eVersionResponse,
+    versionResponse: { requestId: 1, version: "3.0" }
+  };
+}
+
+function createApiVersionErrorResponse() { // version "1.0" — used by tests expecting rejection (too old, presumably)
+  return {
+    messageType: Container.Type.eVersionResponse,
+    versionResponse: { requestId: 1, version: "1.0" }
+  };
+}
+
+function createLogLimitsResponse() { // criterionMin/Max are epoch seconds asserted via toBeCloseTo in the tests
+  return {
+    messageType: Container.Type.eCriterionLimitsResponse,
+    criterionLimitsResponse: {
+      requestId: 1,
+      criterionMin: 1529497537.61,
+      criterionMax: 1531389483.02
+    }
+  };
+}
+
+function createLoggedNodesResponse(requestId) { // parallel name/id/path arrays describing four logged signals
+  return {
+    messageType: Container.Type.eSignalInfoResponse,
+    signalInfoResponse: {
+      requestId: requestId,
+      name: ["Output", "CPULoad", "MemUsed", "CDPSignal"],
+      id: [0, 1, 2, 3],
+      type: [],
+      path: [
+        "loggerApp.Sine.Output",
+        "loggerApp.CPULoad",
+        "loggerApp.MemUsed",
+        "loggerApp.CDPSignal"
+      ]
+    }
+  };
+}
+
+function createDataPointResponse() { // two rows of min/max/last samples for signal ids 0 and 1
+  return {
+    messageType: Container.Type.eSignalDataResponse,
+    signalDataResponse: {
+      requestId: 1,
+      criterion: [1531313250.0, 1530613239.063119], // per-row timestamps (epoch seconds)
+      row: [
+        {
+          signalId: [0, 1],
+          minValues: [{ dValue: 0.638855091434 }, { dValue: 0.538855091434 }],
+          maxValues: [{ dValue: 0.639955091434 }, { dValue: 0.53955091434 }],
+          lastValues: [{ dValue: 0.638855091434 }, { dValue: 0.538855091434 }]
+        },
+        {
+          signalId: [0, 1],
+          minValues: [{ dValue: 0.738855091434 }, { dValue: 0.338855091434 }],
+          maxValues: [{ dValue: 0.739955091434 }, { dValue: 0.358855091434 }],
+          lastValues: [{ dValue: 0.738855091434 }, { dValue: 0.348855091434 }]
+        }
+      ]
+    }
+  };
+}
+
+function createErrorResponse() { // generic error container; tests assert the exact "Error message" text
+  return {
+    messageType: Container.Type.eError,
+    error: {
+      requestId: 1,
+      errorMessage: "Error message",
+      errorCode: 1234567
+    }
+  };
+}
+
+function createRealisticEventsResponse(requestId = 1) { // NOTE(review): "CPD"/"CDP" spellings are intentionally mirrored by test_realistic_events
+  return {
+    messageType: Container.Type.eEventsResponse,
+    eventsResponse: {
+      requestId,
+      events: [
+        {
+          sender: "CPDLoggerDemoApp.InvalidLicense",
+          data: {
+            Text: "Invalid or missing feature license detected."
+          },
+          timestampSec: 1740284241,
+          id: 101,
+          code: 0x1,
+          status: 1,
+          logstampSec: 1740284241
+        },
+        {
+          sender: "CDPLoggerDemoApp.CPDEventNotification",
+          data: {
+            Text: "CDP event notice"
+          },
+          timestampSec: 1740284167,
+          id: 102,
+          code: 0,
+          status: 3,
+          logstampSec: 1740284167
+        },
+        {
+          sender: "CPDLoggerDemoApp",
+          data: {
+            Text: "A component is suspended"
+          },
+          timestampSec: 1740284145,
+          id: 103,
+          code: 0x1,
+          status: 1,
+          logstampSec: 1740284145
+        },
+        {
+          sender: "CPDLoggerDemoApp",
+          data: {
+            Text: "Component was suspended"
+          },
+          timestampSec: 1740284090,
+          id: 104,
+          code: 0,
+          status: 2,
+          logstampSec: 1740284090
+        }
+      ]
+    }
+  };
+}
+
+function createEventSenderTagsResponse(sender, tagMap) { // tags keyed by the (dynamic) sender name
+  return {
+    messageType: Container.Type.eEventSenderTagsResponse,
+    eventSenderTagsResponse: {
+      senderTags: {
+        [sender]: tagMap
+      }
+    }
+  };
+}
+
+
+module.exports = {
+  createApiVersionResponse,
+  createApiVersionErrorResponse,
+  createLogLimitsResponse,
+  createLoggedNodesResponse,
+  createDataPointResponse,
+  createErrorResponse,
+  createRealisticEventsResponse,
+  createEventSenderTagsResponse,
+  Container
+};