Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,599 changes: 1,599 additions & 0 deletions tests/unit/handlers/metrics.handlers.test.js

Large diffs are not rendered by default.

65 changes: 65 additions & 0 deletions tests/unit/routes/metrics.routes.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
'use strict'

const test = require('brittle')
const { testModuleStructure, testHandlerFunctions, testOnRequestFunctions } = require('../helpers/routeTestHelpers')
const { createRoutesForTest } = require('../helpers/mockHelpers')

// Location of the routes module under test, relative to this test file.
const ROUTES_PATH = '../../../workers/lib/server/routes/metrics.routes.js'

// Smoke test: delegates the structural checks for the 'metrics' route
// module (exports, naming) to the shared routeTestHelpers helper.
test('metrics routes - module structure', (t) => {
  testModuleStructure(t, ROUTES_PATH, 'metrics')
  // NOTE(review): assumes testModuleStructure performs its own assertions;
  // t.pass() just guarantees the test records at least one.
  t.pass()
})

// Every metrics endpoint the routes module must expose, paired with the
// assertion message reported when it is missing.
test('metrics routes - route definitions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  const routeUrls = routes.map(route => route.url)

  const expectedRoutes = [
    ['/auth/metrics/hashrate', 'should have hashrate route'],
    ['/auth/metrics/consumption', 'should have consumption route'],
    ['/auth/metrics/efficiency', 'should have efficiency route'],
    ['/auth/metrics/miner-status', 'should have miner-status route'],
    ['/auth/metrics/power-mode', 'should have power-mode route'],
    ['/auth/metrics/power-mode/timeline', 'should have power-mode/timeline route'],
    ['/auth/metrics/temperature', 'should have temperature route'],
    ['/auth/metrics/containers/:id', 'should have container telemetry route'],
    ['/auth/metrics/containers/:id/history', 'should have container history route']
  ]

  for (const [url, message] of expectedRoutes) {
    t.ok(routeUrls.includes(url), message)
  }

  t.pass()
})

// All metrics endpoints are read-only; every registered route must be GET.
test('metrics routes - HTTP methods', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)

  for (const route of routes) {
    t.is(route.method, 'GET', `route ${route.url} should be GET`)
  }

  t.pass()
})

// Validates schema shape on routes that declare one. The previous version
// asserted `t.ok(route.schema, ...)` after filtering on `route.schema`,
// which was a tautology that could never fail; only the meaningful
// querystring-type check is kept.
test('metrics routes - schema integration', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)

  for (const route of routes) {
    if (!route.schema) continue
    if (route.schema.querystring) {
      t.ok(typeof route.schema.querystring === 'object', `route ${route.url} querystring should be object`)
    }
  }

  t.pass()
})

// Delegates per-route handler checks to the shared routeTestHelpers helper
// for the 'metrics' group.
test('metrics routes - handler functions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  testHandlerFunctions(t, routes, 'metrics')
  t.pass()
})

// Delegates onRequest-hook checks to the shared routeTestHelpers helper
// for the 'metrics' group.
test('metrics routes - onRequest functions', (t) => {
  const routes = createRoutesForTest(ROUTES_PATH)
  testOnRequestFunctions(t, routes, 'metrics')
  t.pass()
})
69 changes: 67 additions & 2 deletions workers/lib/constants.js
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,17 @@ const ENDPOINTS = {

SITE_STATUS_LIVE: '/auth/site/status/live',

// Metrics endpoints
METRICS_HASHRATE: '/auth/metrics/hashrate',
METRICS_CONSUMPTION: '/auth/metrics/consumption',
METRICS_EFFICIENCY: '/auth/metrics/efficiency',
METRICS_MINER_STATUS: '/auth/metrics/miner-status',
METRICS_POWER_MODE: '/auth/metrics/power-mode',
METRICS_POWER_MODE_TIMELINE: '/auth/metrics/power-mode/timeline',
METRICS_TEMPERATURE: '/auth/metrics/temperature',
METRICS_CONTAINER_TELEMETRY: '/auth/metrics/containers/:id',
METRICS_CONTAINER_HISTORY: '/auth/metrics/containers/:id/history',

// Alerts endpoints
ALERTS_SITE: '/auth/alerts/site',
ALERTS_HISTORY: '/auth/alerts/history'
Expand Down Expand Up @@ -245,12 +256,60 @@ const POWER_MODES = {
SLEEP: 'sleep'
}

// Common durations in milliseconds used by the metrics handlers.
const METRICS_TIME = {
  ONE_DAY_MS: 24 * 60 * 60 * 1000,
  TWO_DAYS_MS: 2 * 24 * 60 * 60 * 1000,
  NINETY_DAYS_MS: 90 * 24 * 60 * 60 * 1000,
  THREE_HOURS_MS: 3 * 60 * 60 * 1000,
  ONE_MONTH_MS: 30 * 24 * 60 * 60 * 1000 // a month approximated as 30 days
}

// Default row limits for metrics listings.
// NOTE(review): 10080 = 7 days of 1-minute samples — confirm against the
// sampling interval actually used by the timeline/history endpoints.
const METRICS_DEFAULTS = {
  TIMELINE_LIMIT: 10080,
  CONTAINER_HISTORY_LIMIT: 10080
}

// Buckets a miner can be classified into for status reporting.
const MINER_CATEGORIES = {
  LOW: 'low',
  NORMAL: 'normal',
  HIGH: 'high',
  SLEEP: 'sleep',
  OFFLINE: 'offline',
  ERROR: 'error',
  NOT_MINING: 'notMining',
  MAINTENANCE: 'maintenance'
}

// Keys identifying the aggregated stats logs that metrics queries read.
// NOTE(review): presumably 3-hour and 5-minute resolution logs — confirm.
const LOG_KEYS = {
  STAT_3H: 'stat-3h',
  STAT_5M: 'stat-5m'
}

// Device tag values used to distinguish worker types in device lists.
const WORKER_TAGS = {
  MINER: 't-miner',
  CONTAINER: 't-container'
}

// Fields fetched for device listings (1 = include).
// NOTE(review): looks like a MongoDB-style projection — confirm the store.
const DEVICE_LIST_FIELDS = {
  id: 1, type: 1, code: 1, ip: 1, tags: 1, info: 1, rack: 1
}

// Field names of pre-computed aggregates consumed by the metrics handlers.
// Fixed: the UTE_ENERGY entry was duplicated (first occurrence missing its
// trailing comma), which is a syntax error; a single entry is kept.
const AGGR_FIELDS = {
  HASHRATE_SUM: 'hashrate_mhs_5m_sum_aggr',
  SITE_POWER: 'site_power_w',
  ENERGY_AGGR: 'energy_aggr',
  ACTIVE_ENERGY_IN: 'active_energy_in_aggr',
  UTE_ENERGY: 'ute_energy_aggr',
  EFFICIENCY: 'efficiency_w_ths_avg_aggr',
  POWER_MODE_GROUP: 'power_mode_group_aggr',
  STATUS_GROUP: 'status_group_aggr',
  TEMP_MAX: 'temperature_c_group_max_aggr',
  TEMP_AVG: 'temperature_c_group_avg_aggr',
  TYPE_CNT: 'type_cnt',
  OFFLINE_CNT: 'offline_cnt',
  SLEEP_CNT: 'power_mode_sleep_cnt',
  MAINTENANCE_CNT: 'maintenance_type_cnt',
  CONTAINER_SPECIFIC_STATS: 'container_specific_stats_group_aggr'
}

const PERIOD_TYPES = {
Expand Down Expand Up @@ -319,12 +378,18 @@ module.exports = {
NON_METRIC_KEYS,
BTC_SATS,
RANGE_BUCKETS,
METRICS_TIME,
METRICS_DEFAULTS,
MINER_CATEGORIES,
LOG_KEYS,
WORKER_TAGS,
SEVERITY_LEVELS,
ALERTS_DEFAULT_LIMIT,
ALERTS_MAX_SITE_LIMIT,
ALERTS_MAX_HISTORY_LIMIT,
SITE_ALERTS_FILTER_FIELDS,
SITE_ALERTS_SEARCH_FIELDS,
HISTORY_FILTER_FIELDS,
HISTORY_SEARCH_FIELDS
HISTORY_SEARCH_FIELDS,
DEVICE_LIST_FIELDS
}
112 changes: 112 additions & 0 deletions workers/lib/metrics.utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
'use strict'

const { getStartOfDay } = require('./period.utils')
const { METRICS_TIME, LOG_KEYS } = require('./constants')

/**
 * Parse a timestamp from an RPC entry.
 * With groupRange, ts may be a range string like "1770854400000-1771459199999";
 * in that case the start of the range is extracted.
 *
 * @param {number|string} ts - Epoch-ms number, numeric string, or "start-end" range string.
 * @returns {number|null} Epoch milliseconds, or null when unparseable
 *   (previously NaN leaked out for garbage strings / NaN input).
 */
function parseEntryTs (ts) {
  if (typeof ts === 'number') {
    return Number.isFinite(ts) ? ts : null
  }
  if (typeof ts === 'string') {
    // dashIdx > 0 so a leading "-" (negative number) is not treated as a range separator
    const dashIdx = ts.indexOf('-')
    const parsed = dashIdx > 0 ? Number(ts.slice(0, dashIdx)) : Number(ts)
    return Number.isFinite(parsed) ? parsed : null
  }
  return null
}

/**
 * Validate and normalise the `start`/`end` query parameters of a request.
 *
 * @param {object} req - Request carrying `query.start` / `query.end` (epoch ms).
 * @returns {{ start: number, end: number }} Parsed range bounds.
 * @throws {Error} ERR_MISSING_START_END when a bound is absent, zero, NaN or
 *   non-finite; ERR_INVALID_DATE_RANGE when start >= end.
 */
function validateStartEnd (req) {
  const start = Number(req.query.start)
  const end = Number(req.query.end)

  // !start also rejects 0 and NaN; Number.isFinite additionally rejects
  // "Infinity" supplied as a query value, which previously slipped through.
  if (!start || !end || !Number.isFinite(start) || !Number.isFinite(end)) {
    throw new Error('ERR_MISSING_START_END')
  }

  if (start >= end) {
    throw new Error('ERR_INVALID_DATE_RANGE')
  }

  return { start, end }
}

/**
 * Yield every valid entry from a list of RPC results.
 * A result may itself be an array of entries, or an object carrying them
 * under `data` or `result`. Results/entries that are falsy or flagged with
 * `error` are skipped.
 *
 * @param {Array} results - Raw RPC responses.
 * @yields {object} Each usable entry.
 */
function * iterateRpcEntries (results) {
  for (const res of results) {
    if (!res || res.error) continue
    let entries
    if (Array.isArray(res)) {
      entries = res
    } else {
      entries = res.data || res.result || []
    }
    if (!Array.isArray(entries)) continue
    for (const entry of entries) {
      if (entry && !entry.error) yield entry
    }
  }
}

/**
 * Invoke `callback(ts, value)` for each item of a range-aggregated RPC entry.
 * Supports array-shaped entries ({ data: [...] }, { items: [...] } or a bare
 * array) and object-shaped entries keyed by timestamp. Timestamps are
 * normalised to start-of-day via getStartOfDay.
 *
 * @param {object|Array} entry - One RPC entry; ignored when falsy.
 * @param {function(number, *): void} callback - Receives (startOfDayTs, value).
 */
function forEachRangeAggrItem (entry, callback) {
  if (!entry) return
  const items = entry.data || entry.items || entry
  if (Array.isArray(items)) {
    for (const item of items) {
      // ?? instead of || so a legitimate epoch/value of 0 is not discarded
      const ts = getStartOfDay(parseEntryTs(item.ts ?? item.timestamp))
      if (!ts) continue
      callback(ts, item.val ?? item)
    }
  } else if (typeof items === 'object') {
    for (const [key, val] of Object.entries(items)) {
      const ts = getStartOfDay(parseEntryTs(Number(key)))
      if (!ts) continue
      callback(ts, val)
    }
  }
}

/**
 * Sum the numeric values of an object; non-numeric values count as 0.
 *
 * @param {object} obj - Value map; non-objects and falsy input yield 0.
 * @returns {number} Total of all coercible values.
 */
function sumObjectValues (obj) {
  if (!obj || typeof obj !== 'object') return 0
  let total = 0
  for (const value of Object.values(obj)) {
    total += Number(value) || 0
  }
  return total
}

/**
 * Extract container name from a device key.
 * Strips the last dash-separated segment (assumed to be position/index).
 * e.g. "bitdeer-9a-miner1" -> "bitdeer-9a"
 * NOTE: This is a heuristic based on naming convention in power_mode_group_aggr data.
 * Device keys are identifiers from aggregated data, not auto-generated IDs.
 *
 * @param {string} deviceKey - Dash-separated device identifier.
 * @returns {string} The key with its last segment removed, or the key
 *   unchanged when there is no dash past position 0.
 */
function extractContainerFromMinerKey (deviceKey) {
  const cut = deviceKey.lastIndexOf('-')
  if (cut <= 0) return deviceKey
  return deviceKey.slice(0, cut)
}

/**
 * Pick a chart interval for a time range when the caller did not request one.
 * Ranges up to two days use '1h', up to ninety days '1d', otherwise '1w'.
 *
 * @param {number} start - Range start, epoch ms.
 * @param {number} end - Range end, epoch ms.
 * @param {string} [requested] - Explicit interval; returned as-is when truthy.
 * @returns {string} '1h' | '1d' | '1w' (or the requested value).
 */
function resolveInterval (start, end, requested) {
  if (requested) return requested
  const span = end - start
  if (span <= METRICS_TIME.TWO_DAYS_MS) return '1h'
  return span <= METRICS_TIME.NINETY_DAYS_MS ? '1d' : '1w'
}

/**
 * Map an interval name to the stats log key and RPC groupRange value.
 *
 * @param {string} interval - '1h', '1d' or '1w'; anything else falls back
 *   to daily grouping, matching the original default branch.
 * @returns {{ key: string, groupRange: (string|null) }}
 */
function getIntervalConfig (interval) {
  // All intervals read the same 3h stats log; only the grouping differs.
  let groupRange = '1D'
  if (interval === '1h') {
    groupRange = null
  } else if (interval === '1w') {
    groupRange = '1W'
  }
  return { key: LOG_KEYS.STAT_3H, groupRange }
}

// Public API of the metrics utilities module.
module.exports = {
  parseEntryTs,
  validateStartEnd,
  iterateRpcEntries,
  forEachRangeAggrItem,
  sumObjectValues,
  extractContainerFromMinerKey,
  resolveInterval,
  getIntervalConfig
}
Loading