feat: move instrument onboarding to YAML config

Replace DB-backed instrument upserts with app.yaml-driven config loading, matching, and translator application in the ingestion workflow. Also add serial-port connector support, startup validation tooling, and migration tracking updates to keep runtime behavior and docs aligned.
This commit is contained in:
mahdahar 2026-04-06 16:50:17 +07:00
parent b76f57e1b9
commit dc6cca71cf
30 changed files with 1411 additions and 144 deletions

View File

@ -24,7 +24,7 @@
## Environment & Secrets
- Node 20+ is assumed because the code uses optional chaining, `String.raw`, and other modern primitives; keep the same runtime for development and CI.
- All ports, DB paths, and CLQMS credentials are wired through `middleware/config/default.js` and its environmental overrides (e.g., `HTTP_JSON_PORT`, `CLQMS_TOKEN`, `WORKER_BATCH_SIZE`).
- All ports, DB paths, and CLQMS credentials are sourced from `middleware/config/app.yaml` (loaded by `middleware/config/default.js`) as the single runtime config file.
- Treat `CLQMS_TOKEN`, database files, and other secrets as environment-provided values; never embed them in checked-in files.
- `middleware/data/workstation.sqlite` is the runtime database. Don't delete or reinitialize it from the repository tree unless part of an explicit migration/backup operation.

76
docs/MES.PAR Normal file
View File

@ -0,0 +1,76 @@
Prestige24i
IDEE =(14,R,Z,|).
DEF1 IDEE =(14,R,Z,F).
PATNUM =(14,R,Z,|).
PATID =(14,R,Z,|).
SKIP =(250,R,Z,|).
DEF0 SKIP =(50,R,Z,^).
LSTNAME =(20,R,S,|).
FSTNAME =(20,R,S,|).
RESULT =(10,R,Z,^).
RESFLAG =(3,R,S,|).
CHEMNUM =(20,R,S,^).
SEX =(1,R,Z,F).
BIRYEAR =(4,R,Z,F).
BIRMONT =(2,R,Z,F).
BIRDAY =(2,R,Z,F).
ASPYEAR =(4,R,Z,F).
ASPMONT =(2,R,Z,F).
ASPDAY =(2,R,Z,F).
ASPHOUR =(2,R,Z,F).
ASPMIN =(2,R,Z,F).
ASPSEC =(2,R,Z,F).
SPEC1 =(14,R,Z,|).
SPEC4 =(1,R,Z,|).
TUBTYPE =(10,R,Z,|).
SPEC7 =(14,R,S,|).
URGENT =(1,R,Z,F).
DOCTOR =(20,R,S,|).
SEQNUM =(6,R,S,|).
CTLNUM =(3,R,Z,|).
S9 ="Header_record_from_WST".
MESSAGE_I =STX,SP,1,"H|\^&|||WST^P1|||||Prestige24i^System1||P|1|",{SPEC7},CR,ETX,SP,2,CR,LF.
S9 ="Patient_record_from_WST".
MESSAGE_D =STX,SP,1,"P|",{SPEC1},"|",{PATNUM},"|||",
{LSTNAME},"^",{FSTNAME},"||",{BIRYEAR},{BIRMONT},{BIRDAY},"|",{SEX},"|||||",{DOCTOR},"|",CR,ETX,SP,2,CR,LF.
S9 ="Order_record_from_WST".
MESSAGE_Y =STX,SP,1,"O|1|",{IDEE},"||", 30("^^^",{CHEMNUM},"\"),{URGENT},
"||||||",{SPEC4},"||||",{TUBTYPE},"||||||||||O|",CR,ETX,SP,2,CR,LF.
S9 ="Terminator_record_from_WST".
MESSAGE_F =STX,SP,1,"L|1|N",CR,ETX,SP,2,CR,LF.
S9 ="Order_record_from_Instrument:QC_Or_Patient_Result".
S4 =[|Q|||,?=SA,ELSE=SB].
S9 ="QC_Order_record_from_Instrument".
SA =STX,SP,1,"O|",{SKIP},"|",{SKIP},"|^1^",{CTLNUM},"|".
S9 ="Patient_Order_record_from_Instrument".
SB =STX,SP,1,"O|",{SKIP},"|",{IDEE},"|",{DEF0},"^",{DEF0},"^",{SEQNUM}.
S9 ="Result_message_from_Instrument".
MESSAGE_R =STX,SP,1,"R|",{SKIP},"|^^^",{CHEMNUM},{SKIP},"|",
{RESULT},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",
,{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",
{ASPYEAR},{ASPMONT},{ASPDAY},{ASPHOUR},{ASPMIN},{ASPSEC}.
S9 ="Query_record_from_Instrument".
MESSAGE_Q =STX,SP,1,"Q|",{DEF0},"|^",{SPEC1}.
S9 ="Patient_message_from_Instrument".
S2 =STX,:,ETX,SP,2,CR,LF.
S9 ="Manufacturers_record_from_Instrument".
S7 =STX,:,ETX,SP,2,CR,LF.
S9 ="Scientific_record_from_Instrument".
MESSAGE_S =STX,:,ETX,SP,2,CR,LF.
S9 ="Comment_record_from_Instrument".
MESSAGE_C =STX,SP,1,"C|",{SKIP},"|",{SKIP},"|",{RESFLAG},"|".

28
docs/MES2.PAR Normal file
View File

@ -0,0 +1,28 @@
Jokok_EXD
IDEE =(17,R,Z,F).
CTLNUM =(2,R,S,F).
CHEMNUM =(2,R,S,F).
RESULT =(5,R,S,|).
RESFLAG =(1,R,S,F).
TUBTYPE =(1,R,S,F).
NUMRACK =(2,R,S,F).
POSRACK =(2,R,S,F).
ASPYEAR =(4,R,Z,F).
ASPMONT =(2,R,Z,F).
ASPDAY =(2,R,Z,F).
ASPHOUR =(2,R,Z,F).
ASPMIN =(2,R,Z,F).
ASPSEC =(2,R,Z,F).
S9 ="Result_Message:QC_Or_Sample_Results".
MESSAGE_R =STX,[CONTROL,?=SA,ELSE=SB].
S9 ="Result_Message:QC_Result".
SA ="",{ASPYEAR},{ASPMONT},{ASPDAY},{ASPHOUR},{ASPMIN},{ASPSEC},{NUMRACK},{POSRACK},SP,12,
{CTLNUM},SP,8,{TUBTYPE},SP,1,3!({CHEMNUM},{RESULT},SP,1,{RESFLAG}),ETX.
S9 ="Result_Message:Sample_Result".
SB ="",{ASPYEAR},{ASPMONT},{ASPDAY},{ASPHOUR},{ASPMIN},{ASPSEC},{NUMRACK},{POSRACK},SP,4
{IDEE},SP,1,{TUBTYPE},SP,1,3!({CHEMNUM},{RESULT},SP,1,{RESFLAG}),ETX.

140
docs/MES3.PAR Normal file
View File

@ -0,0 +1,140 @@
BS380
PATNUM =(16,R,Z,|).
LSTNAME =(30,R,S,^).
FSTNAME =(30,R,S,|).
BIRYEAR =(4,R,Z,F).
BIRMONT =(2,R,Z,F).
BIRDAY =(2,R,Z,F).
LOCATIO =(10,L,S,|).
COLYEAR =(4,R,Z,F).
COLMONT =(2,R,Z,F).
COLDAY =(2,R,Z,F).
COLHOUR =(2,R,Z,F).
COLMIN =(2,R,Z,F).
COLSEC =(2,R,Z,F).
TUBTYPE =(30,R,Z,|).
ASPYEAR =(4,R,Z,F).
ASPMONT =(2,R,Z,F).
ASPDAY =(2,R,Z,F).
ASPHOUR =(2,R,Z,F).
ASPMIN =(2,R,Z,F).
ASPSEC =(2,R,Z,F).
URGENT =(1,R,S,|).
DOCTOR =(10,L,S,|).
IDEE =(16,R,Z,|).
DEF0 IDEE =(8,R,Z,F).
SEX =(1,R,Z,|).
SPEC0 =(20,L,S,|).
SPEC1 =(50,L,S,|).
SPEC2 =(2,L,S,|).
SPEC3 =(2,L,S,|).
SPEC4 =(30,L,S,|).
SPEC5 =(80,L,S,|).
SPEC6 =(6,L,S,|).
SPEC7 =(20,L,S,|).
SPEC8 =(20,L,S,|).
SPEC9 =(20,L,S,|).
SPEC10 =(4,L,S,|).
SPEC11 =(1,R,Z,|).
SPEC12 =(30,L,S,|).
SPEC13 =(50,L,S,|).
SKIP =(99,L,S,|).
CHEMNUM =(30,R,S,|).
PROFIL =(30,R,S,^).
RESULT =(16,L,S,|).
RESFLAG =(15,L,S,|).
DEF1 RESULT =(4,R,Z, ).
S9 ="::HL7_MINIMAL_LOW_LAYER_PROTOCOL::".
S9 ="Request_message_from_TDW".
MESSAGE_W =SP,1,"MSH|^~\&|TD|LISWST|Mindray|BS-380|",{SPEC0},"||DSR^Q03|",{SPEC4},"|P|2.3.1||||||ASCII|||",CR,
"MSA|",{SPEC2},"|",{SPEC4},"|",{SPEC5},"|||",{SPEC8},"|",CR,
"ERR|0|",CR,
"QAK|SR|",{SPEC9},"|",CR,
"QRD|","FSE_Warning:_Both_QRD_and_QRF_segment_will_be_regenerated_by_TDW_-_DO_NOT_MODIFY",CR,
"QRF|","FSE_Warning:_Both_QRD_and_QRF_segment_will_be_regenerated_by_TDW_-_DO_NOT_MODIFY",CR,
"DSP|1||",{PATNUM},"|||",CR,
"DSP|2|||||",CR,
"DSP|3||",{LSTNAME},SP,1,{FSTNAME},"|||",CR,
"DSP|4||",{BIRYEAR},{BIRMONT},{BIRDAY},"000000|||",CR,
"DSP|5||",{SEX},"|||",CR,
"DSP|6|||||",CR,
"DSP|7|||||",CR,
"DSP|8|||||",CR,
"DSP|9|||||",CR,
"DSP|10|||||",CR,
"DSP|11|||||",CR,
"DSP|12|||||",CR,
"DSP|13|||||",CR,
"DSP|14|||||",CR,
"DSP|15|||||",CR,
"DSP|16|||||",CR,
"DSP|17||own|||",CR,
"DSP|18|||||",CR,
"DSP|19|||||",CR,
"DSP|20|||||",CR,
"DSP|21||",{DEF0},"|||",CR,
"DSP|22||",{DEF0},"|||",CR,
"DSP|23||",{COLYEAR},{COLMONT},{COLDAY},{COLHOUR},{COLMIN},{COLSEC},"|||",CR,
"DSP|24||N|||",CR,
"DSP|25|||||",CR,
"DSP|26||",{TUBTYPE},"|||",CR,
"DSP|27||",{DOCTOR},"|||",CR,
"DSP|28||",{LOCATIO},"|||",CR,
99("DSP|",{SPEC10},"||",{CHEMNUM},"^^^|||",CR),
"DSC||",CR,SP,2.
S9 ="Query_response_message_from_TDW".
SJ =SP,1,"MSH|^~\&|TD|LISWST|Mindray|BS-380|",{SPEC0},"||QCK^Q02|",{SPEC4},"|P|2.3.1||||||ASCII|||",CR,
"MSA|",{SPEC2},"|",{SPEC4},"|",{SPEC5},"|||",{SPEC8},"|",CR,
"ERR|0|",CR,
"QAK|SR|",{SPEC9},"|",CR,SP,2.
S9 ="HL7_Acknowledment_message_from_TDW".
MESSAGE_Z =SP,1
"MSH|^~\&|TD|LISWST|Mindray|BS-380|",{SPEC0},"||ACK^R01|",{SPEC4},"|P|2.3.1||||||ASCII|||",CR,
"MSA|",{SPEC2},"|",{SPEC4},"|",{SPEC5},"|||",{SPEC8},"|",CR,SP,2.
S9 ="MSH_segment_of_HL7_message_from_INSTRUMENT".
MESSAGE_I ="MSH|^~\&|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SPEC1},
"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SPEC11}.
S9 ="QRD_segment_of_HL7_message_from_INSTRUMENT".
MESSAGE_Q ="QRD|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SPEC4},CR.
S9 ="QRF_segment_of_HL7_message_from_INSTRUMENT".
SF ="QRF|",:,CR.
S9 ="MSA_segment_of_HL7_message_from_INSTRUMENT".
MESSAGE_E ="MSA|",{SPEC2},"|".
S9 ="ERR_segment_of_HL7_message_from_INSTRUMENT.".
SI ="ERR|",{SPEC5},CR.
S9 ="OBR_segment_of_HL7_message_from_INSTRUMENT".
MESSAGE_Y ="OBR|",{SKIP},"|",{SKIP},"|",{IDEE},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",
{ASPYEAR},{ASPMONT},{ASPDAY},{ASPHOUR},{ASPMIN},{ASPSEC},"|",
{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",
{SKIP},"|",{SKIP},"|",{SKIP},"|",{TUBTYPE}.
S9 ="OBX_segment_of_HL7_message_from_INSTRUMENT".
MESSAGE_R ="OBX|",{SKIP},"|",{SKIP},"|",{CHEMNUM},"|",{SKIP},"|",{RESULT},"|",{SKIP},"|",{SKIP},"|",{RESFLAG},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP}.
S9 ="PID_segment_of_HL7_message_from_INSTRUMENT.".
MESSAGE_D ="PID|",:,CR.
S9 ="PV1_segment_of_HL7_message_from_INSTRUMENT.".
MESSAGE_S ="PV1|",:,CR.
S9 ="ORC_segment_of_HL7_message_from_INSTRUMENT.".
MESSAGE_F ="ORC|",:,CR.
S9 ="NTE_segment_of_HL7_message_from_INSTRUMENT.".
MESSAGE_C ="NTE|",:,CR.
S9 ="OBR_segment_of_QC_result_message_from_INSTRUMENT".
MESSAGE_N ="OBR|",{SKIP},"|",{CHEMNUM},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{ASPYEAR},{ASPMONT},{ASPDAY},{ASPHOUR},{ASPMIN},{ASPSEC},"|",
{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SPEC12},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SKIP},"|",{SPEC13}.

35
docs/MES4.PAR Normal file
View File

@ -0,0 +1,35 @@
MONO
ASPYEAR =(4,R,Z,F).
ASPMONT =(3,R,Z,F).
ASPDAY =(2,R,Z,F).
ASPHOUR =(2,R,Z,F).
ASPMIN =(2,R,Z,F).
SEQNUM =(4,R,Z,F).
DEF0 SEQNUM =(6,R,Z,F).
CTLNUM =(2,R,Z,F).
IDEE =(8,L,S,F).
RESULT =(6,R,S,F).
RESFLAG =(1,R,S,F).
SPEC2 =(1,R,S,F).
SKIP =(6,R,S,F).
TUBTYPE =(1,R,Z,F).
DEF1 SKIP =(4,R,S,F).
CHEMNUM =(4,R,S,F).
S9 ="Sample_or_Control_Result_Message?".
MESSAGE_R =STX,SP,20,[T,51=S0,ELSE=S1].
S9 ="Control_Result_Message".
S0 =STX,SP,32,{ASPYEAR},"/",{ASPMONT},
"/",{ASPDAY},SP,1,{ASPHOUR},":",{ASPMIN},
,SP,1,{CTLNUM},SP,1,{SEQNUM},SP,1,{SKIP},SP,12,
STX,6!({CHEMNUM},SP,2,{SPEC2},{RESULT},{RESULT},SP,6),SP,1,
STX,6!({CHEMNUM},SP,2,{SPEC2},{RESULT},{RESULT},SP,6),SP,1,
STX,1!({CHEMNUM},SP,2,{SPEC2},{RESULT},{RESULT},SP,6).
S9 ="Patient_Result_Message".
S1 =STX,{TUBTYPE},{ASPMONT},"/",{ASPDAY},"/",{ASPYEAR},SP,2,
{ASPHOUR},":",{ASPMIN},SP,2,{IDEE},",",SP,11,CRLF,
12!({CHEMNUM},SP,1,{RESULT},SP,15,CRLF).

197
docs/user_manual.md Normal file
View File

@ -0,0 +1,197 @@
# TinyLink User Manual
Welcome to TinyLink: the friendly lab middleware that never sleeps, rarely panics, and keeps your results moving even when downstream systems are having a bad day.
## What TinyLink Can Do
TinyLink sits between laboratory instruments and CLQMS, then handles the heavy lifting:
- Receives messages from instruments over three connector types:
- `http-json` (HTTP endpoint)
- `hl7-tcp` (TCP socket)
- `astm-serial` (physical serial port/COM)
- Parses incoming payloads and normalizes them into one canonical JSON format.
- Stores raw and normalized data in SQLite for durability.
- Deduplicates repeated payloads using a hash key.
- Sends results to CLQMS with automatic retry and backoff.
- Moves non-deliverable payloads to dead letter for review.
- Exposes operational endpoints for health and metrics.
Think of it as a reliable translator + traffic controller for instrument data.
## Default Ports and Endpoints
By default (from `middleware/config/app.yaml`):
- Instrument config + health + metrics API: `4001` (from `host.port`)
- Instrument connectors are configured per instrument entity (`inst1`, `inst2`, ...)
Useful endpoints:
- `GET http://localhost:4001/health`
- `GET http://localhost:4001/health/ready`
- `GET http://localhost:4001/metrics`
- `POST http://localhost:3001/messages` (for JSON instrument payloads)
## Add a New Instrument (New "Inst")
TinyLink now uses a single file-based configuration. There is no `POST /instruments` write flow.
To add an instrument, keep one config file in:
```text
middleware/
config/
app.yaml
```
### Step 1: Edit `app.yaml`
Example:
```yaml
host:
url: http://localhost:4000/api/results
apikey: ""
port: 4001
inst1:
enabled: true
connector:
type: serial
port: COM1
baudRate: 9600
config:
location: lab-a
translator:
parser: astm
forceInstrumentId: true
meta:
profile: astm-default
```
What it does:
- `host` contains upstream endpoint and API settings (`url`, `apikey`, `port`).
- Every top-level entity other than `host` is an instrument (`inst1`, `inst2`, ...).
- `connector` is embedded per instrument, so each instrument has its own connector type/settings.
- `translator` stays embedded per instrument, so each instrument can define parser and metadata.
### Step 2: Restart TinyLink
TinyLink validates instrument files before startup. Restart after adding or editing files.
Use this preflight command before `npm start`:
```powershell
npm run instrument:check
```
### Step 3: Verify instrument is loaded
You can still use read-only endpoints:
```powershell
curl http://localhost:4001/instruments
curl http://localhost:4001/instruments/inst1
```
If your instrument does not appear, run `npm run instrument:check` and fix reported errors.
### Matching rules (important)
TinyLink picks exactly one instrument by matching the connector type plus each instrument's `match` rules.
- 0 matches: payload is dropped (`no matching instrument config`).
- More than 1 match: payload is dropped (`ambiguous instrument match`).
- Exactly 1 match: translator runs and message continues through queue + delivery.
TinyLink is strict because your audit trail deserves peace and quiet.
## Canonical Payload Shape
After parsing and normalization, TinyLink expects this shape.
Required fields:
- `instrument_id`
- `sample_id`
- `result_time` (ISO timestamp)
- `results[]` with at least one item containing:
- `test_code`
- `value`
Optional fields:
- `unit`, `flag`, `patient_id`, `operator_id`, `meta`
Example:
```json
{
"instrument_id": "SYSMEX_XN1000",
"sample_id": "SMP-20260326-001",
"result_time": "2026-03-26T10:20:00Z",
"results": [
{
"test_code": "WBC",
"value": "8.2",
"unit": "10^3/uL",
"flag": "N"
}
],
"meta": {
"source_protocol": "HL7",
"connector": "hl7-tcp"
}
}
```
## Delivery and Retry Behavior
TinyLink does not give up easily:
- Retries transient failures (timeouts, DNS/connection issues, HTTP 5xx).
- Uses backoff schedule: `30s -> 2m -> 10m -> 30m -> 2h -> 6h`.
- Maximum attempts: `10`.
- HTTP `400` and `422` are treated as non-retriable and moved to dead letter immediately.
## Health, Readiness, and Metrics
Use these to check system status:
- `GET /health`: connector status + queue counts (`pending`, `retrying`, `deadLetters`).
- `GET /health/ready`: confirms SQLite readiness.
- `GET /metrics`: Prometheus-style metrics, including attempts, latency, and success timestamp.
## Daily Operations (Quick Checklist)
- Start app: `npm start`
- Validate instrument files: `npm run instrument:check`
- Run migrations manually: `npm run migrate`
- Backup DB: `npm run maintenance -- backup`
- Vacuum DB: `npm run maintenance -- vacuum`
- Prune old delivery logs: `npm run maintenance -- prune --days=30`
## Troubleshooting New Instrument Onboarding
### Instrument returns `404`
- Confirm exact `instrument_id` spelling and casing.
- Verify the instrument exists in `middleware/config/app.yaml` as its own top-level key (`inst1`, `inst2`, ...).
- Verify that instrument has `connector.type`, connector settings, and `translator.parser`.
### Data is not flowing
- Confirm instrument has `enabled: true`.
- Confirm connector matches incoming protocol.
- Confirm match rules fit connector metadata (`localPort`, `remoteAddress`, `remotePort`, or `comPort`).
- Check `/health` and `/health/ready`.
### Backlog or dead letters are growing
- Check `/metrics` for queue and delivery trends.
- Validate CLQMS URL/token/timeouts and downstream availability.
## Final Tip
If you are adding many instruments, standardize your naming (for example: `LAB1_XN1000`, `LAB1_COBAS_E411`) and keep connector mappings documented. Future-you will send present-you a thank-you card.

View File

@ -10,7 +10,7 @@ Build a lightweight Node.js service that:
## Responsibilities
- **Middleware:** connector protocols (HTTP JSON, HL7 TCP, ASTM serial/TCP), parsing/normalization, schema checks, durable queue, retries, dead-letter, logging, health endpoints.
- **Middleware:** connector protocols (HTTP JSON, HL7 TCP, ASTM serial), parsing/normalization, schema checks, durable queue, retries, dead-letter, logging, health endpoints.
- **CLQMS:** domain validation, mapping rules, result persistence, workflow/flags/audit.
## Flow
@ -132,8 +132,9 @@ Example:
## Phase 2 Completion Notes
- Instruments are provisioned via `instrument_config` rows (connector, enabled flag, JSON payload) and can be managed through `POST /instruments` plus the instrumentation console for quick updates.
- Each connector validates against configured instruments so HL7/ASTM parsers are only accepted for known, enabled equipment.
- Instruments are provisioned from `middleware/config/app.yaml`, a single file containing `host` runtime settings and `instruments[]` entries with embedded connector, match, config, and translator settings.
- The `/instruments` route is read-only for visibility; instrument onboarding is file-driven.
- Each connector validates against loaded instrument files so only known, enabled equipment is accepted.
- Deduplication now guarded by SHA-256 `dedupe_key`, and instrument metadata is carried through the pipeline.
## Metrics & Observability
@ -151,4 +152,5 @@ Example:
## Testing & Validation
- Parser smoke tests under `middleware/test/parsers.test.js` verify HL7/ASTM canonical output and keep `normalize()` coverage intact. Run via `npm test`.
- Instrument config integrity check runs via `npm run instrument:check`; startup performs the same validation and fails fast on errors.
- Future CI can run the same script plus `npm run migrate` ahead of any pull request to ensure schema/queue logic still applies.

65
middleware/config/app.js Normal file
View File

@ -0,0 +1,65 @@
const fs = require('fs');
const path = require('path');
const YAML = require('yaml');
const configPath = path.join(__dirname, 'app.yaml');
/**
 * Parse `value` as a base-10 integer.
 *
 * @param {*} value - candidate value (string, number, undefined, ...).
 * @param {number} fallback - returned when parsing does not yield a number.
 * @returns {number} the parsed integer, or `fallback`.
 */
function toInt(value, fallback) {
  const n = Number.parseInt(value, 10);
  if (Number.isNaN(n)) {
    return fallback;
  }
  return n;
}
/**
 * Read and parse the YAML config file at `filePath`.
 *
 * @param {string} filePath - absolute path to the YAML file.
 * @returns {object} parsed document, or `{}` for an empty document.
 * @throws {Error} when the file does not exist or contains invalid YAML.
 */
function loadYamlConfig(filePath) {
  if (!fs.existsSync(filePath)) {
    throw new Error(`config file not found: ${filePath}`);
  }
  const raw = fs.readFileSync(filePath, 'utf8');
  // `??` (not `||`): YAML.parse returns null only for an empty document, so
  // a falsy-but-valid scalar document (0, false, "") is not clobbered.
  return YAML.parse(raw) ?? {};
}
/**
 * Build the middleware runtime configuration from `app.yaml`.
 *
 * The YAML document reserves two top-level keys: `host` (runtime settings:
 * db, connector defaults, CLQMS endpoint, worker, retries) and the legacy
 * `instruments` array (passed through unchanged when present). Every OTHER
 * top-level mapping is treated as an instrument entity keyed by its id
 * (`inst1`, `inst2`, ...).
 *
 * @returns {object} the runtime config consumed across the middleware.
 * @throws {Error} when `app.yaml` is missing or invalid (via loadYamlConfig).
 */
function buildConfig() {
  const parsed = loadYamlConfig(configPath);
  const host = parsed.host || {};
  // Instrument entities: every non-reserved top-level key whose value is a
  // plain object. Arrays are rejected explicitly — spreading an array into
  // an object would produce a malformed `{0: ..., 1: ...}` entity.
  const instrumentEntities = Object.entries(parsed)
    .filter(([key, value]) =>
      key !== 'host'
      && key !== 'instruments'
      && value
      && typeof value === 'object'
      && !Array.isArray(value))
    .map(([instrumentId, value]) => ({ instrument_id: instrumentId, ...value }));
  return {
    env: host.env || 'development',
    db: {
      path: host.db?.path || 'middleware/data/workstation.sqlite',
      busyTimeout: toInt(host.db?.busyTimeout, 5000)
    },
    // Connector defaults; per-instrument connector settings (in each
    // instrument entity) take precedence at connector creation time.
    connectors: {
      httpJsonPort: toInt(host.connectors?.httpJsonPort, 3001),
      hl7TcpPort: toInt(host.connectors?.hl7TcpPort, 3002),
      astmSerialPath: host.connectors?.astmSerialPath || 'COM1',
      astmSerialBaudRate: toInt(host.connectors?.astmSerialBaudRate, 9600),
      astmSerialDataBits: toInt(host.connectors?.astmSerialDataBits, 8),
      astmSerialStopBits: toInt(host.connectors?.astmSerialStopBits, 1),
      astmSerialParity: host.connectors?.astmSerialParity || 'none'
    },
    // Flat host.url/host.apikey take precedence over the nested clqms block.
    clqms: {
      url: host.url || host.clqms?.url || '',
      token: host.apikey || host.clqms?.token || '',
      timeout: toInt(host.clqms?.timeout, 8000)
    },
    healthPort: toInt(host.port || host.healthPort, 4001),
    worker: {
      pollInterval: toInt(host.worker?.pollInterval, 5000),
      batchSize: toInt(host.worker?.batchSize, 5),
      lockTTLSeconds: toInt(host.worker?.lockTTLSeconds, 60),
      workerId: host.worker?.workerId || 'worker-default'
    },
    retries: {
      // Backoff schedule in seconds: 30s -> 2m -> 10m -> 30m -> 2h -> 6h.
      schedule: host.retries?.schedule || [30, 120, 600, 1800, 7200, 21600],
      maxAttempts: toInt(host.retries?.maxAttempts, 10)
    },
    instruments: Array.isArray(parsed.instruments) ? parsed.instruments : [],
    instrumentEntities,
    configPath
  };
}
module.exports = buildConfig();

View File

@ -0,0 +1,25 @@
# Single-file runtime configuration for the middleware.
# `host` is a reserved key holding runtime settings; every other top-level
# mapping is an instrument entity keyed by its instrument id (inst1, inst2, ...).
host:
  # Upstream CLQMS results endpoint and credentials.
  url: http://localhost:4000/api/results
  apikey: ""
  # Health/metrics/instrument API port.
  port: 4001
# Example instrument entity: an ASTM instrument on a serial COM port.
inst1:
  enabled: true
  # Per-instrument connector settings (serial port parameters).
  connector:
    type: serial
    port: COM1
    baudRate: 9600
    dataBits: 8
    stopBits: 1
    parity: none
  # Free-form instrument metadata carried through the pipeline.
  config:
    location: default-lab
    communication_mode: unidirectional
    note: ASTM instrument over serial COM
  # Parser selection and canonical-payload overrides for this instrument.
  translator:
    parser: astm
    forceInstrumentId: true
    meta:
      translator: msg1
      connection: serial
      direction: mono

View File

@ -1,32 +0,0 @@
const path = require('path');
// Repository-relative root of the middleware tree (one level above config/).
const root = path.join(__dirname, '..');
// Legacy environment-variable driven configuration.
// NOTE(review): this commit deletes this module in favor of config/app.js +
// app.yaml; shown here as the pre-migration reference.
module.exports = {
  env: process.env.NODE_ENV || 'development',
  db: {
    path: process.env.DB_PATH || path.join(root, 'data', 'workstation.sqlite'),
    busyTimeout: 5000
  },
  // Listener ports for the three ingestion connectors.
  connectors: {
    httpJsonPort: Number(process.env.HTTP_JSON_PORT || 3001),
    hl7TcpPort: Number(process.env.HL7_TCP_PORT || 3002),
    astmTcpPort: Number(process.env.ASTM_TCP_PORT || 3003)
  },
  // Upstream CLQMS delivery endpoint, token, and request timeout (ms).
  clqms: {
    url: process.env.CLQMS_URL || 'http://localhost:4000/api/results',
    token: process.env.CLQMS_TOKEN || '',
    timeout: Number(process.env.CLQMS_TIMEOUT || 8000)
  },
  healthPort: Number(process.env.HEALTH_PORT || 4001),
  // Delivery worker tuning: poll cadence, batch size, and lock lease.
  worker: {
    pollInterval: Number(process.env.WORKER_POLL_INTERVAL || 5000),
    batchSize: Number(process.env.WORKER_BATCH_SIZE || 5),
    lockTTLSeconds: Number(process.env.WORKER_LOCK_TTL || 60),
    workerId: process.env.WORKER_ID || `worker-${process.pid}`
  },
  // Retry backoff schedule in seconds: 30s -> 2m -> 10m -> 30m -> 2h -> 6h.
  retries: {
    schedule: [30, 120, 600, 1800, 7200, 21600],
    maxAttempts: Number(process.env.MAX_ATTEMPTS || 10)
  }
};

Binary file not shown.

View File

@ -1,5 +1,5 @@
const { request } = require('undici');
const config = require('../../config/default');
const config = require('../../config/app');
async function deliver(payload) {
const body = JSON.stringify(payload);

View File

@ -1,61 +1,105 @@
const net = require('net');
const config = require('../../config/default');
const { SerialPort } = require('serialport');
const config = require('../../config/app');
const logger = require('../utils/logger');
function createAstmSerialConnector() {
let server;
function createAstmSerialConnector(options = {}) {
let port;
let messageHandler = async () => {};
let errorHandler = (err) => logger.error({ err }, 'astm connector error');
const serialPath = options.port || options.comPort || config.connectors.astmSerialPath;
const baudRate = Number(options.baudRate || config.connectors.astmSerialBaudRate);
const dataBits = Number(options.dataBits || config.connectors.astmSerialDataBits);
const stopBits = Number(options.stopBits || config.connectors.astmSerialStopBits);
const parity = options.parity || config.connectors.astmSerialParity;
const instrumentId = options.instrument_id || null;
const connections = new Set();
function attach(socket) {
connections.add(socket);
let buffer = '';
socket.on('data', (chunk) => {
buffer += chunk.toString('utf8');
if (buffer.includes('\n')) {
const [line, ...rest] = buffer.split('\n');
buffer = rest.join('\n');
if (line.trim()) {
messageHandler(line.trim()).catch(errorHandler);
}
}
});
socket.on('error', (err) => errorHandler(err));
socket.on('close', () => connections.delete(socket));
function processBufferedLines(state) {
const lines = state.buffer.split(/\r?\n/);
state.buffer = lines.pop() || '';
lines
.map((line) => line.trim())
.filter(Boolean)
.forEach((line) => {
messageHandler({
payload: line,
context: {
connector: 'astm-serial',
instrument_id: instrumentId,
comPort: serialPath,
baudRate,
dataBits,
stopBits,
parity
}
}).catch(errorHandler);
});
}
return {
name: () => 'astm-serial',
type: () => 'astm-serial',
async start() {
server = net.createServer(attach);
const state = { buffer: '' };
port = new SerialPort({
path: serialPath,
baudRate,
dataBits,
stopBits,
parity,
autoOpen: false
});
port.on('data', (chunk) => {
state.buffer += chunk.toString('utf8');
if (state.buffer.includes('\n')) {
processBufferedLines(state);
}
});
port.on('error', (err) => errorHandler(err));
return new Promise((resolve, reject) => {
server.listen(config.connectors.astmTcpPort, () => {
logger.info({ port: config.connectors.astmTcpPort }, 'astm serial connector listening');
port.open((err) => {
if (err) {
errorHandler(err);
return reject(err);
}
logger.info({
instrument_id: instrumentId,
comPort: serialPath,
baudRate,
dataBits,
stopBits,
parity
}, 'astm serial connector opened');
resolve();
});
server.on('error', (err) => {
errorHandler(err);
reject(err);
});
});
},
async stop() {
for (const socket of connections) {
socket.destroy();
if (!port) return;
if (!port.isOpen) {
port.removeAllListeners();
port = null;
return;
}
if (!server) return;
return new Promise((resolve, reject) => {
server.close((err) => {
port.close((err) => {
if (err) return reject(err);
port.removeAllListeners();
port = null;
resolve();
});
});
},
health() {
return { status: server ? 'up' : 'down', port: config.connectors.astmTcpPort };
return {
status: port && port.isOpen ? 'up' : 'down',
instrument_id: instrumentId,
comPort: serialPath,
baudRate
};
},
onMessage(handler) {
messageHandler = handler;

View File

@ -1,18 +1,29 @@
const net = require('net');
const config = require('../../config/default');
const config = require('../../config/app');
const logger = require('../utils/logger');
function createHl7TcpConnector() {
function createHl7TcpConnector(options = {}) {
let server;
let messageHandler = async () => {};
let errorHandler = (err) => logger.error({ err }, 'hl7 connector error');
const port = Number(options.port || config.connectors.hl7TcpPort);
const instrumentId = options.instrument_id || null;
const connections = new Set();
function handleData(socket, chunk) {
const payload = chunk.toString('utf8').trim();
if (!payload) return;
messageHandler(payload).catch(errorHandler);
messageHandler({
payload,
context: {
connector: 'hl7-tcp',
instrument_id: instrumentId,
remoteAddress: socket.remoteAddress,
remotePort: socket.remotePort,
localPort: socket.localPort || port
}
}).catch(errorHandler);
}
function attach(socket) {
@ -28,8 +39,8 @@ function createHl7TcpConnector() {
async start() {
server = net.createServer(attach);
return new Promise((resolve, reject) => {
server.listen(config.connectors.hl7TcpPort, () => {
logger.info({ port: config.connectors.hl7TcpPort }, 'hl7 tcp connector listening');
server.listen(port, () => {
logger.info({ port, instrument_id: instrumentId }, 'hl7 tcp connector listening');
resolve();
});
server.on('error', (err) => {
@ -51,7 +62,7 @@ function createHl7TcpConnector() {
});
},
health() {
return { status: server ? 'up' : 'down', port: config.connectors.hl7TcpPort };
return { status: server ? 'up' : 'down', port, instrument_id: instrumentId };
},
onMessage(handler) {
messageHandler = handler;

View File

@ -1,17 +1,28 @@
const express = require('express');
const config = require('../../config/default');
const config = require('../../config/app');
const logger = require('../utils/logger');
function createHttpJsonConnector() {
function createHttpJsonConnector(options = {}) {
let server;
let messageHandler = async () => {};
let errorHandler = (err) => logger.error({ err }, 'connector error');
const port = Number(options.port || config.connectors.httpJsonPort);
const instrumentId = options.instrument_id || null;
const app = express();
app.use(express.json());
app.post('/messages', async (req, res) => {
try {
await messageHandler(req.body);
await messageHandler({
payload: req.body,
context: {
connector: 'http-json',
instrument_id: instrumentId,
remoteAddress: req.socket?.remoteAddress,
remotePort: req.socket?.remotePort,
localPort: req.socket?.localPort || port
}
});
res.status(202).json({ status: 'queued' });
} catch (err) {
errorHandler(err);
@ -24,8 +35,8 @@ function createHttpJsonConnector() {
type: () => 'http-json',
async start() {
return new Promise((resolve) => {
server = app.listen(config.connectors.httpJsonPort, () => {
logger.info({ port: config.connectors.httpJsonPort }, 'http-json connector listening');
server = app.listen(port, () => {
logger.info({ port, instrument_id: instrumentId }, 'http-json connector listening');
resolve();
});
});
@ -40,7 +51,7 @@ function createHttpJsonConnector() {
});
},
health() {
return { status: server ? 'up' : 'down', port: config.connectors.httpJsonPort };
return { status: server ? 'up' : 'down', port, instrument_id: instrumentId };
},
onMessage(handler) {
messageHandler = handler;

View File

@ -1,5 +1,5 @@
const express = require('express');
const config = require('../../config/default');
const config = require('../config/app');
const logger = require('./utils/logger');
const migrate = require('./storage/migrate');
const { createHttpJsonConnector } = require('./connectors/httpJsonConnector');
@ -8,24 +8,48 @@ const { createAstmSerialConnector } = require('./connectors/astmSerialConnector'
const { processMessage } = require('./pipeline/workflow');
const { startWorker, stopWorker } = require('./pipeline/deliveryWorker');
const instrumentService = require('./instrumentConfig/service');
const { validateAndLoadInstrumentConfigs } = require('./instrumentConfig/validator');
const { createHealthRouter } = require('./routes/health');
const { router: instrumentRouter } = require('./routes/instrumentConfig');
const metricsRouter = require('./routes/metrics');
async function bootstrap() {
validateAndLoadInstrumentConfigs();
await migrate();
await instrumentService.init();
const connectors = [
createHttpJsonConnector(),
createHl7TcpConnector(),
createAstmSerialConnector()
];
const connectorFactories = {
'http-json': createHttpJsonConnector,
'hl7-tcp': createHl7TcpConnector,
'astm-serial': createAstmSerialConnector
};
const connectors = instrumentService.list()
.filter((entry) => entry.enabled)
.map((entry) => {
const createConnector = connectorFactories[entry.connector];
if (!createConnector) {
logger.warn({ connector: entry.connector, instrument_id: entry.instrument_id }, 'unknown connector in instrument config, skipping startup');
return null;
}
return createConnector({
...(entry.connectorConfig || {}),
instrument_id: entry.instrument_id
});
})
.filter(Boolean);
if (!connectors.length) {
logger.warn('no enabled connectors configured, ingestion listeners are disabled');
}
connectors.forEach((connector) => {
connector.onMessage(async (payload) => {
connector.onMessage(async (incoming) => {
try {
await processMessage(connector.name(), payload);
const payload = incoming && Object.prototype.hasOwnProperty.call(incoming, 'payload')
? incoming.payload
: incoming;
const context = incoming && incoming.context ? incoming.context : {};
await processMessage(connector.name(), payload, context);
} catch (err) {
logger.error({ err: err.message, connector: connector.name() }, 'pipeline error');
}

View File

@ -1,4 +1,4 @@
const store = require('../storage/instrumentConfigStore');
const store = require('../storage/instrumentConfigFileStore');
const logger = require('../utils/logger');
let cache = new Map();
@ -9,8 +9,22 @@ async function reload() {
const next = new Map();
rows.forEach((row) => {
try {
const config = row.config ? JSON.parse(row.config) : {};
next.set(row.instrument_id, { instrument_id: row.instrument_id, connector: row.connector, enabled: Boolean(row.enabled), config });
if (row.error) {
throw new Error(row.error);
}
if (!row.instrument_id || !row.connector) {
throw new Error('instrument_id and connector are required');
}
next.set(row.instrument_id, {
instrument_id: row.instrument_id,
connector: row.connector,
enabled: Boolean(row.enabled),
config: row.config || {},
match: row.match || {},
translator: row.translator || {},
connectorConfig: row.connectorConfig || {},
files: row.files || null
});
} catch (err) {
logger.warn({ instrument: row.instrument_id, err: err.message }, 'failed parsing instrument config, skipping');
}
@ -38,10 +52,63 @@ function byConnector(connector) {
return list().filter((entry) => entry.connector === connector && entry.enabled);
}
async function upsert(payload) {
const stored = await store.upsert(payload);
await reload();
return get(stored.instrument_id);
// --- Instrument matching helpers -------------------------------------------

// True when the match rule leaves the remote address unconstrained, or when
// the two addresses are equal after stripping the IPv4-mapped IPv6 prefix
// ("::ffff:10.0.0.1" and "10.0.0.1" name the same peer).
function addressesEqual(expected, actual) {
  if (!expected) return true;
  if (!actual) return false;
  const stripMapped = (addr) => String(addr).replace('::ffff:', '');
  return stripMapped(expected) === stripMapped(actual);
}

// Numeric port comparison; an unset expectation always matches.
function portsEqual(expected, actual) {
  const isBlank = (value) => value === undefined || value === null || value === '';
  if (isBlank(expected)) return true;
  if (isBlank(actual)) return false;
  return Number(expected) === Number(actual);
}

// Case-insensitive, whitespace-tolerant serial-port name comparison
// ("COM3" vs " com3 "); an unset expectation always matches.
function comPortsEqual(expected, actual) {
  const isBlank = (value) => value === undefined || value === null || value === '';
  if (isBlank(expected)) return true;
  if (isBlank(actual)) return false;
  const canon = (value) => String(value).trim().toLowerCase();
  return canon(expected) === canon(actual);
}

// Decide whether a configured instrument entry applies to an incoming message:
// the entry must be enabled, bound to the same connector, and every populated
// rule in entry.match must agree with the message context.
function matches(entry, connector, context = {}) {
  if (context.instrument_id && context.instrument_id !== entry.instrument_id) {
    return false;
  }
  if (!entry.enabled || entry.connector !== connector) {
    return false;
  }
  const rule = entry.match || {};
  return (
    portsEqual(rule.localPort, context.localPort) &&
    portsEqual(rule.remotePort, context.remotePort) &&
    addressesEqual(rule.remoteAddress, context.remoteAddress) &&
    comPortsEqual(rule.comPort || rule.serialPort, context.comPort || context.serialPort)
  );
}
// Resolve which configured instrument owns an incoming message for the given
// connector and transport context. Returns exactly one of:
//   { status: 'no_match',  matches: [] }
//   { status: 'ambiguous', matches: [instrument_id, ...] }
//   { status: 'matched',   entry }
function resolveForMessage(connector, context = {}) {
  const hits = byConnector(connector).filter((entry) => matches(entry, connector, context));
  switch (hits.length) {
    case 0:
      return { status: 'no_match', matches: [] };
    case 1:
      return { status: 'matched', entry: hits[0] };
    default:
      // More than one instrument claims this message; refuse to guess.
      return { status: 'ambiguous', matches: hits.map((entry) => entry.instrument_id) };
  }
}
// Apply an instrument entry's translator to a parsed payload, producing the
// canonical message: field overrides are shallow-merged over the payload, the
// configured instrument_id is enforced unless forceInstrumentId === false, and
// meta is rebuilt from payload meta + translator meta + connector/config tags.
function applyTranslator(entry, parsedPayload, connector) {
  const translator = entry.translator || {};
  const isObject = (value) => Boolean(value) && typeof value === 'object';

  const overrides = isObject(translator.overrides) ? translator.overrides : {};
  const translated = { ...parsedPayload, ...overrides };

  if (translator.forceInstrumentId !== false) {
    translated.instrument_id = entry.instrument_id;
  }

  const metaOverrides = isObject(translator.meta) ? translator.meta : {};
  translated.meta = {
    ...(parsedPayload.meta || {}),
    ...metaOverrides,
    connector,
    instrument_config: entry.config
  };
  return translated;
}
module.exports = {
@ -49,5 +116,7 @@ module.exports = {
list,
get,
byConnector,
upsert
reload,
resolveForMessage,
applyTranslator
};

View File

@ -0,0 +1,152 @@
const fs = require('fs');
const config = require('../../config/app');
// Canonicalize a user-supplied connector type (case/whitespace tolerant) to
// one of the internal connector names; returns '' for unknown types.
function normalizeConnectorType(type) {
  const value = String(type || '').trim().toLowerCase();
  const aliases = {
    serial: 'astm-serial',
    'astm-serial': 'astm-serial',
    'tcp-server': 'hl7-tcp',
    'hl7-tcp': 'hl7-tcp',
    'http-json': 'http-json',
    http: 'http-json',
    'tcp-client': 'tcp-client'
  };
  // Object.hasOwn guards against prototype keys like "constructor".
  return Object.hasOwn(aliases, value) ? aliases[value] : '';
}
// Default parser name for a canonical connector type; '' when the connector
// has no default (the caller then reports a missing translator.parser).
function defaultParserForConnector(connector) {
  const defaults = {
    'astm-serial': 'astm',
    'hl7-tcp': 'hl7',
    'http-json': 'http-json'
  };
  return Object.hasOwn(defaults, connector) ? defaults[connector] : '';
}
// Flatten config "entity" records into the row shape the validator consumes:
// the nested connector object is split into its type plus the full object as
// connectorConfig; other fields are carried through unchanged.
function toEntityRows(entities = []) {
  const rows = [];
  for (const entity of entities) {
    const connectorObject =
      entity.connector && typeof entity.connector === 'object' ? entity.connector : {};
    rows.push({
      instrument_id: entity.instrument_id,
      enabled: entity.enabled,
      connector: connectorObject.type,
      connectorConfig: connectorObject,
      config: entity.config,
      translator: entity.translator
    });
  }
  return rows;
}
// Aggregate validation failure: carries every individual issue found while
// loading instrument configs, so callers can report them all at once.
class InstrumentConfigValidationError extends Error {
  constructor(errors) {
    const issueCount = errors.length;
    super(`instrument config validation failed with ${issueCount} issue(s)`);
    this.name = 'InstrumentConfigValidationError';
    // Array of human-readable issue strings.
    this.errors = errors;
  }
}
/**
 * Load instrument definitions from app config, validate them, and normalize
 * each into a runtime entry of the shape
 * { instrument_id, connector, enabled, match, config, translator,
 *   connectorConfig, files }.
 *
 * Fix vs. previous version: the effective translator is derived into a local
 * variable instead of being assigned back onto `item` — the rows typically
 * come straight from the shared config object, and mutating them leaked the
 * defaulted translator into module-level config state.
 *
 * @param {object} [options]
 * @param {Array<object>} [options.instruments] Raw instrument rows; defaults
 *   to config.instrumentEntities (via toEntityRows) or config.instruments.
 * @param {string} [options.configFilePath] Config file path recorded on each
 *   entry and checked for existence when set.
 * @returns {Array<object>} Validated, normalized instrument entries.
 * @throws {InstrumentConfigValidationError} When the source list is missing,
 *   empty, or any instrument row is invalid; all issues are collected first.
 */
function validateAndLoadInstrumentConfigs({
  instruments = config.instrumentEntities?.length ? toEntityRows(config.instrumentEntities) : config.instruments,
  configFilePath = config.configPath
} = {}) {
  const errors = [];
  const entries = [];
  const instrumentIds = new Set();
  if (configFilePath && !fs.existsSync(configFilePath)) {
    errors.push(`config file not found: ${configFilePath}`);
    throw new InstrumentConfigValidationError(errors);
  }
  if (!Array.isArray(instruments)) {
    errors.push('config.instruments: expected an array');
    throw new InstrumentConfigValidationError(errors);
  }
  if (!instruments.length) {
    errors.push('config.instruments: array cannot be empty');
  }
  for (let index = 0; index < instruments.length; index += 1) {
    const item = instruments[index];
    const label = `instrument[${index}]`;
    if (!item || typeof item !== 'object' || Array.isArray(item)) {
      errors.push(`${label}: must be an object`);
      continue;
    }
    const instrumentId = item.instrument_id;
    const connector = normalizeConnectorType(item.connector);
    if (!instrumentId || typeof instrumentId !== 'string') {
      errors.push(`${label}: instrument_id is required`);
      continue;
    }
    if (instrumentIds.has(instrumentId)) {
      errors.push(`${label}: duplicate instrument_id "${instrumentId}"`);
      continue;
    }
    instrumentIds.add(instrumentId);
    if (!connector) {
      errors.push(`${label}: connector.type is required`);
      continue;
    }
    if (connector === 'tcp-client') {
      errors.push(`${label}: connector.type tcp-client is not supported yet`);
      continue;
    }
    // Derive the effective translator locally — do NOT mutate the caller's
    // item; fall back to the connector's default parser when absent/invalid.
    const resolvedTranslator =
      item.translator && typeof item.translator === 'object' && !Array.isArray(item.translator)
        ? item.translator
        : { parser: defaultParserForConnector(connector) };
    if (!resolvedTranslator.parser || typeof resolvedTranslator.parser !== 'string') {
      errors.push(`${label}: translator.parser is required`);
      continue;
    }
    const connectorConfig = item.connectorConfig && typeof item.connectorConfig === 'object'
      ? item.connectorConfig
      : {};
    // Copy match so derived keys below never touch the caller's object.
    const match = item.match && typeof item.match === 'object' ? { ...item.match } : {};
    if (connector === 'astm-serial') {
      // Serial instruments are matched by COM port, so a port name is mandatory.
      const comPort = connectorConfig.port || connectorConfig.comPort;
      if (!comPort || typeof comPort !== 'string') {
        errors.push(`${label}: connector.port is required for serial`);
        continue;
      }
      match.comPort = comPort;
    }
    if (connector === 'hl7-tcp' || connector === 'http-json') {
      // Network listeners are matched by local port when one is configured.
      const localPort = connectorConfig.port || connectorConfig.localPort;
      if (localPort !== undefined && localPort !== null && localPort !== '') {
        match.localPort = Number(localPort);
      }
    }
    entries.push({
      instrument_id: instrumentId,
      connector,
      enabled: item.enabled !== false,
      match,
      config: item.config || {},
      translator: resolvedTranslator,
      connectorConfig,
      files: {
        config: configFilePath
      }
    });
  }
  if (errors.length) {
    throw new InstrumentConfigValidationError(errors);
  }
  return entries;
}
module.exports = {
InstrumentConfigValidationError,
validateAndLoadInstrumentConfigs
};

View File

@ -1,7 +1,7 @@
const queue = require('../queue/sqliteQueue');
const client = require('../client/clqmsClient');
const logger = require('../utils/logger');
const config = require('../../config/default');
const config = require('../../config/app');
let running = false;
let workerPromise;

View File

@ -1,5 +1,4 @@
const queue = require('../queue/sqliteQueue');
const config = require('../../config/default');
const logger = require('../utils/logger');
const { normalize } = require('../normalizers');
const { dedupeKey } = require('../utils/hash');
@ -8,32 +7,43 @@ const instrumentService = require('../instrumentConfig/service');
const parserMap = {
'http-json': require('../parsers/httpParser'),
'hl7-tcp': require('../parsers/hl7Parser'),
'astm-serial': require('../parsers/astmParser')
'astm-serial': require('../parsers/astmParser'),
hl7: require('../parsers/hl7Parser'),
astm: require('../parsers/astmParser'),
http: require('../parsers/httpParser')
};
async function processMessage(connector, rawPayload) {
// Pick the parser for a message: the instrument's translator may pin an
// explicit parser name, otherwise the connector name doubles as the
// parserMap key. Throws when no parser is registered under that name.
function resolveParser(connector, instrumentEntry) {
  const parserName = instrumentEntry?.translator?.parser || connector;
  const parser = parserMap[parserName];
  if (parser) {
    return parser;
  }
  throw new Error(`no parser registered for ${parserName}`);
}
async function processMessage(connector, rawPayload, context = {}) {
const rawRecord = await queue.insertRaw(connector, rawPayload);
const rawId = rawRecord?.lastID;
try {
const parser = parserMap[connector];
if (!parser) {
throw new Error(`no parser registered for ${connector}`);
const matcher = instrumentService.resolveForMessage(connector, context);
if (matcher.status === 'no_match') {
logger.warn({ connector, context }, 'no matching instrument config, dropping payload');
await queue.markRawParsed(rawId, 'dropped', 'no matching instrument config');
return { dropped: true };
}
if (matcher.status === 'ambiguous') {
logger.warn({ connector, context, matches: matcher.matches }, 'ambiguous instrument match, dropping payload');
await queue.markRawParsed(rawId, 'dropped', 'ambiguous instrument match');
return { dropped: true };
}
const instrumentEntry = matcher.entry;
const parser = resolveParser(connector, instrumentEntry);
const parsed = await parser.parse(rawPayload);
const canonical = normalize(parsed);
const instrumentEntry = instrumentService.get(canonical.instrument_id);
if (!instrumentEntry || !instrumentEntry.enabled) {
logger.warn({ instrument: canonical.instrument_id, connector }, 'no enabled instrument config, dropping payload');
await queue.markRawParsed(rawId, 'dropped', 'instrument disabled/no config');
return { dropped: true };
}
if (instrumentEntry.connector !== connector) {
logger.warn({ instrument: canonical.instrument_id, expected: instrumentEntry.connector, actual: connector }, 'connector mismatch for instrument');
await queue.markRawParsed(rawId, 'dropped', 'connector mismatch');
return { dropped: true };
}
const translated = instrumentService.applyTranslator(instrumentEntry, parsed, connector);
const canonical = normalize(translated);
const dedupe = dedupeKey(canonical);
canonical.meta = { ...(canonical.meta || {}), instrument_config: instrumentEntry.config };
const inserted = await queue.insertOutbox(canonical, dedupe);
await queue.markRawParsed(rawId, 'processed');
if (inserted && inserted.duplicate) {

View File

@ -1,6 +1,6 @@
const crypto = require('crypto');
const DatabaseClient = require('../storage/db');
const config = require('../../config/default');
const config = require('../../config/app');
class SqliteQueue {
constructor() {

View File

@ -1,16 +1,8 @@
const express = require('express');
const { z } = require('zod');
const service = require('../instrumentConfig/service');
const router = express.Router();
const schema = z.object({
instrument_id: z.string().min(1),
connector: z.enum(['http-json', 'hl7-tcp', 'astm-serial']),
enabled: z.boolean().optional().default(true),
config: z.record(z.any()).optional()
});
router.get('/', async (req, res) => {
res.json(service.list());
});
@ -23,14 +15,4 @@ router.get('/:id', async (req, res) => {
res.json(entry);
});
router.post('/', express.json(), async (req, res) => {
try {
const payload = schema.parse(req.body);
const saved = await service.upsert(payload);
res.status(201).json(saved);
} catch (err) {
res.status(400).json({ error: err.message });
}
});
module.exports = { router };

View File

@ -0,0 +1,23 @@
const {
InstrumentConfigValidationError,
validateAndLoadInstrumentConfigs
} = require('../instrumentConfig/validator');
// CLI entry point: validate the instrument config and report the outcome.
// Prints every validation issue and exits non-zero on failure so the command
// is usable as a pre-start / CI gate.
function main() {
  try {
    const entries = validateAndLoadInstrumentConfigs();
    console.log(`instrument check passed (${entries.length} instrument(s))`);
  } catch (err) {
    if (err instanceof InstrumentConfigValidationError) {
      console.error('instrument check failed:');
      for (const issue of err.errors) {
        console.error(`- ${issue}`);
      }
    } else {
      console.error('instrument check failed:', err.message);
    }
    process.exit(1);
  }
}
main();

View File

@ -1,7 +1,7 @@
const fs = require('fs');
const path = require('path');
const sqlite3 = require('sqlite3');
const config = require('../../config/default');
const config = require('../../config/app');
const dbPath = config.db.path;

View File

@ -0,0 +1,9 @@
const { validateAndLoadInstrumentConfigs } = require('../instrumentConfig/validator');
// Read-only store facade over the YAML-driven validator. Keeps the async
// list() contract of the old DB-backed store so consumers need no changes;
// a synchronous validation throw surfaces as a rejected promise.
class InstrumentConfigFileStore {
  async list() {
    const entries = validateAndLoadInstrumentConfigs();
    return entries;
  }
}

// Singleton: all consumers share one store instance.
module.exports = new InstrumentConfigFileStore();

View File

@ -1,5 +1,5 @@
const DatabaseClient = require('./db');
const config = require('../../config/default');
const config = require('../../config/app');
class InstrumentConfigStore {
constructor() {

View File

@ -1,16 +1,73 @@
const fs = require('fs');
const path = require('path');
const DatabaseClient = require('./db');
const config = require('../../config/default');
const config = require('../../config/app');
const LOCK_MIGRATION_FILE = '002_outbox_locks.sql';
// Create the migration-tracking table if it does not exist yet, so each
// applied migration filename can be recorded with its application time.
// Idempotent: safe to call on every startup of the migration runner.
async function ensureMigrationTable(db) {
await db.exec(`
CREATE TABLE IF NOT EXISTS schema_migrations (
filename TEXT PRIMARY KEY,
applied_at TEXT NOT NULL DEFAULT (datetime('now'))
);
`);
}
// True when the given migration filename is already recorded in
// schema_migrations (i.e. it has been applied before).
async function migrationIsApplied(db, filename) {
  const sql = 'SELECT filename FROM schema_migrations WHERE filename = ? LIMIT 1';
  const existing = await db.get(sql, [filename]);
  return Boolean(existing);
}
// Record a migration filename as applied. INSERT OR IGNORE makes this
// idempotent when the row already exists.
async function markMigrationApplied(db, filename) {
  const sql = 'INSERT OR IGNORE INTO schema_migrations (filename) VALUES (?)';
  await db.run(sql, [filename]);
}
// Add the outbox lock columns (locked_at, locked_by) to outbox_result if they
// are missing. SQLite has no "ADD COLUMN IF NOT EXISTS", so the live table is
// inspected via PRAGMA table_info and only absent columns are ALTERed in.
async function applyOutboxLockMigration(db) {
  const tableInfo = await db.all('PRAGMA table_info(outbox_result)');
  const existing = new Set(tableInfo.map((column) => column.name));
  const wanted = [
    ['locked_at', 'ALTER TABLE outbox_result ADD COLUMN locked_at INTEGER NULL;'],
    ['locked_by', 'ALTER TABLE outbox_result ADD COLUMN locked_by TEXT NULL;']
  ];
  for (const [columnName, ddl] of wanted) {
    if (!existing.has(columnName)) {
      await db.exec(ddl);
    }
  }
}
async function migrate() {
const db = new DatabaseClient(config.db);
const migrationsDir = path.join(__dirname, '..', '..', 'db', 'migrations');
const files = fs.readdirSync(migrationsDir).filter((name) => name.endsWith('.sql'));
const files = fs
.readdirSync(migrationsDir)
.filter((name) => name.endsWith('.sql'))
.sort();
await ensureMigrationTable(db);
for (const file of files) {
if (await migrationIsApplied(db, file)) {
continue;
}
if (file === LOCK_MIGRATION_FILE) {
await applyOutboxLockMigration(db);
await markMigrationApplied(db, file);
continue;
}
const payload = fs.readFileSync(path.join(migrationsDir, file), 'utf8');
await db.exec(payload);
await markMigrationApplied(db, file);
}
await db.close();
}

View File

@ -1,5 +1,5 @@
const pino = require('pino');
const config = require('../../config/default');
const config = require('../../config/app');
const logger = pino({
level: process.env.LOG_LEVEL || 'info',

338
package-lock.json generated
View File

@ -1,5 +1,5 @@
{
"name": "pandalink",
"name": "tinylink",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
@ -11,8 +11,10 @@
"dependencies": {
"express": "^5.2.1",
"pino": "^10.3.1",
"serialport": "^12.0.0",
"sqlite3": "^6.0.1",
"undici": "^7.24.6",
"yaml": "^2.8.3",
"zod": "^4.3.6"
}
},
@ -84,6 +86,263 @@
"integrity": "sha512-k2ENnmBugE/rzQfEcdWHcCY+/FM3VLzH9cYEsbdsoqrvzAKRhUZeRNhAZvB8OitQJ1TBed3yqWtdjzS6wJKBwg==",
"license": "MIT"
},
"node_modules/@serialport/binding-mock": {
"version": "10.2.2",
"resolved": "https://registry.npmjs.org/@serialport/binding-mock/-/binding-mock-10.2.2.tgz",
"integrity": "sha512-HAFzGhk9OuFMpuor7aT5G1ChPgn5qSsklTFOTUX72Rl6p0xwcSVsRtG/xaGp6bxpN7fI9D/S8THLBWbBgS6ldw==",
"license": "MIT",
"dependencies": {
"@serialport/bindings-interface": "^1.2.1",
"debug": "^4.3.3"
},
"engines": {
"node": ">=12.0.0"
}
},
"node_modules/@serialport/bindings-cpp": {
"version": "12.0.1",
"resolved": "https://registry.npmjs.org/@serialport/bindings-cpp/-/bindings-cpp-12.0.1.tgz",
"integrity": "sha512-r2XOwY2dDvbW7dKqSPIk2gzsr6M6Qpe9+/Ngs94fNaNlcTRCV02PfaoDmRgcubpNVVcLATlxSxPTIDw12dbKOg==",
"hasInstallScript": true,
"license": "MIT",
"dependencies": {
"@serialport/bindings-interface": "1.2.2",
"@serialport/parser-readline": "11.0.0",
"debug": "4.3.4",
"node-addon-api": "7.0.0",
"node-gyp-build": "4.6.0"
},
"engines": {
"node": ">=16.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-delimiter": {
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-delimiter/-/parser-delimiter-11.0.0.tgz",
"integrity": "sha512-aZLJhlRTjSmEwllLG7S4J8s8ctRAS0cbvCpO87smLvl3e4BgzbVgF6Z6zaJd3Aji2uSiYgfedCdNc4L6W+1E2g==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/bindings-cpp/node_modules/@serialport/parser-readline": {
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-readline/-/parser-readline-11.0.0.tgz",
"integrity": "sha512-rRAivhRkT3YO28WjmmG4FQX6L+KMb5/ikhyylRfzWPw0nSXy97+u07peS9CbHqaNvJkMhH1locp2H36aGMOEIA==",
"license": "MIT",
"dependencies": {
"@serialport/parser-delimiter": "11.0.0"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/bindings-cpp/node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"license": "MIT",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/@serialport/bindings-cpp/node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"license": "MIT"
},
"node_modules/@serialport/bindings-cpp/node_modules/node-addon-api": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.0.0.tgz",
"integrity": "sha512-vgbBJTS4m5/KkE16t5Ly0WW9hz46swAstv0hYYwMtbG7AznRhNyfLRe8HZAiWIpcHzoO7HxhLuBQj9rJ/Ho0ZA==",
"license": "MIT"
},
"node_modules/@serialport/bindings-interface": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@serialport/bindings-interface/-/bindings-interface-1.2.2.tgz",
"integrity": "sha512-CJaUd5bLvtM9c5dmO9rPBHPXTa9R2UwpkJ0wdh9JCYcbrPWsKz+ErvR0hBLeo7NPeiFdjFO4sonRljiw4d2XiA==",
"license": "MIT",
"engines": {
"node": "^12.22 || ^14.13 || >=16"
}
},
"node_modules/@serialport/parser-byte-length": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-byte-length/-/parser-byte-length-12.0.0.tgz",
"integrity": "sha512-0ei0txFAj+s6FTiCJFBJ1T2hpKkX8Md0Pu6dqMrYoirjPskDLJRgZGLqoy3/lnU1bkvHpnJO+9oJ3PB9v8rNlg==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-cctalk": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-cctalk/-/parser-cctalk-12.0.0.tgz",
"integrity": "sha512-0PfLzO9t2X5ufKuBO34DQKLXrCCqS9xz2D0pfuaLNeTkyGUBv426zxoMf3rsMRodDOZNbFblu3Ae84MOQXjnZw==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-delimiter": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-delimiter/-/parser-delimiter-12.0.0.tgz",
"integrity": "sha512-gu26tVt5lQoybhorLTPsH2j2LnX3AOP2x/34+DUSTNaUTzu2fBXw+isVjQJpUBFWu6aeQRZw5bJol5X9Gxjblw==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-inter-byte-timeout": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-inter-byte-timeout/-/parser-inter-byte-timeout-12.0.0.tgz",
"integrity": "sha512-GnCh8K0NAESfhCuXAt+FfBRz1Cf9CzIgXfp7SdMgXwrtuUnCC/yuRTUFWRvuzhYKoAo1TL0hhUo77SFHUH1T/w==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-packet-length": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-packet-length/-/parser-packet-length-12.0.0.tgz",
"integrity": "sha512-p1hiCRqvGHHLCN/8ZiPUY/G0zrxd7gtZs251n+cfNTn+87rwcdUeu9Dps3Aadx30/sOGGFL6brIRGK4l/t7MuQ==",
"license": "MIT",
"engines": {
"node": ">=8.6.0"
}
},
"node_modules/@serialport/parser-readline": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-readline/-/parser-readline-12.0.0.tgz",
"integrity": "sha512-O7cywCWC8PiOMvo/gglEBfAkLjp/SENEML46BXDykfKP5mTPM46XMaX1L0waWU6DXJpBgjaL7+yX6VriVPbN4w==",
"license": "MIT",
"dependencies": {
"@serialport/parser-delimiter": "12.0.0"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-ready": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-ready/-/parser-ready-12.0.0.tgz",
"integrity": "sha512-ygDwj3O4SDpZlbrRUraoXIoIqb8sM7aMKryGjYTIF0JRnKeB1ys8+wIp0RFMdFbO62YriUDextHB5Um5cKFSWg==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-regex": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-regex/-/parser-regex-12.0.0.tgz",
"integrity": "sha512-dCAVh4P/pZrLcPv9NJ2mvPRBg64L5jXuiRxIlyxxdZGH4WubwXVXY/kBTihQmiAMPxbT3yshSX8f2+feqWsxqA==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-slip-encoder": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-slip-encoder/-/parser-slip-encoder-12.0.0.tgz",
"integrity": "sha512-0APxDGR9YvJXTRfY+uRGhzOhTpU5akSH183RUcwzN7QXh8/1jwFsFLCu0grmAUfi+fItCkR+Xr1TcNJLR13VNA==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/parser-spacepacket": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/parser-spacepacket/-/parser-spacepacket-12.0.0.tgz",
"integrity": "sha512-dozONxhPC/78pntuxpz/NOtVps8qIc/UZzdc/LuPvVsqCoJXiRxOg6ZtCP/W58iibJDKPZPAWPGYeZt9DJxI+Q==",
"license": "MIT",
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/stream": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/@serialport/stream/-/stream-12.0.0.tgz",
"integrity": "sha512-9On64rhzuqKdOQyiYLYv2lQOh3TZU/D3+IWCR5gk0alPel2nwpp4YwDEGiUBfrQZEdQ6xww0PWkzqth4wqwX3Q==",
"license": "MIT",
"dependencies": {
"@serialport/bindings-interface": "1.2.2",
"debug": "4.3.4"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/@serialport/stream/node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"license": "MIT",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/@serialport/stream/node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"license": "MIT"
},
"node_modules/abbrev": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/abbrev/-/abbrev-4.0.0.tgz",
@ -1200,6 +1459,17 @@
"node": "^20.17.0 || >=22.9.0"
}
},
"node_modules/node-gyp-build": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.6.0.tgz",
"integrity": "sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ==",
"license": "MIT",
"bin": {
"node-gyp-build": "bin.js",
"node-gyp-build-optional": "optional.js",
"node-gyp-build-test": "build-test.js"
}
},
"node_modules/nopt": {
"version": "9.0.0",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz",
@ -1605,6 +1875,57 @@
"url": "https://opencollective.com/express"
}
},
"node_modules/serialport": {
"version": "12.0.0",
"resolved": "https://registry.npmjs.org/serialport/-/serialport-12.0.0.tgz",
"integrity": "sha512-AmH3D9hHPFmnF/oq/rvigfiAouAKyK/TjnrkwZRYSFZxNggJxwvbAbfYrLeuvq7ktUdhuHdVdSjj852Z55R+uA==",
"license": "MIT",
"dependencies": {
"@serialport/binding-mock": "10.2.2",
"@serialport/bindings-cpp": "12.0.1",
"@serialport/parser-byte-length": "12.0.0",
"@serialport/parser-cctalk": "12.0.0",
"@serialport/parser-delimiter": "12.0.0",
"@serialport/parser-inter-byte-timeout": "12.0.0",
"@serialport/parser-packet-length": "12.0.0",
"@serialport/parser-readline": "12.0.0",
"@serialport/parser-ready": "12.0.0",
"@serialport/parser-regex": "12.0.0",
"@serialport/parser-slip-encoder": "12.0.0",
"@serialport/parser-spacepacket": "12.0.0",
"@serialport/stream": "12.0.0",
"debug": "4.3.4"
},
"engines": {
"node": ">=16.0.0"
},
"funding": {
"url": "https://opencollective.com/serialport/donate"
}
},
"node_modules/serialport/node_modules/debug": {
"version": "4.3.4",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
"integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==",
"license": "MIT",
"dependencies": {
"ms": "2.1.2"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/serialport/node_modules/ms": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==",
"license": "MIT"
},
"node_modules/serve-static": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.1.tgz",
@ -2051,6 +2372,21 @@
"node": ">=18"
}
},
"node_modules/yaml": {
"version": "2.8.3",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.3.tgz",
"integrity": "sha512-AvbaCLOO2Otw/lW5bmh9d/WEdcDFdQp2Z2ZUH3pX9U2ihyUY0nvLv7J6TrWowklRGPYbB/IuIMfYgxaCPg5Bpg==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14.6"
},
"funding": {
"url": "https://github.com/sponsors/eemeli"
}
},
"node_modules/zod": {
"version": "4.3.6",
"resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz",

View File

@ -1,5 +1,5 @@
{
"name": "pandalink",
"name": "tinylink",
"version": "1.0.0",
"description": "Workstation middleware service",
"main": "middleware/src/index.js",
@ -10,6 +10,7 @@
"start": "node middleware/src/index.js",
"migrate": "node middleware/src/storage/migrate.js",
"maintenance": "node middleware/src/scripts/maintenance.js",
"instrument:check": "node middleware/src/scripts/instrumentCheck.js",
"test": "node middleware/test/parsers.test.js"
},
"keywords": [],
@ -19,8 +20,10 @@
"dependencies": {
"express": "^5.2.1",
"pino": "^10.3.1",
"serialport": "^12.0.0",
"sqlite3": "^6.0.1",
"undici": "^7.24.6",
"yaml": "^2.8.3",
"zod": "^4.3.6"
}
}