import * as fs from "node:fs";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import { unpack, pack } from "msgpackr";
import * as pako from "pako";

// Return the newest .csv file in the directory, relying on lexicographically
// sortable file names (e.g. timestamp-based names).
function get_latest_file_path(raw_spectral_data_dir) {
  const files = fs
    .readdirSync(raw_spectral_data_dir)
    .filter((name) => name.endsWith(".csv"))
    .sort();
  const latest_name = files.pop();
  // console.log(latest_name, files.length)
  return path.resolve(raw_spectral_data_dir, latest_name);
}

async function main() {
  try {
    // process.send("child process started");
    const raw_spectral_data_dir = "C:/tmp/";
    let last_data_file = null;
    let latest_data_file = null;
    let fd_csv = null;
    let fd_bin = null;

    while (true) {
      latest_data_file = get_latest_file_path(raw_spectral_data_dir);
      if (latest_data_file !== last_data_file) {
        // A new acquisition file appeared: close the old descriptors and open
        // the new .csv index file together with its .bin payload file.
        if (fd_csv !== null) fs.closeSync(fd_csv);
        if (fd_bin !== null) fs.closeSync(fd_bin);
        fd_csv = fs.openSync(latest_data_file);
        fd_bin = fs.openSync(
          path.format({
            dir: path.dirname(latest_data_file),
            name: path.basename(latest_data_file, ".csv"),
            ext: ".bin", // the leading dot is required for path.format() to build "name.bin"
          })
        );
        last_data_file = latest_data_file;
      }

      // Start watching from the current end of the .csv index file.
      let last_pointer = fs.statSync(latest_data_file).size;
      while (true) {
        const stat = fs.statSync(latest_data_file);
        if (stat.size > last_pointer) {
          // Read the newly appended CSV record: "timestamp,offset,length".
          const buffer = Buffer.alloc(stat.size - last_pointer);
          fs.readSync(fd_csv, buffer, 0, stat.size - last_pointer, last_pointer);
          const info = buffer.toString().split(",");
          const timeStamp = Number(info[0]);
          const start_pointer = Number(info[1]);
          const length = Number(info[2]);

          // Read the spectral data block the record points to in the .bin file.
          const spectral_buffer = Buffer.alloc(length);
          fs.readSync(fd_bin, spectral_buffer, 0, length, start_pointer);

          // Pack with msgpack, gzip, and POST to the local processing server.
          const upload_data = { spectral_data_bin: spectral_buffer };
          const upload_data_compressed = pako.gzip(pack(upload_data));
          const response = await fetch("http://127.0.0.1:5000/post", {
            method: "POST",
            body: upload_data_compressed,
          });

          // The server replies with a gzipped msgpack payload as well.
          const response_data_compressed = await response.arrayBuffer();
          const response_data = unpack(
            pako.ungzip(new Uint8Array(response_data_compressed))
          );
          // console.log(response_data)

          // process.send() requires an IPC channel, i.e. this script being started via fork().
          process.send(response_data);
          break;
        }
        // Poll with a short delay so the watch loop does not spin at 100% CPU.
        await new Promise((resolve) => setTimeout(resolve, 50));
      }
    }
  } catch (err) {
    console.error(err.message);
  }
}

main().then(() => {
  // process.send("child process end");
});
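
/*
 * The commented-out process.send() calls above suggest this script is meant to be
 * launched as a forked child process with an IPC channel. A minimal sketch of the
 * parent side, assuming a hypothetical file name "spectral_uploader.mjs" for this
 * script (adjust the path to wherever the file actually lives):
 *
 *   import { fork } from "node:child_process";
 *
 *   // fork() opens an IPC channel, so process.send() is available in the child
 *   const child = fork("./spectral_uploader.mjs");
 *
 *   // each message is one unpacked server response forwarded by the child
 *   child.on("message", (response_data) => {
 *     console.log("server response:", response_data);
 *   });
 */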