diff --git a/.github/workflows/node.js.yml b/.github/workflows/node.js.yml
index 9a4bb39e..7a0c2775 100644
--- a/.github/workflows/node.js.yml
+++ b/.github/workflows/node.js.yml
@@ -19,7 +19,7 @@ jobs:
     strategy:
       max-parallel: 1
       matrix:
-        node-version: [16.13.0, 18]
+        node-version: [18, 20]
 
     steps:
       - uses: actions/checkout@v3
@@ -51,7 +51,6 @@ jobs:
           --force
           influx write --bucket dyne --file test/fixtures/influxdb_data.lp
       - run: yarn
-      - run: yarn add -W zenroom
      - run: yarn build
       - run: yarn test
       - uses: codecov/codecov-action@v3
@@ -85,4 +84,3 @@ jobs:
           lerna publish --no-verify-access -c -y --pre-dist-tag next --preid $(git rev-parse --short HEAD) --force-publish=*
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
-
diff --git a/package.json b/package.json
index e44b1ff7..6a55812e 100644
--- a/package.json
+++ b/package.json
@@ -11,7 +11,7 @@
     ]
   },
   "engines": {
-    "node": "^16.13.0 || ^18.0.0"
+    "node": "^18.0.0 || ^20.0.0"
   },
   "scripts": {
     "prerelease": "yarn build && yarn doc && git add docs && git commit -m 'docs: 📚️ 📝 Update docs'",
@@ -79,6 +79,7 @@
     "husky": "^5.0.9",
     "lerna": "^4.0.0",
     "morgan": "^1.10.0",
+    "node-gyp": "^10.0.0",
     "nodemon": "^2.0.18",
     "prettier": "^2.2.1",
     "qs": "^6.9.6",
diff --git a/packages/files/tests/index.ts b/packages/files/tests/index.ts
index 281fffb4..504ed195 100644
--- a/packages/files/tests/index.ts
+++ b/packages/files/tests/index.ts
@@ -77,10 +77,10 @@ test.serial('List content of directory', async (t) => {
   const { app } = t.context;
   const res = await app.post("/files_ls");
   t.is(res.status, 200, res.text);
-  t.is(res.body.test_dir.length, 7);
+  t.is(res.body.test_dir.length, 8);
   for(const file of res.body.test_dir) {
     t.is(file.blksize, 4096);
     t.is(file.mode.substr(0, 2), '40'); // All directory
   }
-  t.is(res.body.packages_dir.length, 18);
+  t.is(res.body.packages_dir.length, 19);
 })
diff --git a/packages/logger/src/index.ts b/packages/logger/src/index.ts
index bca45bfd..77522dad 100644
--- a/packages/logger/src/index.ts
+++ b/packages/logger/src/index.ts
@@ -25,7 +25,7 @@ export default (req: Request, res: Response, next: NextFunction) => {
 
   rr.onSuccess(async (params) => {
     const {result, zencode} = params;
-    const addLog = (sentences: string[], where: string) => {
+    const addLog = async (sentences: string[], where: string) => {
       const absolutePath = path.resolve(path.join(LOGGER_DIR, where));
       validatePath(absolutePath);
       const ws = fs.createWriteStream(absolutePath, {flags: "a"});
@@ -34,12 +34,15 @@ export default (req: Request, res: Response, next: NextFunction) => {
           `[LOGGER] An error occurred while writing to ${where}\n${error}`)
       });
       sentences.forEach( (v) => ws.write(`${v}\n`) )
+      await new Promise((resolve, reject) => {
+        ws.close(() => resolve())
+      })
     }
 
     if (zencode.match(Action.APPEND)) {
      const params = zencode.chunkedParamsOf(Action.APPEND, 2);
       for(const [ sentence, where ] of params) {
-        addLog([ result[sentence] || input[sentence] || sentence ], where);
+        await addLog([ result[sentence] || input[sentence] || sentence ], where);
       }
     }
     if (zencode.match(Action.APPEND_NAMED)) {
@@ -50,7 +53,7 @@ export default (req: Request, res: Response, next: NextFunction) => {
          throw new Error(
            `[LOGGER] Could not find path to log ${pathName}`)
        }
-        addLog([ result[sentence] || input[sentence] || sentence ], logPath);
+        await addLog([ result[sentence] || input[sentence] || sentence ], logPath);
       }
     }
     if (zencode.match(Action.APPEND_ARRAY)) {
@@ -61,7 +64,7 @@ export default (req: Request, res: Response, next: NextFunction) => {
          throw new Error(
            `[LOGGER] Could not find sentences array to log ${arrayName}`)
        }
-        addLog(sentences,
+        await addLog(sentences,
          result[where] || input[where] || where);
       }
     }