Commit 378f01a

docs: replace util.promisify with stream/promises (#544)
1 parent b313002 commit 378f01a


README.md

Lines changed: 8 additions & 10 deletions
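
For context: `node:stream/promises` (available since Node.js 15) exposes a promise-returning `pipeline` directly, so the manual `util.promisify` step the README previously used is no longer needed. A minimal before/after sketch, separate from the diff below:

```js
// Before: promisify the callback-based pipeline by hand
const util = require('node:util')
const { pipeline } = require('node:stream')
const pump = util.promisify(pipeline)

// After: the promise-based API ships in Node.js core;
// it can be awaited exactly where pump() was used before
const { pipeline: pipelinePromise } = require('node:stream/promises')
```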
@@ -25,9 +25,7 @@ npm i @fastify/multipart
 ```js
 const fastify = require('fastify')()
 const fs = require('node:fs')
-const util = require('node:util')
-const { pipeline } = require('node:stream')
-const pump = util.promisify(pipeline)
+const { pipeline } = require('node:stream/promises')
 
 fastify.register(require('@fastify/multipart'))
 
@@ -50,7 +48,7 @@ fastify.post('/', async function (req, reply) {
   //
   // or
 
-  await pump(data.file, fs.createWriteStream(data.filename))
+  await pipeline(data.file, fs.createWriteStream(data.filename))
 
   // be careful of permission issues on disk and not overwrite
   // sensitive files that could cause security risks
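
Assembled from this hunk and the import hunk above, the updated upload flow reads roughly as follows. This is a sketch stitched together from the excerpts; the surrounding handler lines (`req.file()`, `reply.send()`) are taken from later hunks in this same diff:

```js
const fastify = require('fastify')()
const fs = require('node:fs')
const { pipeline } = require('node:stream/promises')

fastify.register(require('@fastify/multipart'))

fastify.post('/', async function (req, reply) {
  // resolves with the first file part of the multipart request
  const data = await req.file()

  // stream the upload straight to disk; pipeline() resolves when the
  // write stream finishes and rejects on any stream error
  await pipeline(data.file, fs.createWriteStream(data.filename))

  // be careful of permission issues on disk and do not overwrite
  // sensitive files that could cause security risks
  reply.send()
})
```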
@@ -89,7 +87,7 @@ fastify.register(require('@fastify/multipart'), {
 
 For security reasons, `@fastify/multipart` sets the limit for `parts` and `fileSize` being _1000_ and _1048576_ respectively.
 
-**Note**: if the file stream that is provided by `data.file` is not consumed, like in the example below with the usage of pump, the promise will not be fulfilled at the end of the multipart processing.
+**Note**: if the file stream that is provided by `data.file` is not consumed, like in the example below with the usage of pipeline, the promise will not be fulfilled at the end of the multipart processing.
 This behavior is inherited from [`@fastify/busboy`](https://github.com/fastify/busboy).
 
 **Note**: if you set a `fileSize` limit and you want to know if the file limit was reached you can:
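
Per the first note above, a `data.file` stream that is never consumed leaves the multipart promise pending. When a part should be discarded rather than saved, one way to consume it is the standard `Readable.resume()` method; a sketch, not part of this diff:

```js
fastify.post('/discard', async function (req, reply) {
  const data = await req.file()

  // switch the stream to flowing mode and throw the bytes away,
  // so multipart processing can finish and its promise settles
  data.file.resume()

  reply.send()
})
```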
@@ -99,7 +97,7 @@ This behavior is inherited from [`@fastify/busboy`](https://github.com/fastify/b
 
 ```js
 const data = await req.file()
-await pump(data.file, fs.createWriteStream(data.filename))
+await pipeline(data.file, fs.createWriteStream(data.filename))
 if (data.file.truncated) {
   // you may need to delete the part of the file that has been saved on disk
   // before the `limits.fileSize` has been reached
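
The comments in this hunk only hint at the cleanup step. A sketch of one way to finish it, assuming the partial file should simply be deleted; the `unlink` call and the helper name are illustrative, not from the README:

```js
const fs = require('node:fs')
const { unlink } = require('node:fs/promises')
const { pipeline } = require('node:stream/promises')

// hypothetical helper: save a part, then discard it if truncated
async function saveUnlessTruncated (data) {
  await pipeline(data.file, fs.createWriteStream(data.filename))
  if (data.file.truncated) {
    // remove the partial file written before `limits.fileSize` was hit
    await unlink(data.filename)
  }
}
```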
@@ -122,7 +120,7 @@ Additionally, you can pass per-request options to the `req.file`, `req.files`,
 fastify.post('/', async function (req, reply) {
   const options = { limits: { fileSize: 1000 } };
   const data = await req.file(options)
-  await pump(data.file, fs.createWriteStream(data.filename))
+  await pipeline(data.file, fs.createWriteStream(data.filename))
   reply.send()
 })
 ```
@@ -133,7 +131,7 @@ fastify.post('/', async function (req, reply) {
 fastify.post('/', async function (req, reply) {
   const parts = req.files()
   for await (const part of parts) {
-    await pump(part.file, fs.createWriteStream(part.filename))
+    await pipeline(part.file, fs.createWriteStream(part.filename))
   }
   reply.send()
 })
@@ -146,7 +144,7 @@ fastify.post('/upload/raw/any', async function (req, reply) {
   const parts = req.parts()
   for await (const part of parts) {
     if (part.type === 'file') {
-      await pump(part.file, fs.createWriteStream(part.filename))
+      await pipeline(part.file, fs.createWriteStream(part.filename))
     } else {
       // part.type === 'field
       console.log(part)
@@ -259,7 +257,7 @@ You can also define an `onFile` handler to avoid accumulating all files in memor
 async function onFile(part) {
   // you have access to original request via `this`
   console.log(this.id)
-  await pump(part.file, fs.createWriteStream(part.filename))
+  await pipeline(part.file, fs.createWriteStream(part.filename))
 }
 
 fastify.register(require('@fastify/multipart'), { attachFieldsToBody: true, onFile })
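
For reference, a hypothetical route to pair with the `onFile` handler above. With `attachFieldsToBody: true`, the parsed fields end up on the request body once every file part has been handled; the route path and logging are illustrative:

```js
fastify.post('/', async function (req, reply) {
  // files were already streamed to disk by `onFile`;
  // the remaining parsed fields arrive on the body
  console.log(req.body)
  reply.send()
})
```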
