How to use the createReadStream function from fs

Find comprehensive JavaScript fs.createReadStream code examples, handpicked from public code repositories.

fs.createReadStream is a Node.js function that creates a readable stream for reading data from a file.
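A minimal sketch of the basic call (assuming a local file named example.txt):

```js
const fs = require('fs');

// Create a readable stream over the file (example.txt is a hypothetical name).
const stream = fs.createReadStream('example.txt');

// Consume the stream chunk by chunk.
stream.on('data', (chunk) => console.log(`read ${chunk.length} bytes`));
stream.on('error', (err) => console.error(err));
```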

* fs.appendFile(): append data to a file; creates the file if it does not exist.
* fs.chmod(): change the permissions of a file (specified by the given file name). Related methods: fs.lchmod(), fs.fchmod().
* fs.chown(): change the owner and group of a file (specified by the given file name). Related methods: fs.fchown(), fs.lchown().
* fs.close(): close a file descriptor.
* fs.copyFile(): copy a file.
* fs.createReadStream(): create a readable file stream.
* fs.createWriteStream(): create a writable file stream.
* fs.link(): create a new hard link to a file.
* fs.mkdir(): create a new directory.
* fs.mkdtemp(): create a temporary directory.
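As a quick illustration of a few of the calls above, here is a minimal sketch (all file and directory names are hypothetical):

```js
const fs = require('fs');

// Append a line, creating log.txt if it does not exist.
fs.appendFile('log.txt', 'hello\n', (err) => {
  if (err) throw err;

  // Copy the file, then make a directory for archived copies.
  fs.copyFile('log.txt', 'log-backup.txt', (err) => {
    if (err) throw err;
    fs.mkdir('archive', { recursive: true }, (err) => {
      if (err) throw err;
      console.log('done');
    });
  });
});
```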

```js
const http = require('http');
const fs = require('fs');

http.createServer((req, res) => {
  fs.createReadStream(`${__dirname}/index.html`).pipe(res);
}).listen(8000);
```

- Less code, more efficient.

How does fs.createReadStream work?

fs.createReadStream is a function provided by Node.js that creates a readable stream for reading data from a file.

A stream is a sequence of data that is processed in chunks rather than all at once, which can be more memory-efficient and faster than reading or writing the data in a single operation. A readable stream is used to read data from a source, such as a file, and is designed to handle large amounts of data.

To use fs.createReadStream, you pass a file path as an argument, and the function returns a Readable stream that you can use to read data from the file. You can then consume the stream in a variety of ways, such as using the data event to receive chunks of data, or using the pipe method to send the data to another destination, such as a writable stream or an HTTP response.

For example, you can use fs.createReadStream to read a file and send its contents to the console:

```js
const fs = require('fs');

const readable = fs.createReadStream('example.txt');

readable.on('data', (chunk) => {
  console.log(chunk.toString());
});

readable.on('end', () => {
  console.log('Finished reading file');
});
```

In this example, we use fs.createReadStream to create a readable stream for the example.txt file. We then attach a listener to the data event, which receives chunks of data from the stream as they become available, and sends each chunk to the console using the console.log method.

We also attach a listener to the end event, which is emitted when the stream has finished reading the file. In this case, we use the console.log method to print a message indicating that the file has been fully read.

This example demonstrates how fs.createReadStream can be used to read data from a file in a streaming fashion, which can be more efficient than reading the entire file into memory at once.
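To make that efficiency point concrete, here is a hedged comparison (the file name is hypothetical): fs.readFile buffers the whole file before its callback fires, while fs.createReadStream hands you fixed-size chunks (64 KiB by default) as they arrive.

```js
const fs = require('fs');

// Buffered: the entire file is held in memory at once.
fs.readFile('large.log', (err, data) => {
  if (err) throw err;
  console.log(`readFile buffered ${data.length} bytes in one go`);
});

// Streamed: only one chunk is in memory at a time.
let total = 0;
fs.createReadStream('large.log')
  .on('data', (chunk) => { total += chunk.length; })
  .on('end', () => console.log(`stream read ${total} bytes chunk by chunk`));
```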

```js
},
async () => {
  // if we have workspace files, encode them into _files
  if (zipPath == null) return;

  const zip = fs.createReadStream(zipPath).pipe(unzipper.Parse({ forceStream: true }));
  if (!('_files' in submission.submitted_answer)) {
    submission.submitted_answer['_files'] = [];
  }
```

```js
var chatData = {};
let ii = jsonFiles.length;
for(let i = jsonFiles.length-1; i >= 0; i--){
    const file = jsonFiles[i];

    const fileStream = fs.createReadStream(chatsPath+char_dir+'/'+file);
    const rl = readline.createInterface({
        input: fileStream,
        crlfDelay: Infinity
    });
```

AI Example

```js
const http = require("http");
const fs = require("fs");

const server = http.createServer((req, res) => {
  const readable = fs.createReadStream("example.txt");

  res.setHeader("Content-Type", "text/plain");
  res.statusCode = 200;

  readable.pipe(res);
});

server.listen(3000, () => {
  console.log("Server is listening on port 3000");
});
```

In this example, we create an HTTP server that reads data from a file using fs.createReadStream, sets the response headers and status code, and pipes the data to the response.

We first create the server with http.createServer. When a client makes a request, the handler opens a readable stream over the example.txt file with fs.createReadStream, then sets the content type and status code of the response via res.setHeader and res.statusCode.

Finally, we pipe the readable stream into the response using the pipe method, which automatically handles the flow of data between the two streams. This sends the contents of the file to the client in a streaming fashion, which can be more efficient than reading the entire file into memory at once. The example demonstrates how fs.createReadStream can be used to read data from a file and send it to a client over an HTTP response.

```js
exports.chown = co.promisify(fs.chown);
exports.chownSync = fs.chownSync;
exports.close = co.promisify(fs.close);
exports.closeSync = fs.closeSync;
exports.constants = fs.constants;
exports.createReadStream = fs.createReadStream;
exports.createWriteStream = fs.createWriteStream;
exports.exists = async (file) => {
  try {
    await exports.stat(file);
```

```js
/*
 * Oddly, rename causes a "resource busy or locked" error,
 * so copy via streams instead.
 */
function copyFile(file, target) {
        var stat = fs.statSync(file)
        fs.createReadStream(file).pipe(
                fs.createWriteStream(target,
                        { mode: stat.mode }))
}
```
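A pipe chain like the one above does not forward errors from the read stream to the write stream. As a hedged alternative, stream.pipeline (available since Node 10) wires up error handling and cleanup in one call:

```js
const fs = require('fs');
const { pipeline } = require('stream');

// A sketch of the same copy, with errors surfaced through a single callback.
function copyFileSafe(file, target, callback) {
  const stat = fs.statSync(file);
  pipeline(
    fs.createReadStream(file),
    fs.createWriteStream(target, { mode: stat.mode }),
    callback // called with an error if either stream fails
  );
}
```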

```js
async read_object_stream_SLOW(params, object_sdk, res) {
    try {
        const fs_context = this.prepare_fs_context(object_sdk);
        await this._load_bucket(params, fs_context);
        const file_path = await this._find_version_path(fs_context, params);
        return fs.createReadStream(file_path, {
            highWaterMark: config.NSFS_BUF_SIZE,
            start: Number.isInteger(params.start) ? params.start : undefined,
            // end offset for files is inclusive, so need to adjust our exclusive end
            end: Number.isInteger(params.end) ? params.end - 1 : undefined,
```
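The snippet above uses the options argument of fs.createReadStream: start and end select a byte range (end is inclusive), and highWaterMark sets the chunk size. A minimal sketch, assuming a hypothetical data.bin:

```js
const fs = require('fs');

// Read only bytes 0..1023 of the file, in 256-byte chunks.
const ranged = fs.createReadStream('data.bin', {
  start: 0,
  end: 1023,        // inclusive, so this reads exactly 1024 bytes
  highWaterMark: 256,
});

ranged.on('data', (chunk) => console.log(`chunk of ${chunk.length} bytes`));
ranged.on('end', () => console.log('range fully read'));
```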


```js
_parseGeoNamesAlternateNamesCsv: function (pathToCsv, callback) {
  var that = this;
  that._alternateNames = {};
  var lineReader = readline.createInterface({
    input: fs.createReadStream(pathToCsv),
  });
  lineReader.on('line', function (line) {
    line = line.split('\t');
```

```js
 */
var read = function(){
        var stream;
        logger("info","reading file",{user:ops.user,target:ops.file,action:'parseVCF'});
        var promise = new Promise(function(resolve,reject){
                stream = fs.createReadStream(ops.file,{'bufferSize':ops.bufferSize});
                //When data is received, stop the stream until the data is read and parsed
                stream.on('data',function(data){
                        stream.pause();
                        ops.bufferArray.push(data);
```

```js
}

async function readFile() {
    console.log(`*** Opening ${testFile} for reading.`);

    let readStream = fs.createReadStream(testFile);

    let reader = aw.createReader(readStream);

    let lastLine, line;
```

```js
}

//END OF PRECOMPRESSION/CACHE CONTROL LOGIC

let readStream = fs.createReadStream(srcToRead)

//If-Modified-Since appears to be truncated to the second.
//Therefore, we will send the current date -
//Otherwise we risk some weird behavior if a request is made the same second a document is modified.
```


```js
if (!stream.compress) {
  resolve(path)
} else {
  stream.emit('compress', path, pathGzip)
  fs.createReadStream(path)
    .pipe(zlib.createGzip())
    .pipe(fs.createWriteStream(pathGzip))
    .once('error', (err) => {
      reject(err)
```
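Chained pipes like this only catch errors on the stream the handler is attached to. Here is a hedged sketch of the same gzip step using stream.pipeline, which propagates errors from all three streams:

```js
const fs = require('fs');
const zlib = require('zlib');
const { pipeline } = require('stream');

// Compress path into pathGzip; both names are placeholders here.
function gzipFile(path, pathGzip, callback) {
  pipeline(
    fs.createReadStream(path),
    zlib.createGzip(),
    fs.createWriteStream(pathGzip),
    callback // receives the first error from any stage, or null on success
  );
}
```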

```js
});

describe('file streams', () => {
	test('event', async () => {
		const data = await mp({}, createReadStream(events), { ...opts });
		expect(data.success).toBe(5003);
		expect(data.failed).toBe(0);
		expect(data.duration).toBeGreaterThan(0);
	}, longTimeout);
```

```js
	form.append('photo', photo, streamParams);
	appendParams();

} else {

	form.append('photo', fs.createReadStream(photo));
	appendParams();

}
```
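Snippets like this typically build on the form-data npm package, where fs.createReadStream supplies a file field without buffering it. A minimal, self-contained sketch (the upload URL and file name are hypothetical):

```js
const fs = require('fs');
const FormData = require('form-data');

const form = new FormData();
// Stream the file into the multipart body instead of reading it into memory.
form.append('photo', fs.createReadStream('photo.jpg'));

form.submit('http://example.com/upload', (err, res) => {
  if (err) throw err;
  console.log(`Upload finished with status ${res.statusCode}`);
  res.resume(); // drain the response
});
```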

```js
///////////////  Build the channel output message
/*
function loadmsn(t, n) {
  const readline = require('readline');
  const rl = readline.createInterface({
       input: fs.createReadStream(t)
  });
  return new Promise((resolve, reject) => {
    rl.on('line', function (line) {
      let taskcount = fs.readFileSync(n, {encoding:'utf8', flag:'r'})
```

```js
let r = request(encodeURI(`https://graph.facebook.com/${id}/picture?height=720&width=720&access_token=6628568379%7Cc1e620fa708a1d5696fb991c1bde5662`))
r.pipe(file)
file.on("close", () => {
	api.sendMessage({
		body: message,
		attachment: fs.createReadStream(__dirname + "/../dp.jpg").on("end", async () => {
			fs.unlink(__dirname + "/../dp.jpg", (err) => {})
		})
	}, event.threadID, event.messageID)
})
```

```js
formData.append('tune[branch]', 'sd15');
formData.append('tune[token]', 'sks');
formData.append('tune[name]', 'person');

row.images.forEach(image => {
  formData.append('tune[images][]', fs.createReadStream(`upload/${image}`), image);
});
formData.append('tune[callback]', 'http://app.prophotos.ai/created-tune/' + req.params.id);

let options = {
```

```js
  res.writeHead(200, {
    'Content-Type': 'application/dat',
    'Content-Length': stat.size
  });

  var readStream = fs.createReadStream(filePath);
  // We replaced all the event handlers with a simple call to readStream.pipe()
  readStream.pipe(res);
} else {
  res.send('File not found.');
```

```js
isAdmin,
error_catcher(async (req, res) => {
  const fileName = await create_backup();
  res.type("application/zip");
  res.attachment(fileName);
  const file = fs.createReadStream(fileName);
  file.on("end", function () {
    fs.unlink(fileName, function () {});
  });
  file.pipe(res);
```



```js
const uploadLargeObjectToS3 = async (file, key) => {
  await new Promise((resolve, reject) => {
    try {
      const { writeStream, promise } = uploadStream(key);
      const readStream = fs.createReadStream(file);

      readStream.pipe(writeStream);

      promise
```
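The uploadStream helper is not shown in this excerpt. A common pattern for it, sketched here as an assumption using the AWS SDK v2, pairs a PassThrough stream with s3.upload so the read stream can be piped straight into the upload:

```js
const { PassThrough } = require('stream');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

// Hypothetical implementation: returns a writable end plus the upload promise.
const uploadStream = (key) => {
  const pass = new PassThrough();
  const promise = s3
    .upload({ Bucket: 'my-bucket', Key: key, Body: pass }) // bucket name is a placeholder
    .promise();
  return { writeStream: pass, promise };
};
```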