Rate exceeded while running stop services script Fargate - javascript

Really need help with this AWS Fargate stopping script, Im receiving an error called "Rate exceeded" "ThrottilingException", is there a way to add that my script detects the error and if it sees the error, it will pause for a little bit and continue where it was left off?
var AWS = require("aws-sdk");
AWS.config.update({region: "eu-central-1"});
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
var ecs = new AWS.ECS({apiVersion: '2014-11-13'});
async function getServiceArns(token) {
var params = {
cluster: 'my-cluster',
launchType: "FARGATE",
maxResults: 100,
nextToken: '',
schedulingStrategy: "REPLICA"
};
return new Promise(function(resolve) {
var services = []
if (token) {
params.nextToken = token
}
ecs.listServices(params, async function(err, data) {
if(data.nextToken) {
services.push(...(await getServiceArns(data.nextToken)))
}
services.push(...data.serviceArns)
resolve(services)
});
})
}
async function main(){
var arns = await getServiceArns()
var services = await Promise.all(arns.map(function(arn) {
return new Promise(function(resolve) {
ecs.describeServices({cluster: 'my-cluster', services:[arn]}, function(err, data) {
resolve({arn, count: data.services[0].desiredCount})
});
})
}))
var filtered = services.filter(item => item.count > 0)
var base64data = Buffer.from(JSON.stringify(filtered), 'binary');
await s3.putObject({
Bucket: 'my-bucket',
Key: 'services.txt',
Body: base64data,
}).promise();
await Promise.all(filtered.map(function(service) {
var params = {
desiredCount: 0,
cluster: "my-cluster",
service: service.arn,
};
console.log(service.arn)
// return ecs.updateService(params).promise()
}))
console.log('successfully shutdown all services')
}
exports.handler = main

Related

Using "return" in Async Google Cloud Functions

I am somewhat new to coding and recently created this script in order to pull data from Zoom and push it to Google Drive via API. I am trying to push this to a Google Cloud Function, but when running it in a Cloud Function and console logging each step in the process, it seems like the uploadFile function, specifically the drive.files.create method, is being skipped. Every other step is being console logged, but neither the err or res is being logged after the drive.files.create method. Google Cloud Functions does not show errors, instead it shows OK and that the function took 1500ms to execute. It works fine on my local machine, I am only having issues in Cloud Functions. Any suggestions on how to get this to act right would be super helpful. Thank you!
const axios = require("axios");
require("dotenv").config();
const stream = require("stream");
const request = require("request");
const { google } = require("googleapis");
const KEYFILEPATH = "./credentials.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];
const auth = new google.auth.GoogleAuth({
keyFile: KEYFILEPATH,
scopes: SCOPES,
});
let today = new Date().toISOString();
let zoomAccessToken;
let zoomDownloadUrl;
///////////////////////////////////////////////////////////////// Searching for latest Town Hall recording in Google.
const searchFile = async (auth) => {
const service = google.drive({ version: "v3", auth });
const files = [];
try {
const res = await service.files.list({
corpora: "drive",
includeItemsFromAllDrives: true,
supportsAllDrives: true,
driveId: "XXXXXXXXXXXXXXXX",
q: '"XXXXXXXXXXXXXXX" in parents',
fields: "nextPageToken, files(id, name)",
spaces: "drive",
});
Array.prototype.push.apply(files, res.files);
const filesArray = res.data.files;
const filesName = filesArray.map((x) => x.name).sort().reverse()[0];
console.log(filesName);
return filesName;
} catch (err) {
throw err;
}
};
///////////////////////////////////////////////////////////////// Get Zoom OAuth access token.
const getAccessToken = async () => {
return axios({
method: "post",
url: `https://zoom.us/oauth/token?grant_type=account_credentials&account_id=${process.env.ZOOM_ACCOUNT_ID}`,
headers: {
Authorization: "Basic" +new Buffer.from(process.env.ZOOM_CLIENT_ID + ":" + process.env.ZOOM_CLIENT_SECRET).toString("base64"),
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's data.
const getRecordingData = async () => {
const token = await getAccessToken();
zoomAccessToken = await token.data.access_token;
return axios({
method: "get",
url: "https://api.zoom.us/v2/meetings/XXXXXXXXX/recordings",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${zoomAccessToken}`,
},
});
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's date.
const getRecordingDate = async () => {
const recording = await getRecordingData();
const lastRecordingDate = await recording.data.start_time;
const recordingDateFormatted = `${lastRecordingDate.substring(0,4)}.${lastRecordingDate.substring(5, 7)}.${lastRecordingDate.substring(8,10)} - Town Hall.mp4`;
return recordingDateFormatted;
};
///////////////////////////////////////////////////////////////// Get the latest Town Hall recording's download link.
const zoomDownloadLink = async () => {
const recording = await getRecordingData();
zoomDownloadUrl = `${recording.data.recording_files[0].download_url}?access_token=${zoomAccessToken}`;
return zoomDownloadUrl;
};
///////////////////////////////////////////////////////////////// Upload data from latest Town Hall recording's download link to Google Drive.
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
///////////////////////////////////////////////////////////////// Compares Town Hall files in Google Drive and Zoom. If different, run uploadFile function.
exports.townHall = async () => {
const townHallFile = await searchFile(auth);
const lastRecordingDate = await getRecordingDate();
if (townHallFile != lastRecordingDate) {
await uploadFile(auth);
} else {
console.log("No Recording Today");
}
};
As you are calling an API inside a cloud function which is an async function but does not have a return statement, it will only execute the function but doesn't wait for the response, because the drive.files.create call is running.
So to fix that just need to await the result of the API. Just add
return await statement on the API call
like:
const uploadFile = async (auth) => {
const buffer = await zoomDownloadLink();
const bs = new stream.PassThrough();
request(buffer).pipe(bs);
const drive = google.drive({ version: "v3", auth });
var fileMetadata = {
name: `${today.substring(0, 4)}.${today.substring(5, 7)}.${today.substring(8,10)} - Town Hall.mp4`,
parents: ["XXXXXXXXXXXXXXXXX"],
};
var media = {
mimeType: "video/mp4",
body: bs,
};
return await drive.files.create(
{
resource: fileMetadata,
media: media,
fields: "id",
uploadType: "resumable",
supportsAllDrives: true,
},
function (err, res) {
if (err) {
console.log(err);
} else {
console.log("File Id: ", res.data.id);
}
}
);
};
Also, something important when you are calling APIs inside cloud functions is the time out. Check on your CF time out is enough to wait for the API call response.
Also, you can use the Promise function to force wait for the response:
const result = uploadFile(aut);
const _response = await Promise.all(result);

Nodejs TypeError: Cannot read properties of undefined (reading 'refresh')

I need your help, it turns out that I am trying to use the Hubstaff api. I am working on nodejs to make the connection, I followed the documentation (official hubstaff api documentation) and use the methods they give as implementation examples (example of implementation nodejs).
But I get the following error:
I don't know why this happens, and I can't find more examples of how I can implement this api. The openid-client lib is used to make the connection through the token and a state follow-up is carried out to refresh the token.
To be honest, I'm not understanding how to implement it. Can someone who has already used this API give me a little explanation? I attach the code
hubstaffConnect.util
const {
Issuer,
TokenSet
} = require('openid-client');
const fs = require('fs');
const jose = require('jose');
// constants
const ISSUER_EXPIRE_DURATION = 7 * 24 * 60 * 60; // 1 week
const ACCESS_TOKEN_EXPIRATION_FUZZ = 30; // 30 seconds
const ISSUER_DISCOVERY_URL = 'https://account.hubstaff.com';
// API URl with trailing slash
const API_BASE_URL = 'https://api.hubstaff.com/';
let state = {
api_base_url: API_BASE_URL,
issuer_url: ISSUER_DISCOVERY_URL,
issuer: {}, // The issuer discovered configuration
issuer_expires_at: 0,
token: {},
};
let client;
function loadState() {
return fs.readFileSync('./configState.json', 'utf8');
}
function saveState() {
fs.writeFileSync('./configState.json', JSON.stringify(state, null, 2), 'utf8');
console.log('State saved');
}
function unixTimeNow() {
return Date.now() / 1000;
}
async function checkToken() {
if (!state.token.access_token || state.token.expires_at < (unixTimeNow() + ACCESS_TOKEN_EXPIRATION_FUZZ)) {
console.log('Refresh token');
state.token = await client.refresh(state.token);
console.log('Token refreshed');
saveState();
}
}
async function initialize() {
console.log('API Hubstaff API');
let data = loadState();
data = JSON.parse(data);
if (data.issuer) {
state.issuer = new Issuer(data.issuer);
state.issuer_expires_at = data.issuer_expires_at;
}
if (data.token) {
state.token = new TokenSet(data.token);
}
if (data.issuer_url) {
state.issuer_url = data.issuer_url;
}
if (data.api_base_url) {
state.api_base_url = data.api_base_url;
}
if (!state.issuer_expires_at || state.issuer_expires_at < unixTimeNow()) {
console.log('Discovering');
state.issuer = await Issuer.discover(state.issuer_url);
state.issuer_expires_at = unixTimeNow() + ISSUER_EXPIRE_DURATION;
console.log(state.issuer);
}
client = new state.issuer.Client({
// For personal access token we can use PAT/PAT.
// This is only needed because the library requires a client_id where as the API endpoint does not require it
client_id: 'PAT',
client_secret: 'PAT',
});
saveState();
console.log('API Hubstaff initialized');
}
async function request(url, options) {
await checkToken();
let fullUrl = state.api_base_url + url;
return client.requestResource(fullUrl, state.token, options);
}
function tokenDetails() {
let ret = {};
if (state.token.access_token) {
ret.access_token = jose.JWT.decode(state.token.access_token);
}
if (state.token.refresh_token) {
ret.refresh_token = jose.JWT.decode(state.token.refresh_token);
}
return ret;
}
module.exports = {
initialize,
checkToken,
request,
tokenDetails
};
controller
const usersGet = async(req, res = response) => {
const response = await api.request('v2/organizations', {
method: 'GET',
json: true,
});
const body = JSON.parse(response.body);
res.render('organizations', {
title: 'Organization list',
organizations: body.organizations || []
});
};

Nodejs AWS Lambda s3 getObject method returns nothing

The script used when trying to get contents from the csv stored in the s3 bucket
const mysql = require("mysql");
const fs = require("fs");
const { google } = require("googleapis");
const AWS = require("aws-sdk");
const client = new AWS.SecretsManager({ region: "eu-west-1" });
const analyticsreporting = google.analyticsreporting("v4");
const csv = require('ya-csv')
const fastCsv = require('fast-csv')
const s3 = new AWS.S3();
const getParams = {
Bucket: 'data',
Key: 'athena_test/nameplate.csv'
};
exports.handler = async (context, event) => {
const data = await s3.getObject(getParams, function (err, data){
if(err){console.log("ERROR: ",err)}
else {return data}
})
console.log(data.Body)
}
the console log returns undefined rather than the contents of the csv
Hey you can try this one:-
const csv = require('#fast-csv/parse');
const s3Stream = await s3.getObject(params).createReadStream();
const data = await returnDataFromCSV();
console.log(data.Body);
const returnDataFromCSV =()=> {
let promiseData = new Promise((resolve, reject) => {
const parser = csv
.parseStream(csvFile, { headers: true })
.on("data", (data) => {
console.log('Parsed Data:-', data);
})
.on("end", ()=> {
resolve("CSV finished here");
})
.on("error",()=> {
reject("if failed");
});
});
try {
return await promiseData;
} catch (error) {
console.log("Get Error: ", error);
return error;
}
}
CreateStream: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#createReadStream-property

Can't get the data from the stream

I have some code that in which I build a minIO client and the I try to access an object through the getObject method and the perform other tasks. The thing is I can't get the data from the stream. I have tried to with Buffer.from and push the data in the buff array. No result. Any thoughts??? Thank you!
The code is below:
var Minio = require('minio')
var Buffer = require('buffer').Buffer;
async function call(){
var minioClient = new Minio.Client({
endPoint: 'localhost',
port: 9000,
useSSL: false,
accessKey: 'admin',
secretKey: 'password'
});
minioClient.listBuckets(function (err, buckets) {
if (err)
return console.log(err);
console.log('buckets :', buckets);
});
var buff = [];
var size = 0;
await minioClient.getObject("test2", "test.jpg").then( function(dataStream) {
dataStream.on('data', async function(chunk) {
buff.push(chunk)
size += chunk.length
})
dataStream.on('end', function() {
console.log('End. Total size = ' + size)
console.log("End Buffer : " + buff)
})
dataStream.on('error', function(err) {
console.log(err)
})
}).catch((err)=>console.log(err));
console.log("Buffer = " + buff);
return buff;
}
var data = call();
console.log("Data: " + data);
the reponse is below:
Data: [object Promise]
buckets : [
{ name: 'test1', creationDate: 2021-08-25T18:36:40.544Z },
{ name: 'test2', creationDate: 2021-08-25T19:42:47.558Z }
]
End. Total size = 3844
End Buffer: ����►JFIF☺☺☺����☺vPhotoshop 3.08BIM♦♦☺N∟☻☻☻∟☻¶ww.public-domain-image.com∟☻t,Murphy Karen, U.S. Fish and Wildlife Service∟☻zww.public-domain-im
age.com∟☻(�Please consider to donate and to link back to http://www..public-domain-image.com also give credit for this and any other pictures you used.∟☻A
"http://www.public-domain-image.com∟☻18991230∟☻<♂000000+01008BIM♦♂��C♥☻☻♥☻☻♥♥♥♥♦♥♥♦♣♣♦♦♣
♀
♀♀♂
¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶¶����♥☺"☻◄☺♥◄☺��▼☺♣☺☺☺☺☺☺☺☻♥♦♣♠
♂���►☻☺♥♥☻♦♥♣♣♦♦☺}☺☻♥♦◄♣↕!1A♠‼Qa"q¶2��#B��§R��$3br�
▬↨↑↓→%&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz���������������������������������������������������������������������������▼☺♥☺☺☺☺☺☺☺☺☺☺☻♥♦♣♠
♂���◄☻☺☻♦♦♥♦♣♦♦☺☻w☺☻♥◄♦♣!1♠↕AQaq‼"2¶B���� #3R�§br�
▬$4�%�↨↑↓→&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz��������������������������������������������������������������������������♀♥☺☻◄♥◄?���M∟q�VŞ�↨▼-hY��oO
Ҵ���0�§̏a2���↑`/?J�ob#lm▼�^����▲♣Zk}�☼CBв�ه♀
♀{��֡r�F>��→|����T�↔���-��RXm���6zRǤ^2)►�3�"�[M?ΙC(�z�d�X���c∟V�:∟�����6gA��8��o��"y⌂�8<
y�q�x�c�=☼j��↔M▼♫�>L`�zUۭ♠%�8DeR0♫}k☼D�y/e�k�펽8�(#�#��%�m��r驔w:�☼٫FP(�ۊ��4peO�~U�౟)Xd→�
☺��G�r�j��4§�►�~n��t��3*�9◄�d:y���zԏK'♀=3�U�q\
��i→�?J�9O;�∟�ٔ�¶�m��~u�Gf:c"��¿�i�→\Æ�����↔=�a��z��:n1�Vc���L��Br�dZ��L↑���w◄�;V�Yv�2K'�]�!O§�W!��� |��§��_K,�B=��$��~N����U�>����n�§�↑&a)3'K�o8<h��0
����G�7↨♥.rߕI���R�<v���K[��iJ�#v��~�▬`�¨�B���U��U=M[x�����X�♠�z�+��ע��⌂F��Gj���a�1�>ұ4�6�↑�nF��♀◄\�M;�IE=ϋ���"�▼ː8����J͹#�kҌ�3�D♂▲��L��o�M�Q��z|ڛ'tX��☼ީ�◄
�♣W↕mۃ��ٮq�↔h��(�pĜ��!�n�U♂����¶���Q�+�;∟���Eh����q�rI��5NI<�·▲�j�Bw�=�G+lDS���aj��c▲��]<(#ҹ�{���mX�.��\�Vu�tGc�8N���7��☼5��� �����(∟�*3����Y�,Y�9�◄x
�ڹ(n�j��▼'�§�ܩ7���x♫?:��`pX‼^/�
�T����?)������Sqc�♂��3Ϧ=k�Q�s�▬2���*�▬▼�Y�Ѝ‼�=s^/�
�P�↓�.↑��⌂�[o☺�sD#o►�§▲���G,{��=^mj�,��4♥��☻�7�l$�↨��♫?�☼�!��36���_�'��∟��A⌂g��#�↨JO�☼�↔�5����;��\B�(�♦���3��z��q�%������
ci���y, ��§.��Kx→�M�X�y�3F�☼8�G��x���:Yۻ_�#�ƕr��I�YTM�߰#�▼Ҳӫ4�>=☼����;������T�<���������O�Jl:��3־t�⌂d▼☼I!W�/�G'��
�t�؏�ן�uV�����♣X���>↑�pm���~hʓ�§�=q����&��`o��[VmgWq�]W���]f��♦��^�t���X♫�+���G����EN��♂�F1��ULd▼ҩ��2~���~�GN9�n�>C��H��#�cm�u�+1f�9����4���Ea֟�g§��▲�ȩ���
�
t�0�m�W?�s���ţ�4�Y�#�N~���▼Xܫ�↑��`��׍�ѕmȥJ������1#?19♦(������↓4���<��♥7J<�܌�∟��:zn8?ʻ☼;�=�ƭ��CScb2I���u2����9"=¶�c��Wx8�29§q��}☼���C����
QD��⌂.jm�^j����g>�♦�:
��♂ o��B��4�$\��ٹ8�9Q�E�{a!�Q�Z�l�/|TV�nS�sV�✔!s�����`�E�/O��;q��Mi��1�}�ǡ�M▬�♥☻I�↕���5�[�▲B�,:m/�����=*qF֙n#���↔8?S]%�1�! ���
�↑���#��V♂�1�ܲ�V�2D��I►���r�3�ֹӻ:,|��↔�+�u`ib�]�,�I��X��m ����W▬�I>�♫���5w����∟��$#�y�O��V`�↨ܟ����!fY1��↔vը��♫��h�%HԊm�u��]�+Hw∟���L�?J�↨↕6�߇?�7�▼0���Lw5▬
�qA.E֝���Gc��D�▲☺+�ʫ_�z���☼���ݕ�}*���9→��§A!�♥�♥?έ[� 92↨���
�s�WIiek$�2∟Ɩhś∟�§H'����E�m↨m�����;�w��b����⌂♥|♥�⌂��t▬q�→eNJ↓ss;�#ʌG1��g↑↔k�T����/�5���u=8˗���ac:�u���#j�G�C��k��i�♥�♀[���}��?�}�&�k7��h_���U;�
�ׇ3ivr���S�+?�4�4���∟↨%[���F�♣�F♥1s�v����ކ±�↑-�Ҵ�,�\▲��R<yH݉��.s�槎c#r�O�d�6��%�:U�9�p3�8§fw4�઀ͳ�ˌ�b̽�>�C�c�F;�O;z��n~�h♫cR6��_�Ҭ�Ŕ.q�:VLst���Y�G�ڋ�r���54
sw^���16�►☼�J_�,��♂.s�♫*�K�6waq���M��.t����,↨AYC��H##�‼��o���$a��t�*��↓↑�M�⌂�A�֨Jz�����
↕�a�]E��ۇux�*2Us�^}I4�g�Nw��փ�Qx�O��P�R�c�<�)?���Ew�?�֡j���{[����v����→��H�W���x�nLHǟ�♥���K���%�>P˟�-�z�:�☻�֚z�k���
A running example. to get the the data as you need it.
var minioClient = new Minio.Client({
endPoint: 'play.min.io',
port: 9000,
useSSL: true,
accessKey: 'Q3AM3UQ867SPQQA43P2F',
secretKey: 'zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG'
})
async function call() {
const promise = new Promise((resolve, reject) => {
var buff = [];
var size = 0;
minioClient.getObject("sph-my-bucket", "test-replication.txt").then(function(dataStream) {
dataStream.on('data', async function(chunk) {
buff.push(chunk)
size += chunk.length
})
dataStream.on('end', function() {
console.log('End. Total size = ' + size)
// console.log("End Buffer : " + buff)
resolve(buff)
})
dataStream.on('error', function(err) {
console.log(err)
reject(err)
})
}).catch(reject);
})
return promise
}
async function getData() {
const data = await call()
console.log(data.toString())
}
getData()
Async/await is basically a sintax sugar for Promises. But you have to keep in mind that an asyncfunction always returns a Promise, even if you explicitly don't wrap its return in one.
With this said, as we know, if we want to retrieve the information, just call Promise.then() and assign the result to your global variable. Note: setTimeout just adds a delay to until the Promise fullfill.
async function getAsync() {
var response = await fetch('https://placekitten.com/500/500');
var data = await response.text();
return data;
}
var myData;
getAsync().then(data => myData = data);
setTimeout(() => console.log(myData), 3e3)

Connection resets after 60 seconds in node.js upload application

I've written an application in node.js consisting of a server and a client for storing/uploading files.
For reproduction purposes, here's a proof of concept using a null write stream in the server and a random read stream in the client.
Using node.js 12.19.0 on Ubuntu 18.04. The client depends on node-fetch v2.6.1.
The issue I have is after 60 seconds the connection is reset and haven't found a way to make this work.
Any ideas are appreciated.
Thank you.
testServer.js
// -- DevNull Start --
var util = require('util')
, stream = require('stream')
, Writable = stream.Writable
, setImmediate = setImmediate || function (fn) { setTimeout(fn, 0) }
;
util.inherits(DevNull, Writable);
function DevNull (opts) {
if (!(this instanceof DevNull)) return new DevNull(opts);
opts = opts || {};
Writable.call(this, opts);
}
DevNull.prototype._write = function (chunk, encoding, cb) {
setImmediate(cb);
}
// -- DevNull End --
const http = require('http');
const server = http.createServer();
server.on('request', async (req, res) => {
try {
req.socket.on('end', function() {
console.log('SOCKET END: other end of the socket sends a FIN packet');
});
req.socket.on('timeout', function() {
console.log('SOCKET TIMEOUT');
});
req.socket.on('error', function(error) {
console.log('SOCKET ERROR: ' + JSON.stringify(error));
});
req.socket.on('close', function(had_error) {
console.log('SOCKET CLOSED. IT WAS ERROR: ' + had_error);
});
const writeStream = DevNull();
const promise = new Promise((resolve, reject) => {
req.on('end', resolve);
req.on('error', reject);
});
req.pipe(writeStream);
await promise;
res.writeHead(200);
res.end('OK');
} catch (err) {
res.writeHead(500);
res.end(err.message);
}
});
server.listen(8081)
.on('listening', () => { console.log('Listening on port', server.address().port); });
testClient.js
// -- RandomStream Start --
var crypto = require('crypto');
var stream = require('stream');
var util = require('util');
var Readable = stream.Readable;
function RandomStream(length, options) {
// allow calling with or without new
if (!(this instanceof RandomStream)) {
return new RandomStream(length, options);
}
// init Readable
Readable.call(this, options);
// save the length to generate
this.lenToGenerate = length;
}
util.inherits(RandomStream, Readable);
RandomStream.prototype._read = function (size) {
if (!size) size = 1024; // default size
var ready = true;
while (ready) { // only cont while push returns true
if (size > this.lenToGenerate) { // only this left
size = this.lenToGenerate;
}
if (size) {
ready = this.push(crypto.randomBytes(size));
this.lenToGenerate -= size;
}
// when done, push null and exit loop
if (!this.lenToGenerate) {
this.push(null);
ready = false;
}
}
};
// -- RandomStream End --
const fetch = require('node-fetch');
const runSuccess = async () => { // Runs in ~35 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(256e6) // new RandomStream(1024e6)
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
const runFail = async () => { // Fails after 60 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(1024e6)
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
// runSuccess().then(() => process.exit(0));
runFail().then(() => process.exit(0));
I tried (unsuccessfully) to reproduce what you are seeing based on your code example. Neither the success call is completing in ~35 seconds nor is the error being thrown in 60 seconds.
However, that being said, I think what is happening here is that your client is terminating the request.
You can increase the timeout by adding a httpAgent to the fetch PUT call. You can then set a timeout in the httpAgent.
const http = require('http');
...
const runFail = async () => { // Fails after 60 seconds
const t = Date.now();
try {
const resp = await fetch('http://localhost:8081/test', {
method: 'PUT',
body: new RandomStream(1024e6),
agent: new http.Agent({ keepAlive: true, timeout: 300000 })
});
const data = await resp.text();
console.log(Date.now() - t, data);
} catch (err) {
console.warn(Date.now() - t, err);
}
};
See the fetch docs for adding a custom http(s) agent here
See options for creating http(s) agent here
This turned out to be a bug in node.js
Discussion here: https://github.com/nodejs/node/issues/35661

Categories