How to make AWS S3 request retryable? - javascript

Sometimes when I download multiple files from an S3 bucket using the Node SDK, the request times out for one of the downloads. I would like the request to simply retry the download.
The JSON error response says retryable: false.
Is there a way I can configure it to be true?
Here is the error:
{ TimeoutError: Connection timed out after 480000ms
at ClientRequest.<anonymous> (node_modules/aws-sdk/lib/http/node.js:83:34)
at Object.onceWrapper (events.js:272:13)
at ClientRequest.emit (events.js:180:13)
at ClientRequest.emit (domain.js:421:20)
at TLSSocket.emitTimeout (_http_client.js:703:34)
at Object.onceWrapper (events.js:272:13)
at TLSSocket.emit (events.js:180:13)
at TLSSocket.emit (domain.js:421:20)
at TLSSocket.Socket._onTimeout (net.js:396:8)
at ontimeout (timers.js:466:11)
at tryOnTimeout (timers.js:304:5)
at Timer.listOnTimeout (timers.js:267:5)
message: 'Connection timed out after 480000ms',
code: 'TimeoutError',
time: 2019-04-01T17:58:41.010Z,
region: 'us-west-2',
hostname: 'bucket-name',
retryable: false,
statusCode: 200,
retryDelay: 129.76727762396757 }

I am not sure how you are using the SDK since you didn't share your code, but this might help you.
// setting retries
var s3 = new AWS.S3({ apiVersion: '2006-03-01', maxRetries: 10 });
S3 SDK documentation - maxRetries
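Beyond maxRetries, the SDK's HTTP timeout and retry backoff can also be tuned. A minimal sketch (the values are illustrative, not recommendations):
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
  apiVersion: '2006-03-01',
  maxRetries: 10,                    // retry retryable errors up to 10 times
  retryDelayOptions: { base: 300 },  // base delay (ms) for exponential backoff
  httpOptions: {
    connectTimeout: 5000,            // fail fast if a socket cannot be opened
    timeout: 120000                  // request timeout in ms (SDK default is 120000)
  }
});
In general the SDK only retries errors it marks retryable: true, so if a TimeoutError keeps coming back with retryable: false you may need an application-level retry, for example re-issuing getObject from a catch handler.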

Related

node.js get https no responding 504

I am trying to make an https request in my backend node.js web app. I have the following code:
const express = require('express');
const https = require('https');
const app = express();

app.get("/", function(req, res) {
  const url = "https://www.tkmaxx.com/uk/en/women/edits/big-brand-drop/c/01240000/autoLoad?page=1";
  https.get(url, function(response) {
    console.log(response.statusCode);
  });
  res.send("running test");
});

app.listen(3000, function() {
  console.log("Server started on port 3000");
});
I am getting the following error:
node:events:504
throw er; // Unhandled 'error' event
^
Error: socket hang up
at connResetException (node:internal/errors:691:14)
at TLSSocket.socketOnEnd (node:_http_client:466:23)
at TLSSocket.emit (node:events:538:35)
at endReadableNT (node:internal/streams/readable:1345:12)
at processTicksAndRejections (node:internal/process/task_queues:83:21)
Emitted 'error' event on ClientRequest instance at:
at TLSSocket.socketOnEnd (node:_http_client:466:9)
at TLSSocket.emit (node:events:538:35)
at endReadableNT (node:internal/streams/readable:1345:12)
at processTicksAndRejections (node:internal/process/task_queues:83:21) { code: 'ECONNRESET' }
Does anyone know what's going on? Is the issue related to request headers or the user agent? How can I set those?
The request is only accepted by the remote server if it has an Accept header and also Connection: keep-alive. (These are headers a browser typically sets.)
https.get("https://www.tkmaxx.com/uk/en/women/edits/big-brand-drop/c/01240000/autoLoad?page=1", {
headers: {
accept: "text/html",
connection: "keep-alive"
}
}, function(response) {...});
(Perhaps this is a mechanism which the remote server employs to guard against requests made by clients other than browsers?)
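For completeness, a minimal sketch of the whole handler with those headers; the error handler and the 502 reply are illustrative additions (an unhandled 'error' event on the ClientRequest is what crashed the process in the question):
const express = require('express');
const https = require('https');
const app = express();

app.get("/", function(req, res) {
  const url = "https://www.tkmaxx.com/uk/en/women/edits/big-brand-drop/c/01240000/autoLoad?page=1";
  const upstream = https.get(url, {
    headers: { accept: "text/html", connection: "keep-alive" }
  }, function(response) {
    console.log(response.statusCode);
    response.resume();  // drain the body so the socket is released
    res.send("upstream status: " + response.statusCode);
  });
  // Without this, an ECONNRESET becomes an unhandled 'error' event and kills the process.
  upstream.on("error", err => res.status(502).send(err.message));
});

app.listen(3000, function() {
  console.log("Server started on port 3000");
});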

Getting This Error When Trying To Connect To MongoDB database

Not sure why I'm getting this error, as it was working the last time I opened my project, but this is what I get when I attempt to connect to my database.
Error Code
PS C:\Users\Joseph\Desktop\server> node server.js
Server is running on port:${port}
(node:18576) DeprecationWarning: current Server Discovery and Monitoring engine is deprecated, and will be removed in a future version. To use the new Server Discover and Monitoring engine, pass option { useUnifiedTopology: true } to
the MongoClient constructor.
(Use `node --trace-deprecation ...` to show where the warning was created)
C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\topologies\server.js:438
new MongoNetworkError(
^
MongoNetworkError: failed to connect to server [cluster0-shard-00-02.qqrcr.mongodb.net:27017] on first connect [MongoError: bad auth : Authentication failed.
at Connection.messageHandler (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:359:19)
at Connection.emit (node:events:379:20)
at processMessage (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:451:10)
at TLSSocket.<anonymous> (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:620:15)
at TLSSocket.emit (node:events:379:20)
at addChunk (node:internal/streams/readable:313:12)
at readableAddChunk (node:internal/streams/readable:288:9)
at TLSSocket.Readable.push (node:internal/streams/readable:227:10)
at TLSWrap.onStreamRead (node:internal/stream_base_commons:190:23) {
ok: 0,
code: 8000,
codeName: 'AtlasError'
}]
at Pool.<anonymous> (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\topologies\server.js:438:11)
at Pool.emit (node:events:379:20)
at C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\pool.js:562:14
at C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\pool.js:1009:9
at callback (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connect.js:75:5)
at C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connect.js:147:27
at C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\auth\scram.js:185:14
at _callback (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:328:7)
at Connection.messageHandler (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:359:9)
at Connection.emit (node:events:379:20)
at processMessage (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:451:10)
at TLSSocket.<anonymous> (C:\Users\Joseph\Desktop\node_modules\mongodb\lib\core\connection\connection.js:620:15)
at TLSSocket.emit (node:events:379:20)
at addChunk (node:internal/streams/readable:313:12)
at readableAddChunk (node:internal/streams/readable:288:9)
at TLSSocket.Readable.push (node:internal/streams/readable:227:10)
PS C:\Users\Joseph\Desktop\server>
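The deprecation warning above already says what to pass for the first issue ({ useUnifiedTopology: true }), and "bad auth : Authentication failed" (AtlasError, code 8000) means Atlas rejected the username/password in the connection string, so double-check the credentials (and URL-encode any special characters in the password). A minimal connection sketch, assuming the mongodb 3.x driver; [USER], [PASSWORD], [CLUSTER] and [DB] are placeholders:
const { MongoClient } = require('mongodb');

// Placeholders: copy the real connection string from the Atlas UI.
const uri = 'mongodb+srv://[USER]:[PASSWORD]@[CLUSTER].mongodb.net/[DB]?retryWrites=true&w=majority';

MongoClient.connect(uri, { useNewUrlParser: true, useUnifiedTopology: true })
  .then(client => {
    console.log('Connected to Atlas');
    return client.close();
  })
  .catch(err => {
    // "bad auth" / AtlasError 8000 ends up here when the credentials are wrong.
    console.error(err.message);
  });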

Firebase Functions and Autodesk Forge integration

I have a problem with a fetch request inside Firebase Functions for the Autodesk Forge token.
Here is the error that shows up in the Functions logs:
FetchError: request to https://developer.api.autodesk.com/authentication/v1/authenticate failed,
reason: getaddrinfo EAI_AGAIN developer.api.autodesk.com:443
at ClientRequest.<anonymous> (/srv/node_modules/node-fetch/lib/index.js:1455:11)
at emitOne (events.js:116:13)
at ClientRequest.emit (events.js:211:7)
at TLSSocket.socketErrorListener (_http_client.js:401:9)
at emitOne (events.js:116:13)
at TLSSocket.emit (events.js:211:7)
at emitErrorNT (internal/streams/destroy.js:66:8)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickDomainCallback (internal/process/next_tick.js:219:9)
I have already tried calling the Forge API from inside my React project, and figured out it would be a CORS problem.
const snapshot = change.after;
console.log(snapshot);
const api = "https://developer.api.autodesk.com/authentication/v1/authenticate";
const search = () =>
  fetch(`${api}`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded'
    },
    body: JSON.stringify(`client_id=${process.env.REACT_APP_FORGE_CLIENT_ID}&client_secret=${process.env.REACT_APP_FORGE_CLIENT_SECRET}&grant_type=client_credentials&scope=data:read`)
  }).then(res => res.json());
search().then((res) => {
  const data = res;
  return snapshot.ref.parent.child('token').set(data);
});
})
Since Firebase Functions run on the Google Cloud backend, CORS does not really come into play here...
You must be on the free plan: getaddrinfo EAI_AGAIN indicates a DNS lookup timeout, which is due to the limitations of the free tier, where outbound networking is limited to Google services. Upgrade your plan to Flame or Blaze.
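Once outbound networking is available, the request body may also need attention: with Content-Type: application/x-www-form-urlencoded the body should be the raw urlencoded string, not JSON.stringify of it (which wraps the string in quotes). A sketch reusing the question's endpoint and environment variables (getForgeToken is just an illustrative name):
const fetch = require('node-fetch');

const api = 'https://developer.api.autodesk.com/authentication/v1/authenticate';

const getForgeToken = () =>
  fetch(api, {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    // Plain urlencoded body, no JSON.stringify
    body: `client_id=${process.env.REACT_APP_FORGE_CLIENT_ID}` +
          `&client_secret=${process.env.REACT_APP_FORGE_CLIENT_SECRET}` +
          `&grant_type=client_credentials&scope=data:read`
  }).then(res => res.json());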

How to add files in a POST using request-promise of npm?

I'm trying to send a curl request using "request-promise" from npm. The curl command that I want to send is as follows:
`curl \
-H "Content-Type: multipart/form-data" \
-F "original=#./${parent_path}" \
-F "modified=#./${version_path}" \
-o "${out_path}" \
${URI}`
My code in node is:
BIMFile.findOne({ _id: responseDB.parent_id })
  .then(parent => {
    parent_path = parsePath(parent.path);
    version_path = parsePath(responseDB.path);
    console.log("PARENT!", parent_path, version_path);
    const URI = `${protocol}://${host_img_diff}:${port_img_diff}/diff`;
    out_path = version_path + '.tmp.jpg';
    request.post({
      url: URI,
      formData: {
        file: fs.createReadStream(parent_path),
        file: fs.createReadStream(version_path)
      }
    }).then((apiResponse) => {
      console.log('apiUPDATEResponse', apiResponse);
    })
The result is:
Unhandled rejection StatusCodeError: 400 - "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n<title>400 Bad Request</title>\n<h1>Bad Request</h1>\n<p>The browser (or proxy) sent a request that this server could not understand.</p>\n"
at new StatusCodeError (/backend/node_modules/request-promise-core/lib/errors.js:32:15)
at Request.plumbing.callback (/backend/node_modules/request-promise-core/lib/plumbing.js:104:33)
at Request.RP$callback [as _callback] (/backend/node_modules/request-promise-core/lib/plumbing.js:46:31)
at Request.self.callback (/backend/node_modules/request/request.js:185:22)
at emitTwo (events.js:126:13)
at Request.emit (events.js:214:7)
at Request.<anonymous> (/backend/node_modules/request/request.js:1161:10)
at emitOne (events.js:116:13)
at Request.emit (events.js:211:7)
at IncomingMessage.<anonymous> (/backend/node_modules/request/request.js:1083:12)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1055:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickCallback (internal/process/next_tick.js:180:9)
The server returns the following message:
xxx.xx.x.xx - - [05/Apr/2019 10:00:15] "POST /diff HTTP/1.1" 400 -
As you can see, the server couldn't understand the POST request. Does anyone know how to add the files correctly?
Sending files using formData, as you're doing, is the right approach, but one of the issues is that you're setting both files on the same property, so only the last one is kept.
console.log({ file: 1, file: 2 });
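// prints { file: 2 } because duplicate object keys overwrite each other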
So if file can receive multiple files, you need to use an array:
const formData = {
  file: [
    fs.createReadStream(parent_path),
    fs.createReadStream(version_path)
  ]
};
If you need additional metadata, the request module provides a way too:
Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}. Use case: for some types of streams, you'll need to provide "file"-related information manually.
See the form-data README for more information about options: https://github.com/form-data/form-data
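Putting it together: if the endpoint expects the same field names as the curl command (original and modified), the formData keys can name them explicitly, and the { value, options } style supplies the per-file metadata. A sketch, assuming request-promise as in the question (filenames and content types are illustrative):
const rp = require('request-promise');
const fs = require('fs');

rp.post({
  url: URI,
  formData: {
    original: {
      value: fs.createReadStream(parent_path),
      options: { filename: 'original.jpg', contentType: 'image/jpeg' }
    },
    modified: {
      value: fs.createReadStream(version_path),
      options: { filename: 'modified.jpg', contentType: 'image/jpeg' }
    }
  }
}).then((apiResponse) => {
  console.log('apiUPDATEResponse', apiResponse);
});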

How to create an Azure AppendBlob from node.js

I have installed the npm azure-storage package.
On Azure I have created a Storage Account and a container.
I then try to create an Append Blob:
const azure = require('azure-storage');
const service = azure.createBlobService("[ACCOUNT]", "[KEY]");

service.createAppendBlobFromText("[CONTAINER]",
  "some-blob-name",
  "some-text",
  {},
  (err, result) => {
    console.log('err ->', err);
    console.log('result ->', result);
  });
The result of calling this is:
err -> { Error
at Function.StorageServiceClient._normalizeError (/[REMOVED]/node_modules/azure-storage/lib/common/services/storageserviceclient.js:1191:23)
at BlobService.StorageServiceClient._processResponse (/[REMOVED]/node_modules/azure-storage/lib/common/services/storageserviceclient.js:738:50)
at Request.processResponseCallback [as _callback] (/[REMOVED]/node_modules/azure-storage/lib/common/services/storageserviceclient.js:311:37)
at Request.self.callback (/[REMOVED]/node_modules/request/request.js:186:22)
at emitTwo (events.js:125:13)
at Request.emit (events.js:213:7)
at Request.<anonymous> (/[REMOVED]/node_modules/request/request.js:1163:10)
at emitOne (events.js:115:13)
at Request.emit (events.js:210:7)
at IncomingMessage.<anonymous> (/[REMOVED]/node_modules/request/request.js:1085:12)
at Object.onceWrapper (events.js:314:30)
at emitNone (events.js:110:20)
at IncomingMessage.emit (events.js:207:7)
at endReadableNT (_stream_readable.js:1045:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickCallback (internal/process/next_tick.js:180:9)
name: 'StorageError',
message: 'Append blobs are not supported.\nRequestId:ed1777f4-601c-00cf-19a0-bb77ba000000\nTime:2018-03-14T14:25:50.8138962Z',
code: 'BlobTypeNotSupported',
statusCode: 400,
requestId: 'ed1777f4-601c-00cf-19a0-bb77ba000000' }
result -> null
I have not been able to find anything when searching for the error.
Am I missing something here?
Please check the redundancy kind of the storage account in which you're trying to create this blob.
Blob type support varies by the storage account's redundancy kind.
For example, a ZRS Classic storage account only supports block blobs, while a Premium LRS storage account only supports page blobs.
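If changing the account is not an option right away, here is a small sketch of reacting to that specific error code (the BlobTypeNotSupported code from the output above) so the failure is explicit; the call itself is the same as in the question:
service.createAppendBlobFromText("[CONTAINER]", "some-blob-name", "some-text", {}, (err, result) => {
  if (err && err.code === 'BlobTypeNotSupported') {
    // This storage account kind/redundancy does not allow append blobs.
    console.error('Append blobs not supported on this account:', err.message);
    return;
  }
  if (err) throw err;
  console.log('result ->', result);
});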
