Running async function locally using NodeJS in Windows 10 - javascript

I am struggling to run an async function taken from a Google example, along with its environment variables, locally on Windows 10. I have created a bucket in GCS and uploaded my .raw file.
I then created a .env file which contains the following:
HOST=localhost
PORT=3000
GOOGLE_APPLICATION_CREDENTIALS=GDeveloperKey.json
Doing this in AWS Lambda is just a case of wrapping the code within exports.handler = async (event, context, callback) => {
How can I emulate the same locally in Windows 10?
// Imports the Google Cloud client library
const speech = require('@google-cloud/speech');
// Creates a client
const client = new speech.SpeechClient();
/**
* TODO(developer): Uncomment the following lines before running the sample.
*/
// const gcsUri = 'gs://my-bucket/audio.raw';
// const encoding = 'Encoding of the audio file, e.g. LINEAR16';
// const sampleRateHertz = 16000;
// const languageCode = 'BCP-47 language code, e.g. en-US';
const config = {
  encoding: encoding,
  sampleRateHertz: sampleRateHertz,
  languageCode: languageCode,
};
const audio = {
  uri: gcsUri,
};
const request = {
  config: config,
  audio: audio,
};
// Detects speech in the audio file. This creates a recognition job that you
// can wait for now, or get its result later.
const [operation] = await client.longRunningRecognize(request);
// Get a Promise representation of the final result of the job
const [response] = await operation.promise();
const transcription = response.results
  .map(result => result.alternatives[0].transcript)

Wrap your await statements in an immediately-invoked async function.
Ex:
(async () => {
  // Detects speech in the audio file. This creates a recognition job that you
  // can wait for now, or get its result later.
  const [operation] = await client.longRunningRecognize(request);
  // Get a Promise representation of the final result of the job
  const [response] = await operation.promise();
  const transcription = response.results
    .map(result => result.alternatives[0].transcript)
})();
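To tie this back to the .env file from the question, a minimal local sketch (assuming the dotenv package is installed, GDeveloperKey.json sits next to the script, and using the sample's placeholder bucket URI and audio settings) could look like:
// Loads the variables from .env (including GOOGLE_APPLICATION_CREDENTIALS)
// into process.env before the client library reads them.
require('dotenv').config();

const speech = require('@google-cloud/speech');

(async () => {
  const client = new speech.SpeechClient();

  const request = {
    config: {
      encoding: 'LINEAR16',
      sampleRateHertz: 16000,
      languageCode: 'en-US',
    },
    audio: { uri: 'gs://my-bucket/audio.raw' }, // placeholder bucket/file
  };

  // Start the long-running recognition job and wait for its final result.
  const [operation] = await client.longRunningRecognize(request);
  const [response] = await operation.promise();
  const transcription = response.results
    .map(result => result.alternatives[0].transcript)
    .join('\n');
  console.log(transcription);
})().catch(console.error);
Running the file with node from the project folder should then print the transcription once the operation completes.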

Related

events.Swap is not triggered in web3 for a Uniswap Pool

Visual Studio Code 1.66.2
ganache v7.1.0 (@ganache/cli: 0.2.0, @ganache
I am new to Solidity and this is the first example I am trying out.
I am trying to get notified when there is a swap event in the Uniswap pool "ETH-APE" using:
uPair.events.Swap
The code seems to start, as it prints: Waiting for swaps...
I can see on Uniswap itself that there are regular swaps, but the swap event never seems to trigger, so the console.log never shows: someone swapped now!
I started the blockchain successfully with a command like this:
Note: I am using a real API key from Alchemy and a real mnemonic from ganache-cli
ganache-cli -f wss://eth-mainnet.alchemyapi.io/v2/myAPIkey --mnemonic "word1 word2 word3 etc" -p 7545
I have just followed some examples and am not sure exactly what I am doing.
As I am a beginner I must also ask about ganache. As I understand it, this is a "fake", local, sandboxed blockchain that exists only on my computer?
I think I understand that I start ganache-cli on my computer, but I am not sure whether uPair.events.Swap is listening to my local blockchain, which is not the REAL blockchain, and whether that is why the swap event is never triggered. If that is the case, what am I missing?
(I am not even sure I need ganache to listen for this swap event?)
I am also not sure what alchemyapi.io with myAPIkey is doing in the command that starts the local ganache-cli, when the same command uses the mnemonic generated by ganache?
// -- HANDLE INITIAL SETUP -- //
require("dotenv").config();
const Web3 = require('web3')
const IERC20 = require('@openzeppelin/contracts/build/contracts/ERC20.json')
const IUniswapV2Pair = require("@uniswap/v2-core/build/IUniswapV2Pair.json")
const IUniswapV2Factory = require("@uniswap/v2-core/build/IUniswapV2Factory.json")
const { ChainId, Token } = require("@uniswap/sdk")
let web3 = new Web3('ws://127.0.0.1:7545')
const main = async () => {
  const _eth_address = "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2";
  const _apecoin_address = "0x4d224452801aced8b2f0aebe155379bb5d594381";
  const _uniswap_factory_address = "0x5C69bEe701ef814a2B6a3EDD4B1652CB9cc5aA6f";
  const uFactory = new web3.eth.Contract(IUniswapV2Factory.abi, _uniswap_factory_address) // UNISWAP FACTORY CONTRACT
  const { token0Contract, token1Contract, token0, token1 } = await getTokenAndContract(_eth_address, _apecoin_address)
  let uPair = await getPairContract(uFactory, token0.address, token1.address)
  console.log("Waiting for swaps...");
  uPair.events.Swap({}, async () => {
    console.log("someone swapped now!");
  })
}
async function getPairAddress(_V2Factory, _token0, _token1) {
  const pairAddress = await _V2Factory.methods.getPair(_token0, _token1).call()
  return pairAddress
}
async function getPairContract(_V2Factory, _token0, _token1) {
  const pairAddress = await getPairAddress(_V2Factory, _token0, _token1)
  const pairContract = new web3.eth.Contract(IUniswapV2Pair.abi, pairAddress)
  return pairContract
}
async function getTokenAndContract(_token0Address, _token1Address) {
  const token0Contract = new web3.eth.Contract(IERC20.abi, _token0Address)
  const token1Contract = new web3.eth.Contract(IERC20.abi, _token1Address)
  const token0 = new Token(
    ChainId.MAINNET,
    _token0Address,
    18,
    await token0Contract.methods.symbol().call(),
    await token0Contract.methods.name().call()
  )
  const token1 = new Token(
    ChainId.MAINNET,
    _token1Address,
    18,
    await token1Contract.methods.symbol().call(),
    await token1Contract.methods.name().call()
  )
  return { token0Contract, token1Contract, token0, token1 }
}
main()
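As a point of comparison (this is not from the original post), web3.js 1.x also lets you consume the subscription as an event emitter rather than a bare callback; a minimal sketch using the same uPair instance might look like this:
// Sketch, assuming web3.js 1.x. Also note that a ganache fork is a local
// snapshot: only swaps executed against the fork itself fire here, so live
// mainnet swaps generally won't appear unless web3 is pointed at a live node
// (e.g. the Alchemy websocket endpoint) instead of ws://127.0.0.1:7545.
uPair.events.Swap({ fromBlock: 'latest' })
  .on('data', (event) => {
    console.log("someone swapped now!", event.returnValues);
  })
  .on('error', console.error);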

How to find if Azure File exists on NodeJS

I'm using Azure File storage with an Express JS backend to render the contents stored there.
I am writing the code based on https://learn.microsoft.com/en-us/javascript/api/@azure/storage-file-share/shareserviceclient?view=azure-node-latest
const { ShareServiceClient, StorageSharedKeyCredential } = require("@azure/storage-file-share");
const account = "<account>";
const accountKey = "<accountkey>";
const credential = new StorageSharedKeyCredential(account, accountKey);
const serviceClient = new ShareServiceClient(
`https://${account}.file.core.windows.net`,
credential
);
const shareName = "<share name>";
const fileName = "<file name>";
// [Node.js only] A helper method used to read a Node.js readable stream into a Buffer
async function streamToBuffer(readableStream) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    readableStream.on("data", (data) => {
      chunks.push(data instanceof Buffer ? data : Buffer.from(data));
    });
    readableStream.on("end", () => {
      resolve(Buffer.concat(chunks));
    });
    readableStream.on("error", reject);
  });
}
And you can view the contents through:
const downloadFileResponse = await fileClient.download();
const output = (await streamToBuffer(downloadFileResponse.readableStreamBody)).toString();
The thing is, I only want to find out whether the file exists, without spending time downloading the entire file. How could I do this?
I looked at https://learn.microsoft.com/en-us/javascript/api/@azure/storage-file-share/shareserviceclient?view=azure-node-latest
to see if the file client class has what I want, but it doesn't seem to have methods useful for this.
If you are using the @azure/storage-file-share (version 12.x) Node package, there's an exists method on ShareFileClient. You can use that to find out whether a file exists. Something like:
const fileExists = await fileClient.exists(); // returns true or false
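A minimal sketch of how that could fit together with the ShareServiceClient from the question (the account, share, and file names are placeholders):
const { ShareServiceClient, StorageSharedKeyCredential } = require("@azure/storage-file-share");

const account = "<account>";
const accountKey = "<accountkey>";
const credential = new StorageSharedKeyCredential(account, accountKey);
const serviceClient = new ShareServiceClient(
  `https://${account}.file.core.windows.net`,
  credential
);

async function fileExists(shareName, fileName) {
  // Navigate share -> root directory -> file, then ask the service whether
  // the file exists without downloading its contents.
  const fileClient = serviceClient
    .getShareClient(shareName)
    .rootDirectoryClient
    .getFileClient(fileName);
  return fileClient.exists();
}

fileExists("<share name>", "<file name>").then(console.log); // true or false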

Having trouble understanding multiple awaits in async functions

I wrote this function to generate png files from some svg files across a few directories. I was doing the below functionality synchronously and it was working as expected (same code as below but with readFileSync), but I was told to redo it using only promisified fs functions.
The current code skips a couple of files in both groupA and groupB, plus it's swapping widths. For example, I've noticed the conversion function won't generate output for svg1 of dirB, but will generate for svg1 of dirA, though with an incorrect width that matches svg1 of dirB.
Most files convert correctly, but a handful don't. My guess is it's a timing issue, so how do I fix that while still keeping the fs functionality all promisified?
const { createConverter } = require('convert-svg-to-png');
const fs = require('fs');
const path = require('path');
const util = require('util');
const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);
async function convertSvgFiles(dirPath) {
  const converter = createConverter();
  try {
    const files = await readdir(dirPath);
    for (let file of files) {
      const currentFile = path.join(dirPath, file);
      const fileContents = await readFile(currentFile);
      const fileWidth = fileContents.toString('utf8').match(/* regex to capture viewBox width */);
      await converter.convertFile(currentFile, { width: fileWidth });
    }
  } catch (err) {
    console.warn('Error while converting a file to png', '\n', err);
  } finally {
    await converter.destroy();
  }
}
['dirA', 'dirB', 'dirC'].map(dir => convertSvgFiles(`src/${dir}`));
Your code looks pretty good. I do not see anything obvious that would cause the behavior you describe: you await each promise within the function, and you are not using global variables.
I'll say that this line:
['dirA', 'dirB', 'dirC'].map(dir => convertSvgFiles(`src/${dir}`));
ends up running your function on all 3 directories in parallel, with 3 converter instances. Assuming there are no parallel-related bugs in converter, this should not cause any issues.
But just for grins, try changing that to:
async function run() {
  for (let d of ['dirA', 'dirB', 'dirC']) {
    await convertSvgFiles(`src/${d}`)
  }
}
run()
to force the folders to be scanned sequentially. If this resolves the issue, then there's a bug within convert-svg-to-png.
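Alternatively, if the parallel behaviour is intended, a small sketch (not part of the original answer) of collecting the promises so completion and errors are still surfaced:
// Run all three directories in parallel but wait for every one to finish,
// so any rejection is reported instead of being silently dropped.
Promise.all(['dirA', 'dirB', 'dirC'].map(dir => convertSvgFiles(`src/${dir}`)))
  .then(() => console.log('All conversions finished'))
  .catch(err => console.error('At least one directory failed', err));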

Downloading an Azure Storage Blob using pure JavaScript and Azure-Storage-Js

I'm trying to do this with just pure JavaScript and the SDK; I am not using Node.js. I'm converting my application from v2 to v10 of the SDK (azure-storage-js-v10).
The azure-storage.blob.js bundled file is compatible with UMD
standard, if no module system is found, following global variable
will be exported: azblob
My code is here:
const serviceURL = new azblob.ServiceURL(`https://${account}.blob.core.windows.net${accountSas}`, pipeline);
const containerName = "container";
const containerURL = azblob.ContainerURL.fromServiceURL(serviceURL, containerName);
const blobURL = azblob.BlobURL.fromContainerURL(containerURL, blobName);
const downloadBlobResponse = await blobURL.download(azblob.Aborter.none, 0);
The downloadBlobResponse looks like this: [screenshot of the downloadBlobResponse object]
Using v10, how can I convert the downloadBlobResponse into a new blob so it can be used in the FileSaver saveAs() function?
In azure-storage-js-v2 this code worked on smaller files:
let readStream = blobService.createReadStream(containerName, blobName, (err, res) => {
  if (err) {
    // Handle read blob error
  }
});
// Use event listener to receive data
readStream.on('data', data => {
  // Uint8Array retrieved
  // Convert the array back into a blob
  var newBlob = new Blob([new Uint8Array(data)]);
  // Saves file to the user's downloads directory
  saveAs(newBlob, blobName); // FileSaver.js
});
I've tried everything to get v10 working; any help would be greatly appreciated.
Thanks,
You need to get the body by awaiting blobBody.
downloadBlobResponse = await blobURL.download(azblob.Aborter.none, 0);
// data is a browser Blob type
const data = await downloadBlobResponse.blobBody;
Thanks Mike Coop and Xiaoning Liu!
I was busy making a Vue.js plugin to download blobs from a storage account. Thanks to you, I was able to make this work.
var FileSaver = require('file-saver');
const { BlobServiceClient } = require("@azure/storage-blob");
const downloadButton = document.getElementById("download-button");
const downloadFiles = async () => {
  try {
    if (fileList.selectedOptions.length > 0) {
      reportStatus("Downloading files...");
      for await (const option of fileList.selectedOptions) {
        var blobName = option.text;
        const account = '<account name>';
        const sas = '<blob sas token>';
        const containerName = '<container name>';
        const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`);
        const containerClient = blobServiceClient.getContainerClient(containerName);
        const blobClient = containerClient.getBlobClient(blobName);
        const downloadBlockBlobResponse = await blobClient.download(0, undefined);
        const data = await downloadBlockBlobResponse.blobBody;
        // Saves file to the user's downloads directory
        FileSaver.saveAs(data, blobName); // FileSaver.js
      }
      reportStatus("Done.");
      listFiles();
    } else {
      reportStatus("No files selected.");
    }
  } catch (error) {
    reportStatus(error.message);
  }
};
downloadButton.addEventListener("click", downloadFiles);
Thanks Xiaoning Liu!
I'm still learning about async JavaScript functions and promises. Guess I was just missing another "await". I saw that "downloadBlobResponse.blobBody" was a promise and also a blob type, but I couldn't figure out why it wouldn't convert to a new blob. I kept getting the "Iterator getter is not callable" error.
Here's my final working solution:
// Create a BlobURL
const blobURL = azblob.BlobURL.fromContainerURL(containerURL, blobName);
// Download blob
downloadBlobResponse = await blobURL.download(azblob.Aborter.none, 0);
// In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody
const data = await downloadBlobResponse.blobBody;
// Saves file to the user's downloads directory
saveAs(data, blobName); // FileSaver.js

stream large files in node.js lambda

I am new to JavaScript and have written some Node.js code to calculate the checksum of files in S3 by streaming them through the crypto module. It does fine when the items are small (1-5 GB); larger files time out because not all of the stream data has been consumed by the time the Lambda timeout is up, so the end event is never reached. I am wondering if there are ways to tune this code so that it handles big files in the 30 GB range? I noticed that my Lambda's memory is barely being utilized; each run only uses about 10% (148 MB of the 1530 MB allocated). Can I do anything there? Any help is appreciated, thanks!
var AWS = require('aws-sdk');
const crypto = require('crypto');
const fs = require('fs');
const s3 = new AWS.S3();
let s3params = {
  Bucket: 'nlm-qa-int-draps-bucket',
  //Key: filename.toString(),
  Key: '7801339A.mkv',
};
let hash = crypto.createHash('md5');
let stream = s3.getObject(s3params).createReadStream();
stream.on('data', (data) => {
  hash.update(data);
});
stream.on('end', () => {
  var digest = hash.digest('hex');
  console.log("this is md5 value from digest: " + digest);
  callback(null, digest);
  digest = digest.toString().replace(/[^A-Za-z 0-9 \.,\?""!@#\$%\^&\*\(\)-_=\+;:<>\/\\\|\}\{\[\]`~]*/g, '');
});
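A sketch of how the same logic might sit inside an async Lambda handler so the function only returns once the stream has ended (bucket and key are the question's values; very large objects are still bounded by the configured Lambda timeout):
const AWS = require('aws-sdk');
const crypto = require('crypto');
const s3 = new AWS.S3();

exports.handler = async (event) => {
  const s3params = {
    Bucket: 'nlm-qa-int-draps-bucket',
    Key: '7801339A.mkv',
  };

  // Wrap the stream in a Promise so the async handler waits for 'end'/'error'
  // instead of returning while data is still being consumed.
  const digest = await new Promise((resolve, reject) => {
    const hash = crypto.createHash('md5');
    const stream = s3.getObject(s3params).createReadStream();
    stream.on('data', (chunk) => hash.update(chunk));
    stream.on('end', () => resolve(hash.digest('hex')));
    stream.on('error', reject);
  });

  console.log('this is md5 value from digest: ' + digest);
  return digest;
};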
