How to convert Base64 into HTML using node js - javascript

Here I tried converting the base64 to HTML using the Mammoth npm package, but it throws an error:
throw new Error(“Can’t find end of central directory : is this a zip file ? ” +
Error: Can’t find end of central directory : is this a zip file ? If it is, see http://stuk.github.io/jszip/documentation/howto/read_zip.html
at ZipEntries.readEndOfCentral (/Users/Desktop/mommoth/node_modules/jszip/lib/zipEntries.js:149:23)
at ZipEntries.load (/Users/Desktop/mommoth/node_modules/jszip/lib/zipEntries.js:215:14)
at new ZipEntries (/Users/Desktop/mommoth/node_modules/jszip/lib/zipEntries.js:21:14)
at JSZip.module.exports [as load] (/Users/Desktop/mommoth/node_modules/jszip/lib/load.js:11:18)
at new JSZip (/Users/Desktop/mommoth/node_modules/jszip/lib/index.js:39:14)
at Object.openArrayBuffer (/Users/Desktop/mommoth/node_modules/mammoth/lib/zipfile.js:10:19)
at Object.openZip (/Users/Desktop/mommoth/node_modules/mammoth/lib/unzip.js:16:41)
at convert (/Users/Desktop/mommoth/node_modules/mammoth/lib/index.js:34:18)
at Object.convertToHtml (/Users/Desktop/mommoth/node_modules/mammoth/lib/index.js:22:12)
at /Users/Desktop/mommoth/server.js:49:10
at FSReqWrap.readFileAfterClose [as oncomplete] (fs.js:511:3)
// Decode a base64 payload (dropping any data-URL prefix) into a Buffer and
// hand it to mammoth for .docx -> HTML conversion.
// NOTE(review): mammoth only accepts .docx (zip) input; this particular
// payload is plain text, so the "end of central directory" error will still
// occur — the changes below fix the API misuse only.
let base64String = 'data:text;base64,TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=';
let base64Path = base64String.split(';base64,').pop();
// Buffer.from replaces the deprecated `new Buffer(...)` constructor.
let buff = Buffer.from(base64Path, 'base64');
console.log(buff);
mammoth.convertToHtml({ buffer: buff })
    .then(function (result) {
        // The promise resolves with the result only — a (error, result)
        // callback signature is wrong here; failures land in .catch below.
        console.log('convert');
        console.log(result);
    })
    .catch(function (error) {
        console.error(error);
    })
    .done();
});

What is your node version?
new Buffer(base64Path, 'base64');
This method is for Node.js v5.11.1 and below, if your Node.js version is v6.0.0 or above, you should convert in this way
let buff = Buffer.from(base64Path, 'base64');

Related

ReferenceError: File is not defined - express

I'm trying to convert a base64 string to an image file and pass it to Firebase via Express.
Everything works fine on front end, except this part:
const convertBase64ToFile = (base64String, fileName) => {
let arr = base64String.split(',');
let mime = arr[0].match(/:(.*?);/)[1];
let bstr = atob(arr[1]);
let n = bstr.length;
let uint8Array = new Uint8Array(n);
while (n--) {
uint8Array[n] = bstr.charCodeAt(n);
}
const file = new File([uint8Array], fileName, { type: mime }); /// getting Error in this line
return file
}
Which library i have to import?
Error:
const file = new File([uint8Array], fileName, { type: mime }); /// getting Error in this line
^
ReferenceError: File is not defined
at convertBase64ToFile (C:\Users\rahto\devel\new\maissaudeapi\api\firestore\write.js:19:16)
at conversor (C:\Users\rahto\devel\new\maissaudeapi\api\firestore\write.js:33:16)
at C:\Users\rahto\devel\new\maissaudeapi\mainServer.js:31:21
at Layer.handle [as handle_request] (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\layer.js:95:5)
at next (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\route.js:144:13)
at Route.dispatch (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\route.js:114:3)
at Layer.handle [as handle_request] (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\layer.js:95:5)
at C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\index.js:284:15
at Function.process_params (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\index.js:346:12)
at next (C:\Users\rahto\devel\new\maissaudeapi\node_modules\express\lib\router\index.js:280:10)
Node.js v18.6.0
[nodemon] app crashed - waiting for file changes before starting...
Then, i changed to this:
// Decode a data-URL payload, write the bytes to disk, and return the file
// contents read back as a Buffer.
const convertBase64ToFile = (base64String, fileName) => {
  const parts = base64String.split(',');
  // MIME type is parsed (as in the original) but not otherwise used here.
  const mime = parts[0].match(/:(.*?);/)[1];
  const decoded = atob(parts[1]);
  // Fill a byte array from the decoded character codes.
  const bytes = new Uint8Array(decoded.length);
  for (let i = decoded.length - 1; i >= 0; i--) {
    bytes[i] = decoded.charCodeAt(i);
  }
  // writeFileSync returns undefined, so `written` is always undefined and
  // readFileSync receives no options, yielding a raw Buffer.
  const written = fs.writeFileSync(fileName, bytes);
  let result = fs.readFileSync(fileName, written);
  return result;
}
And got this error:
C:\Users\rahto\devel\new\maissaudeapi\node_modules\@firebase\storage\dist\index.node.cjs.js:3036
const newPath = child(ref._location.path, childPath);
^
TypeError: Cannot read properties of undefined (reading 'path')
It seems you are using node runtime. You can use fs module to access file system.
fs.writeFile( file, data, options, callback )
Parameters: This method accept four parameters as mentioned above and described below:
file: It is a string, Buffer, URL or file description integer that denotes the path of the file where it has to be written. Using a file descriptor will make it behave similar to fs.write() method.
data: It is a string, Buffer, TypedArray or DataView that will be written to the file.
options: It is an string or object that can be used to specify optional parameters that will affect the output. It has three optional parameter:
encoding: It is a string value that specifies the encoding of the file. The default value is ‘utf8’.
mode: It is an integer value that specifies the file mode. The default value is 0o666.
flag: It is a string value that specifies the flag used while writing to the file. The default value is ‘w’.
callback: It is the function that would be called when the method is executed.
err: It is an error that would be thrown if the operation fails.
Usage:
var fs = require('fs');
// Minimal fs.writeFile usage: write a string asynchronously and log once the
// write completes; rethrow any I/O error from the callback.
fs.writeFile('file.txt', 'Hello content!', (err) => {
  if (err) {
    throw err;
  }
  console.log('Saved!');
});
Also check more here and here for usage.
And also check here for documentation.

Python decoding image spawned by Node.js gives `TypeError: memoryview: a bytes-like object is required, not 'str'`

I'm trying to decode and save an image file passed to a python script.
The image file is read by a Node.JS script index.js and its data passed as image inside a JSON string {"image":readImageFile(), "name":"image.png"}.
The JSON string is received by a spawned Python script script.py.
The problem is when I try to decode the image data, I get the error TypeError: memoryview: a bytes-like object is required, not 'str'.
I tried converting the image data to bytes like base64.decodebytes(bytes(params['image'],'utf-8')) but got the error UnicodeEncodeError: 'utf-8' codec can't encode character '\udc8f' in position 66: surrogates not allowed.
How do I rightly decode the image binary data so I can save it as a file?
Could you please help me spot the problem?
Thanks in advance!
script.py:
import sys, json
import base64
json_str = input() # Capture one JSON document from stdin
params = json.loads(json_str) # parse input
fileName = params['name'] # Capture file name
# decodebytes() requires a bytes-like object, not str — encoding the text
# first fixes the "memoryview: a bytes-like object is required" TypeError.
# Assumes the sender transmits the image as base64 text (ASCII-safe).
fileData = base64.decodebytes(params['image'].encode('ascii')) # decode image
...
...
print("Image Saved!")
sys.stdout.flush()
index.js:
const spawn = require("child_process").spawn;
const fs = require('fs');
let params = {
    // Send the image as base64 text: a raw 'binary' (latin1) string gets
    // mangled when JSON.stringify/UTF-8 re-encode it (producing the lone
    // surrogates that cause the Python-side UnicodeEncodeError).
    "image": readImageFile(),
    "name": "image.png"
}
const pyProcess = spawn('py', ['script.py']);
// Ship the parameters as one newline-terminated JSON document on stdin.
pyProcess.stdin.write(JSON.stringify(params) + '\n');
pyProcess.stdout.on("data", (data) => {
    console.log(data.toString());
});
// Read the image and return its contents as base64 text, which survives the
// JSON round-trip intact; on failure the Error object is returned (original
// best-effort behavior preserved).
function readImageFile() {
    try {
        return fs.readFileSync('color.png').toString('base64');
    }
    catch (err) {
        return err;
    }
}

File type from buffer using file-type

I tried finding the type of a file using the file-type module, but when I run it, it throws the error: "fileType.fromBuffer is not a function".
`
const fileType = require('file-type');
const readChunk = require('read-chunk');
exports.ext = async function(file) {
const buffer = readChunk.sync(file, 0, 512)
let type = await fileType.fromBuffer(buffer);
console.log("type of file",type)
return value
`
this is in utils.js file and i am calling it in other type.js file
`
// Call the helper from type.js; note that `await` is only valid inside an
// async function or at ES-module top level.
var extension = await utils.ext('ABC.png')
console.log(extension)
`
Can anybody tell me what I should do?
seems like there is a miss in npm documentation for the module file-type.
The error which you are getting means the module doesn't expose such a method(fromBuffer).
the fileType accepts buffer as an input i.e.
replace
await fileType.fromBuffer(buffer)
with
await fileType(buffer)
kindly follow this file from the module source

How to write a base64 video to file in nodejs

My Express app is receiving a base64-encoded MP4 from the browser and writing it to a file. But the file isn't a valid video file, and the "file" utility simply identifies it as "data".
My Code:
// Destination directory and a unique .mp4 filename for the upload.
const path = `${config.uploadPath}`;
const filename = `${uniqid()}.mp4`;
// Strip the data-URL prefix so only the raw base64 payload remains.
let base64Data = req.body.base64.replace(/^data:([A-Za-z-+/]+);base64,/, '');
// Interpolate the filename (the original `$(unknown)` was a garbled
// `${filename}`) and decode from base64 while writing.
fs.writeFileSync(`${path}${filename}`, base64Data, 'base64');
Are you sure there is a field named base64 in the request body? If so, please try this code:
// Accept any MIME type in the data-URL prefix, not just a fixed list.
req.body.base64 = req.body.base64.replace(/^data:(.*?);base64,/, ""); // <--- make it any type
// '+' characters arrive as spaces when the body was URL-encoded in transit.
req.body.base64 = req.body.base64.replace(/ /g, '+'); // <--- this is important
// Interpolate the filename (the original `$(unknown)` was a garbled
// `${filename}`) and decode from base64 while writing.
fs.writeFile(`${path}${filename}`, req.body.base64, 'base64', function(err) {
    console.log(err);
});

Javascript - Read parquet data (with snappy compression) from AWS s3 bucket

In nodeJS, I am trying to read a parquet file (compression='snappy') but not successful.
I used https://github.com/ironSource/parquetjs npm module to open local file and read it but reader.cursor() throws cryptic error 'not yet implemented'. It does not matter which compression (plain, rle, or snappy) was used to create input file, it throws same error.
Here is my code:
// Open a local parquet file, print every record via a cursor, then close
// the reader.
const readParquet = async (fileKey) => {
  const filePath = 'parquet-test-file.plain'; // 'snappy';
  console.log('----- reading file : ', filePath);
  const reader = await parquet.ParquetReader.openFile(filePath);
  console.log('---- ParquetReader initialized....');
  // create a new cursor
  const cursor = reader.getCursor();
  // read all records from the file and print them
  if (cursor) {
    console.log('---- cursor initialized....');
    let row;
    // this line throws exception
    while ((row = await cursor.next())) {
      console.log(row);
    }
  }
  await reader.close();
  console.log('----- done with reading parquet file....');
  return;
};
Call to read:
// Start the read and report the promise's outcome either way.
let dt = readParquet(fileKeys.dataFileKey);
dt.then((value) => {
    console.log('--------SUCCESS', value);
  })
  .catch((error) => {
    console.log('-------FAILURE ', error); // Random error
    console.log(error.stack);
  })
More info:
1. I have generated my parquet files in python using pyarrow.parquet
2. I used 'SNAPPY' compression while writing file
3. I can read these files in python without any issue
4. My schema is not fixed (unknown) each time I write parquet file. I do not create schema while writing.
5. error.stack prints undefined in console
6. console.log('-------FAILURE ', error); prints "not yet implemented"
I would like to know if someone has encountered similar problem and has ideas/solution to share. BTW my parquet files are stored on AWS S3 location (unlike in this test code). I still have to find solution to read parquet file from S3 bucket.
Any help, suggestions, code example will be highly appreciated.
Use var AWS = require('aws-sdk'); to get data from S3.
Then use node-parquet to read parquet file into variable.
// `import np = require(...)` is TypeScript-only syntax; plain Node.js
// needs a standard CommonJS require.
const np = require('node-parquet');
// Read from a file:
var reader = new np.ParquetReader(`file.parquet`);
var parquet_info = reader.info();
var parquet_rows = reader.rows();
reader.close();
parquet_rows = parquet_rows + "\n";
There is a fork of https://github.com/ironSource/parquetjs here: https://github.com/ZJONSSON/parquetjs which is a "lite" version of the ironSource project. You can install it using npm install parquetjs-lite.
The ZJONSSON project comes with a function ParquetReader.openS3, which accepts an s3 client (from version 2 of the AWS SDK) and params ({Bucket: 'x', Key: 'y'}). You might want to try and see if that works for you.
If you are using version 3 of the AWS SDK / S3 client, I have a compatible fork here: https://github.com/entitycs/parquetjs (see tag feature/openS3v3).
Example usage from the project's README.md:
const parquet = require("parquetjs-lite");
const params = {
  Bucket: 'xxxxxxxxxxx',
  Key: 'xxxxxxxxxxx'
};
// v2 example
const AWS = require('aws-sdk');
const client = new AWS.S3({
  accessKeyId: 'xxxxxxxxxxx',
  secretAccessKey: 'xxxxxxxxxxx'
});
let reader = await parquet.ParquetReader.openS3(client, params);
// v3 example — use only ONE of the two examples: redeclaring `client` and
// `reader` in the same scope is a SyntaxError, so the v3 variant is shown
// commented out. Note the scoped-package prefix is '@aws-sdk' (the '#' in
// the original was a markdown mangling of '@').
// const {S3Client, HeadObjectCommand, GetObjectCommand} = require('@aws-sdk/client-s3');
// const client = new S3Client({region: "us-east-1"});
// let reader = await parquet.ParquetReader.openS3(
//   {S3Client: client, HeadObjectCommand, GetObjectCommand},
//   params
// );
// create a new cursor
let cursor = reader.getCursor();
// read all records from the file and print them
let record = null;
while (record = await cursor.next()) {
  console.log(record);
}

Categories