I'm working with the Microsoft Bot Framework (hosting on Azure) using Node.js, and saving files locally isn't an option. I have image data in base64 format, and I need to transfer that data into a readable stream of an image file that I can pump into Azure using their Blob Storage API (which takes a stream).
var base64ImageData; // my base64 image

// Create myStream here,
// then send to Azure
blobService.createBlockBlobFromStream('mycontainer', nameForBlob, myStream, fileSize,
    function (error, result, response) {
        if (!error)
            console.log(response);
        else
            console.log(error);
    });
Any help would be appreciated. I can't figure out how to decode the base64 and make a stream out of it without saving a jpg onto my disk.
Please try something like this:
var azureStorage = require('azure-storage');
var streamifier = require('streamifier');
var base64String = 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAEiSURBVDhPxZIhT0NBEITnoBKBrEQgEUhISEiqKiFUYJDI/gBEc8VVY5AYHKKGBIEAVBFN+A8EUVFZUdGk/XbvGtqmpS8ImGTu5vbd7N3tPv0Pog5gzeSGB4oiqgSbCnphNbRQsKEQorbY/YCqaqwrXatl4WIJosrsfELtw3vucOFxsP4J6eRnlJnfOf3S4xk/J0hmO3kPfmE+5er+9ilSAqtoU203TGEFC7pDHcFBNvf82wxSgqAzxhPmDsbdHLtl9FZhbmDuul5AKmLUNuoDtQMH8BGeQ0OXBIckGOX1HL67ELlq6m8pBRyjbF56umEzz9KbPnXM9qBKjhhuMFsdVmKxC/ZzvCbpVW9kvRLzCeydY/9J+sx11laPXyB63/8C0gQlpj3Fc3O2WAAAAABJRU5ErkJggg==';
var blobService = azureStorage.createBlobService('account-name', 'account-key');
var buffer = Buffer.from(base64String, 'base64');
var stream = streamifier.createReadStream(buffer);
blobService.createBlockBlobFromStream('container-name', 'checked.png', stream, buffer.byteLength, function(error, response) {
if (error) {
console.log('Error!');
console.log(error);
} else {
console.log('Blob uploaded successfully!');
console.log(response);
}
});
I had to install the streamifier npm package to convert the buffer into a stream.
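If you'd rather avoid the extra dependency, the same thing can be done with Node's built-in stream module — a minimal sketch, reusing base64String and blobService from the snippet above:
var Readable = require('stream').Readable;

var buffer = Buffer.from(base64String, 'base64');

// Wrap the buffer in a readable stream: push the whole buffer
// as one chunk, then signal end-of-stream with null
var stream = new Readable({
    read: function () {
        this.push(buffer);
        this.push(null);
    }
});

blobService.createBlockBlobFromStream('container-name', 'checked.png', stream, buffer.byteLength, function (error, response) {
    if (error) console.log(error);
    else console.log('Blob uploaded successfully!');
});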
I'm trying to send audio from a client (JavaScript) to a server (Java). I take the user's audio from the microphone and then make a blob from it (and a URL for the blob). The project is a Spring Boot project, so I'm looking for a way to send it as a parameter to a method to upload it to the server.
I was hoping it would be possible to upload the blob to the server, but it seems to only be available locally in the browser, and since the URL for the blob starts with "blob:" before "http" it causes problems.
I have also looked at serialization, but I don't see a way to do that with a blob in JS.
Right now I'm just passing the blob URL between the client and the server.
Client side in js
// Convert the audio data into a blob
// after stopping the recording
mediaRecorder.onstop = function (ev) {
    console.log(dataArray);
    // Blob of type mp3
    let audioData = new Blob(dataArray, { 'type': 'audio/mp3' });
    // After filling up the chunk array, empty it
    dataArray = [];
    // Create an audio URL referencing the blob named 'audioData'
    let audioSrc = window.URL.createObjectURL(audioData);
    //console.log(audioSrc);
    // Pass the audio URL to the 2nd video tag
    playAudio.src = audioSrc;
    const url = "http://localhost:8080/speech?url=" + audioSrc;
    console.log(url);
    $.get(url, function (data) {
        $("#resultat").html("transcribed text: " + data);
    });
}
Server in Java
@GetMapping("/speech")
public String speechToText(String url) throws IOException {
try (SpeechClient speechClient = SpeechClient.create()) {
// The path to the audio file to transcribe
String gcsUri = url;
// Builds the sync recognize request
RecognitionConfig config =
RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(16000)
.setLanguageCode("en-US")
.build();
RecognitionAudio audio = RecognitionAudio.newBuilder().setUri(gcsUri).build();
// Performs speech recognition on the audio file
RecognizeResponse response = speechClient.recognize(config, audio);
List<SpeechRecognitionResult> results = response.getResultsList();
for (SpeechRecognitionResult result : results) {
// There can be several alternative transcripts for a given chunk of speech. Just use the
// first (most likely) one here.
SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
System.out.printf("Transcription: %s%n", alternative.getTranscript());
return alternative.getTranscript();
}
return "idk";
} catch (IOException e) {
e.printStackTrace();
return "noe ble feil";
}
}
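Since a blob: URL only resolves inside the browser tab that created it, the server can never fetch it; the recorded bytes have to be shipped directly. Here is a hedged client-side sketch using multipart form data — it assumes the /speech endpoint is changed to accept a file upload instead of a url parameter, and reuses dataArray and the jQuery selector from the code above:
mediaRecorder.onstop = function (ev) {
    let audioData = new Blob(dataArray, { 'type': 'audio/mp3' });
    dataArray = [];

    // Send the actual bytes instead of a blob: URL
    let form = new FormData();
    form.append('audio', audioData, 'recording.mp3');

    fetch('http://localhost:8080/speech', { method: 'POST', body: form })
        .then((res) => res.text())
        .then((data) => $("#resultat").html("transcribed text: " + data));
}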
I'm using the gcloud API on a Node.js web server to upload files. I'd prefer the files not be uploaded on the client side and instead be uploaded on the server. Currently, I am producing a blob on the client side, converting it to text, and passing that to the server through a POST request. All of the information gets successfully passed from the client to the server as expected. This data is also uploaded to gcloud; however, gcloud does not recognize it as a valid file, nor does my computer when I download it.
What is the best way to get the contents of the file to gcloud from the server side? I've tried using data URIs and reading the original file as text, and both produce similar issues. I've also explored piping a readFileStream from the blob on the server end, but blobs are not natively supported by Node, so I have not done so yet.
Client Side
function readSingleFile(e, func, func2){
var file = e.target.files[0];
if(!file){
return; // Add error msg_here
}
var reader = new FileReader();
reader.onload = function(e){
let contents = e.target.result;
let img = document.createElement('img')
let cvs = document.createElement('canvas');
img.onload = ()=>{
cvs.width = img.width;
cvs.height= img.height;
let ctx = cvs.getContext('2d');
ctx.drawImage(img,0,0);
cvs.toBlob((res)=>{res.text().then((text)=>{func2(text)})}, "image/jpeg", 0.92);
}
img.src=contents;
func(contents);
}
reader.readAsDataURL(file);
}
Server Side
function publishPrintjob(dataObj){
try{
var newElemKey = database.ref().child('queue').push().key; // Get random Key
// Create a new blob in the bucket and upload the file data.
const gcloudFile = storage.file('images/' + newElemKey + '.jpg');
gcloudFile.save(dataObj.sockImageFile, function(err) {
if (!err) {
console.log("File Uploaded!")
}
});
var data = {
date: dataObj.Date,
email: dataObj.email,
design: dataObj.Design,
author: dataObj.Author,
address: dataObj.address,
imageKey: newElemKey,
}
admin.database().ref('queue/' + newElemKey).set(data);
} catch(err){
console.log(err)
}
}
Note: func simply shows the image on the client side, func2 just adds the contents to the POST object.
Uploading a file directly from the computer would be easiest using the storage.bucket(bucketName).upload() function from the Cloud Storage library. However, that function takes the path of a local file, so it will not work unless the file is first transferred to the server and saved there, which could be achieved with multipart form data. Multipart upload (or uploading directly from the client) is generally the better method for getting files into Google Storage.
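For reference, a multipart version might look like this — a hedged sketch, assuming an Express server with the multer and @google-cloud/storage packages (the /upload route, the image field name, and the my-bucket bucket name are made up for illustration):
const express = require('express');
const multer = require('multer');
const { Storage } = require('@google-cloud/storage');

const app = express();
// Keep the uploaded file in memory as a Buffer instead of on disk
const upload = multer({ storage: multer.memoryStorage() });
const bucket = new Storage().bucket('my-bucket');

app.post('/upload', upload.single('image'), (req, res) => {
    // req.file.buffer holds the raw bytes of the uploaded file
    const gcloudFile = bucket.file('images/' + req.file.originalname);
    gcloudFile.save(req.file.buffer, { contentType: req.file.mimetype }, (err) => {
        if (err) return res.status(500).send(err.message);
        res.send('File uploaded!');
    });
});

app.listen(3000);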
Instead, I solved this by first converting the image to a data URI, sending the data URI to the server in the body of a POST request (data URIs are far too long for a query string), and then converting it to a buffer wrapped in a readable stream that can be piped to Google Storage.
Client
let formData = getFormData('myForm');

var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function () {
    if (this.readyState == 4 && this.status == 200) {
        // Typical action to be performed when the document is ready
    }
};
xhttp.onload = () => {
    console.log(JSON.parse(xhttp.response));
    // Handle server response here
};
xhttp.open("POST", "dashboard", true);
xhttp.setRequestHeader('Content-Type', 'application/json');
xhttp.send(JSON.stringify(formData));
Server
// dataObj is the body of the POST request; the property ImageFile is the data URI produced on the client
function uploadImageOnServer(dataObj){
try{
var newElemKey = database.ref().child('queue').push().key; // Get random Key to use as filename
// Create a new blob in the bucket and upload the file data.
const gcloudFile = storage.file('images/' + newElemKey + '.jpeg');
var { Readable } = require('stream'); // Should be required at the top of the file
var string = dataObj.ImageFile;
var regex = /^data:.+\/(.+);base64,(.*)$/;
var matches = string.match(regex);
var ext = matches[1];
var data = matches[2];
var buffer = Buffer.from(data, 'base64');
// Create the readstream
const readableInstanceStream = new Readable({
read() {
this.push(buffer);
this.push(null);
}
});
readableInstanceStream.pipe(gcloudFile.createWriteStream()) // link to gcloud storage api
.on('error', function(err) {
console.log('error')
})
.on('finish', function() {
console.log('upload complete')
});
} catch(err){
console.log(err)
}
}
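As an aside, the hand-rolled stream isn't strictly necessary: the Cloud Storage file object can take the buffer directly through its save() method — a hedged simplification of the same upload, reusing data, ext, and gcloudFile from above:
// Equivalent upload without constructing a stream by hand
var buffer = Buffer.from(data, 'base64');
gcloudFile.save(buffer, { contentType: 'image/' + ext }, function (err) {
    if (err) console.log(err);
    else console.log('upload complete');
});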
I've been fighting for days with this problem: inserting a file into a database with Spring Boot, JPA/Hibernate, and a Vue.js frontend.
(Yes, I know it's better to store the location and retrieve the file from there, not the file itself, but I have to store the file, so move on.) I tried different solutions, but none of them worked. First I passed the file path from the frontend to the backend as a normal field of my JSON data and used:
String path= json.get("file_name").asText();
File file = new File(path);
byte[] fileInBytes = new byte[(int) file.length()];
FileInputStream fileInputStream = new FileInputStream(file);
fileInputStream.read(fileInBytes);
fileInputStream.close();
c.setFile(fileInBytes);
It worked only if I passed the explicit path, because from my HTML input type=file I always got C:/fakepath/filename, so the backend obviously couldn't find the file. Is there any way to pass the explicit path? I searched for a while but couldn't find a solution, so I changed my mind.
Now I'm passing the base64 encode of the file from the frontend in Vue with this code (thanks to How to convert file to base64 in JavaScript?):
getBase64(file) {
    return new Promise(function(resolve, reject) {
        var reader = new FileReader();
        reader.onload = function() { resolve(reader.result); };
        reader.onerror = function(error) {
            console.log('Error when converting file to base64: ', error);
            reject(error);
        };
        reader.readAsDataURL(file);
    });
},
uploadFile: function(event) {
    this.input_file = document.getElementById("challenge_file").files[0];
    this.base64_file = this.getBase64(this.input_file);
    // Arrow function keeps `this` bound to the component;
    // `result` (the resolved string), not the promise, is what gets stringified
    this.base64_file.then((result) => {
        console.log(result);
        this.File = JSON.stringify({'file': result});
        axios.post("/uploadfile",
            this.File,
            { headers: {
                'Content-type': 'application/json',
            }
        }).then(function(response) {
            location.reload(true);
        }).catch(function(error) {
            console.log(error);
        });
    });
}
This code works and I get the base64 string in the backend too, where I try to convert it into a byte[] because my file is a @Lob private byte[] file;. This is my code in the backend controller:
System.out.println(json.get("file"));
//print "data:text/plain;base64,aVZCT1J..."
int init= json.get("file").asText().lastIndexOf(",");
String base64file=json.get("file").asText().substring(init+1);
//I get only the part after 'base64,' *(see below)
System.out.println(base64file);
byte[] decodedByte = Base64.getDecoder().decode(base64file);
//I decode it into bytes[]
c.setFile(decodedByte);
*I keep only the part after 'base64,' because if I use the whole string I get this error: java.lang.IllegalArgumentException: Illegal base64 character 3a (0x3a is the ':' from the 'data:' prefix).
This code throws no errors, but the blob in the database is empty, whereas with the first approach I could open the file preview from Hibernate, but only when I hard-coded the correct real path instead of taking it from the input.
Any suggestions? What should I do?
SOLVED:
Thanks to an answer to this question, I changed my backend to:
System.out.println(json.get("file"));
String data= json.get("file").asText();
String partSeparator = ",";
if (data.contains(partSeparator)) {
String encodedImg = data.split(partSeparator)[1];
byte[] decodedImg = Base64.getDecoder().decode(encodedImg.getBytes(StandardCharsets.UTF_8));
c.setFile(decodedImg);
}
and now I see the correct file in the db.
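For completeness, the same split can be done on the client instead, so the backend receives bare base64 with no data: prefix — a hedged sketch against the Vue code above:
this.base64_file.then((result) => {
    // result looks like "data:text/plain;base64,aVZCT1J..."
    // keep only the payload after the comma
    const bareBase64 = result.split(',')[1];
    axios.post("/uploadfile", JSON.stringify({'file': bareBase64}), {
        headers: { 'Content-type': 'application/json' }
    });
});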
I am compressing a string in Python and trying to decompress it in JavaScript.
Compress a string in Python:
import zlib, base64
text = "Test string"
result = base64.b64encode(zlib.compress(text.encode("utf-8")))
In JavaScript:
const zlib = require('zlib');
var data = atob(<text encoded in base64 from Python>);
zlib.inflate(data, function(err, buffer){
if(!err){
// doing something
}
else{
console.log(err); // <-- Error: data error: incorrect header check
}
});
In JavaScript, it returns an "incorrect header check" error. How should I compress the data in Python so that zlib.inflate() in JavaScript can decompress it?
You are passing a string to zlib.inflate in your JavaScript code, but it needs the raw bytes as a Buffer (a string from atob gets reinterpreted as UTF-8, which corrupts the compressed data). Decode the base64 with Buffer.from instead:
var data = Buffer.from('eJwLSS0uUSguKcrMSwcAGWAEWA==', 'base64')
zlib.inflate(data, function(err, buffer) {
if (!err) {
console.log(buffer.toString());
} else {
console.log(err);
}
})
Output:
Test string
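The same decode also works synchronously, which can be handy at a REPL — a minimal sketch:
const zlib = require('zlib');

const data = Buffer.from('eJwLSS0uUSguKcrMSwcAGWAEWA==', 'base64');
// inflateSync returns a Buffer; toString() decodes it as UTF-8
console.log(zlib.inflateSync(data).toString()); // "Test string"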
In Node.js, how can I download a specific amount of a file, like only the first 10 kilobytes?
In my project, I need to extract the duration and bitrate of a remote MP3 file, and the only way I can think of is to download just the first few bytes instead of the entire file.
As someone pointed out, if you download (at least) the first 32 kB of the file, you should be OK for most MP3 files.
In Node.js you know when a chunk of data is available. If you make an HTTP request, you are given a response object which contains the headers. This response object is also a stream that emits events; the one you want is 'data'. With this event you get a buffer containing the newly received data (only the new data; the previously received chunks are not included). You just have to keep a buffer and append to it each time you get a chunk. Since you can check the buffer's length, you can stop downloading once you have enough data by calling the response's destroy method. Here is an example:
var http = require("http");
var buff = Buffer.alloc(0); // new Buffer() is deprecated
http.get("http://epfl.ch", function(res) {
res.on('data', function(chunk) {
buff = Buffer.concat([buff, chunk]);
if (buff.length > 10240) {
res.destroy();
console.log(buff);
}
});
})
This code waits until 10 kB have been fetched, then ends the request. After that you can do whatever you want with the data (buff).
If you want to save the data to a file while downloading, you can do this instead:
var http = require("http");
var buff = Buffer.alloc(0); // new Buffer() is deprecated
var fs = require("fs");
var file = fs.createWriteStream("file.mp3");
http.get("http://epfl.ch", function(res) {
res.pipe(file);
res.on('data', function(chunk) {
buff = Buffer.concat([buff, chunk]);
if (buff.length > 10240) {
res.destroy();
file.close();
console.log(buff);
}
});
})
This code creates a write stream for the file and pipes the body of the response into it (i.e., each chunk of data received is appended to the file).
If you don't want to do anything with the buffer, you don't need to keep it; you can just count the number of bytes received and stop when you have enough.
var http = require("http");
var bytesReceived = 0;
var fs = require("fs");
var file = fs.createWriteStream("file.mp3");
http.get("http://epfl.ch", function(res) {
res.pipe(file);
res.on('data', function(chunk) {
bytesReceived += chunk.length;
if (bytesReceived > 10240) {
res.destroy();
file.close();
}
});
})
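If the server supports HTTP range requests, a hedged alternative is to ask for only the first bytes up front instead of cutting the connection yourself — a minimal sketch:
var http = require("http");

// Request only the first 10 KiB; servers that honor Range reply
// with 206 Partial Content and close the connection on their own
http.get("http://epfl.ch", { headers: { Range: "bytes=0-10239" } }, function (res) {
    var chunks = [];
    res.on('data', function (chunk) { chunks.push(chunk); });
    res.on('end', function () {
        var buff = Buffer.concat(chunks);
        console.log(buff.length); // at most 10240 if Range was honored
    });
});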
Best regards,