I am uploading an image from the local (iOS) filesystem to Firebase using RNFetchBlob.
Although my image is being deleted, the blob remains in the filesystem after closing it.
I tried unlinking the blob, but it still remains.
function createBlob(uri, mediaType) {
  const Blob = RNFetchBlob.polyfill.Blob;
  const typePrefix = isImage(mediaType) ? 'image' : 'video';
  window.Blob = Blob;
  window.XMLHttpRequest = RNFetchBlob.polyfill.XMLHttpRequest;
  return new Promise((resolve, reject) => {
    const uid = guid();
    const mediaRef = firebase.storage().ref('users').child(`${uid}.${mediaType}`);
    let uploadBlob = null;
    Blob.build(uri, { type: `${typePrefix}/${mediaType}` })
      .then((blob) => {
        uploadBlob = blob;
        return mediaRef.put(blob, { type: `${typePrefix}/${mediaType}` });
      })
      .then((response) => {
        uploadBlob.close();
        resolve(response);
      })
      .catch(error => {
        reject(error);
      });
  });
}
This is how it looks in my simulator folder after closing the blob
After createBlob resolves, I delete the file from local storage using RNFetchBlob.fs.unlink(path).
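Roughly, that cleanup step looks like this (a sketch; path stands for the local file path the image was saved to, without a file:// prefix):
createBlob(uri, mediaType)
  .then(() => RNFetchBlob.fs.unlink(path))
  .catch((err) => console.warn('unlink failed:', err));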
I don't think your close() is working because the uploadBlob variable was created in the previous closure but not passed on to the next .then block. I'm thinking this might work:
Blob.build(uri, { type: `${typePrefix}/${mediaType}` })
  .then((blob) => {
    uploadBlob = blob;
    // wait for the upload, then hand both the response and the blob to the next step
    return mediaRef.put(blob, { type: `${typePrefix}/${mediaType}` })
      .then((response) => ({ response, blob: uploadBlob }));
  })
  .then(({ response, blob }) => {
    blob.close();
    resolve(response);
  })
I want to save a PDF to Cloudant. With the code below, I get an error opening the attachment in Cloudant: "An error was encountered when processing this file."
If I put fake string data in the ._attachments[name].data field, it saves fine.
The Cloudant docs say the attachment content needs to be base64-encoded ("The content must be provided by using BASE64 representation"), and that is what I am attempting.
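For reference, an inline base64 attachment in the document body should look roughly like this (a sketch; the name and payload are illustrative, and data is the raw base64 string with no data: prefix):
const doc = {
  _id: "testing::1234567890",
  type: "testing attachment",
  _attachments: {
    "report.pdf": {
      content_type: "application/pdf",
      data: "JVBERi0xLjQK..." // base64 of the file bytes
    }
  }
};
My current attempt is below.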
function saveFile() {
  var doc = {};
  var blob = null;
  // fileName is from the input field model data
  var url = fileName;
  fetch(url)
    .then((r) => r.blob())
    .then((b) => {
      blob = b;
      return getBase64(blob);
    })
    .then((base64) => {
      console.log(base64);
      let name = url._rawValue.name;
      doc._id = "testing::" + new Date().getTime();
      doc.type = "testing attachment";
      doc._attachments = {};
      doc._attachments[name] = {};
      doc._attachments[name].content_type = "application/pdf";
      doc._attachments[name].data = base64.split(",")[1]; // strip the "data:...;base64," prefix
      console.log("doc: ", doc);
    })
    .then(() => {
      api({
        method: "POST",
        url: "/webdata",
        auth: {
          username: process.env.CLOUDANT_USERNAME,
          password: process.env.CLOUDANT_PASSWORD,
        },
        data: doc,
      })
        .then((response) => {
          console.log("result: ", response);
          alert("Test has been submitted!");
        })
        .catch((e) => {
          console.log("e: ", e);
          alert(e);
        });
      console.log("finished send test");
    });
}
function getBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => resolve(reader.result);
    reader.onerror = (error) => reject(error);
  });
}
Any ideas? Thanks.
CouchDB, and by extension Cloudant, has a means of handling a "multi-part" request where the JSON document and the attachments are sent in the same request. See https://docs.couchdb.org/en/3.2.2/api/document/common.html#put--db-docid
They are modelled in CouchDB's Nano project here: https://www.npmjs.com/package/nano#multipart-functions
const fs = require('fs');
fs.readFile('rabbit.png', async (err, data) => {
  if (!err) {
    // one multipart request: the JSON document plus its attachment
    await alice.multipart.insert({ foo: 'bar' }, [{ name: 'rabbit.png', data: data, content_type: 'image/png' }], 'mydoc')
  }
});
Alternatively, you could write the document first and add the attachment in a supplementary request. Using the current Cloudant SDKs:
write document https://cloud.ibm.com/apidocs/cloudant?code=node#putdocument
write attachment https://cloud.ibm.com/apidocs/cloudant?code=node#putattachment
const doc = {
  a: 1,
  b: 2
}
const res = await service.putDocument({
  db: 'events',
  docId: 'mydocid',
  document: doc
})
const stream = fs.createReadStream('./mypdf.pdf')
await service.putAttachment({
  db: 'events',
  docId: 'mydocid',
  rev: res.result.rev, // we need the _rev of the doc we've just created
  attachmentName: 'mypdf',
  attachment: stream,
  contentType: 'application/pdf'
})
I found out I was doing too much to the PDF file. There is no need to make a blob and then convert it to base64; converting to base64 is enough.
async function sendFiles() {
  try {
    const url = fileName;
    const doc = {};
    doc._attachments = {};
    doc._id = "testing::" + new Date().getTime();
    doc.type = "testing attachment";
    for (let item of url._value) {
      const blob2 = await getBase64(item);
      let name = item.name;
      doc._attachments[name] = {};
      doc._attachments[name].content_type = item.type;
      doc._attachments[name].data = blob2.split(",")[1];
    }
    const response = await api({
      method: "POST",
      url: "/webdata",
      data: doc,
    });
  } catch (e) {
    console.log(e);
    throw e; // throw error so caller can see the error
  }
  console.log("finished send test");
  fileName.value = null;
}
function getBase64(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => resolve(reader.result);
    reader.onerror = (error) => reject(error);
  });
}
This works for me.
I am using Firebase to store my 5 JPEGs.
I need to download them and pass them to a function that retrieves EXIF data from them.
To accomplish that task I have made the script below, which is:
checking how many files are in a specific folder in the database
getting download URLs for every file
looping through every URL and executing a function which downloads the files from those URLs
The problem is: the first four images are downloaded properly, but the last one is never downloaded completely.
Please see the code below. Note that the function called getHTML1 is redundant to getHTML and gives the same result;
I just tried a different approach.
TL;DR:
How do I make fetch wait for the last file to be completely downloaded?
import {
  getStorage,
  ref,
  listAll,
  getDownloadURL
} from "firebase/storage";
const storage = getStorage();
const listRef = ref(storage, 'images');
const firebaseDownloadHandler = async () => {
  function getHTML1(url) {
    return new Promise(function(resolve, reject) {
      var xhr = new XMLHttpRequest();
      xhr.open('get', url);
      xhr.responseType = 'blob';
      xhr.onload = function() {
        var status = xhr.status;
        if (status === 200) {
          let blob = new Blob([xhr.response], {
            type: 'image/jpeg'
          })
          console.log(xhr.response);
          resolve(blob);
        } else {
          reject(status);
        }
      };
      xhr.send();
    });
  }
  function getHTML(url) {
    return new Promise(async function(resolve, reject) {
      await fetch(url, {
        method: 'GET',
        type: 'image/jpeg'
      })
        .then(res => res.blob())
        .then(blob => {
          resolve(blob)
        });
    })
  }
  const res = await listAll(listRef);
  const requests = res.items.map(itemRef => getDownloadURL(itemRef))
  const urls = await Promise.all(requests);
  const processArray = async () => {
    console.log(urls);
    const finalResult = [];
    return new Promise(async function(resolve, reject) {
      for (let i = 0; i < urls.length; i++) {
        const result = await getHTML(urls[i + 1]);
        finalResult.push(result);
        if (finalResult.length === urls.length) {
          resolve(finalResult);
        }
      }
    })
  };
  const downloaded = await processArray();
  return await downloaded;
}
export default firebaseDownloadHandler;
result:
20:06:11.499
Array(5) [ "https://firebasestorage.googleapis.com/v0/b/geolocf.appspot.…025.jpg?alt=media&token=8ecbfc6d-07ed-4205-9599-a6e36dd444ed", "https://firebasestorage.googleapis.com/v0/b/geolocf.appspot.…028.jpg?alt=media&token=06a422fa-64f2-482f-9f63-c39aaf1d9354", "https://firebasestorage.googleapis.com/v0/b/geolocf.appspot.…855.jpg?alt=media&token=6ae03b2c-bd82-49fc-bcb6-0de0683e7d50", "https://firebasestorage.googleapis.com/v0/b/geolocf.appspot.…402.jpg?alt=media&token=a22ef4dd-7f79-40aa-90df-57ad73b10248", "https://firebasestorage.googleapis.com/v0/b/geolocf.appspot.…646.jpg?alt=media&token=e04958d0-ed2f-44f6-9931-18644ffbe8b8" ]
firebaseDownloadHandler.js:45
20:06:14.882
Array(5) [ Blob, Blob, Blob, Blob, Blob ]
0: Blob { size: 6428869, type: "image/jpeg" }
1: Blob { size: 7402504, type: "image/jpeg" }
2: Blob { size: 2858717, type: "image/jpeg" }
3: Blob { size: 3045876, type: "image/jpeg" }
4: Blob { size: 2278, type: "text/html; charset=utf-8" }
length: 5
<prototype>: Array []
I'm not sure if this is the cause of the problem, but you're overusing custom promises.
As far as I can see, this function:
function getHTML(url) {
  return new Promise(async function(resolve, reject) {
    await fetch(url, {
      method: 'GET',
      type: 'image/jpeg'
    })
      .then(res => res.blob())
      .then(blob => {
        resolve(blob)
      });
  })
}
Can be shortened to this without any change in functionality:
function getHTML(url) {
  return fetch(url, {
    method: 'GET',
    type: 'image/jpeg'
  })
    .then(res => res.blob())
}
Yeah, the error is not caused by fetch itself. I was passing the wrong URLs to the fetching function. It should be
const result = await getHTML(urls[i]);
instead of
const result = await getHTML(urls[i + 1]);
With i + 1 the loop skips urls[0] and, on the last iteration, asks for urls[urls.length], which is undefined; fetch then requests the relative URL "undefined" and gets an HTML page back, which is why the fifth "blob" came out as text/html instead of a JPEG.
I've tried the imgbb-uploader npm package but I only got it to work with other image URLs and not local files.
axios
  .post("https://api.imgbb.com/1/upload", {
    image: BASE_64_STRING,
    name: file.name,
    key: process.env.MY_API_KEY,
  })
  .then((res) => console.log(res))
  .catch((err) => console.log(err));
What about this:
// fileinput is the file at index 0 from an input[type=file], e.g. e.target.myfileinput.files[0]
const uploadImg = (fileinput) => {
  const formData = new FormData();
  formData.append("image", fileinput); // the field has to be named 'image'!
  let apiresponse = axios.post('https://api.imgbb.com/1/upload?key=your-api-key', formData)
    .then(res => { return res.data })
    .catch(error => { return null })
  return apiresponse;
}
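Usage would look something like this (a sketch; it assumes imgbb's standard response shape, where the payload's data.url is the hosted image link):
uploadImg(e.target.myfileinput.files[0])
  .then((data) => {
    if (data) console.log(data.data.url); // hosted image URL; data is null on failure
  });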
// From a GraphQL resolver's perspective (assumes the form-data and axios packages)
const FormData = require('form-data');
const axios = require('axios');
const { createReadStream, filename } = await file;
const url = "<URL_TO_IMAGE_SERVER>?key=<YOUR_API_KEY>";
const stream = createReadStream();
const form = new FormData();
form.append("image", stream, filename);
try {
  const response = await axios.post(url, form, {
    headers: { ...form.getHeaders() },
  });
  console.log({ response });
  return { Location: response.data.display_url };
} catch (error) {
  return { ...error };
}
I am using the Node.js Google Drive client to download certain files from a Google Drive. When using the example provided in their GitHub I get an Uncaught (in promise) TypeError: res.data.on is not a function error. The file is still created locally, but it's just an empty file from createWriteStream().
When I log the res variable I get: ReadableStream {locked: false}.
I'm pretty new to streams, so this is quite a bit over my head.
Here is my code. You'll notice it's almost exactly what their example looks like.
syncFileFromDrive(fileId, filePath) {
  filePath = filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath = filePath.replaceAll('//', '/');
  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);
  const dest = fs.createWriteStream(filePath);
  let progress = 0;
  return new Promise((resolve, reject) => {
    this.drive.files.get({fileId, alt: 'media'}, {responseType: 'stream'}).then(res => {
      console.log(res);
      console.log(res.data);
      res.data
        .on('end', () => {
          console.log('Done downloading file.');
          folderStructure.buildFileMenu();
          resolve(dest);
        })
        .on('error', err => {
          console.error('Error downloading file.');
          reject(err);
        })
        .on('data', d => {
          progress += d.length;
          if (process.stdout.isTTY) {
            process.stdout.clearLine();
            process.stdout.cursorTo(0);
            process.stdout.write(`Downloaded ${progress} bytes`);
          }
        })
        .pipe(dest);
    });
  });
}
Edit: I should add that this is for an Electron application. So while Node is supported, I'm not sure if that may affect the way I can use streams.
This feels like a bit of a workaround, and I am open to any suggestions, but it solved the issue I was having.
syncFileFromDrive(fileId, filePath) {
  filePath = filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath = filePath.replaceAll('//', '/');
  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);
  this.drive.files
    .get({ fileId, alt: "media" }, { responseType: 'stream' })
    .then((res) => {
      const dest = fs.createWriteStream(filePath);
      const decoder = new TextDecoder("utf-8"); // caution: decoding as UTF-8 text will mangle binary files
      const reader = res.data.getReader(); // res.data is a web ReadableStream here
      reader.read().then(function processText({ done, value }) {
        if (done) {
          console.log("Stream complete");
          dest.end();
          return;
        }
        dest.write(decoder.decode(value));
        // Read some more, and call this function again
        return reader.read().then(processText);
      });
    });
}
Please take a look at my implementation, which I used to download the file.
import fs from 'fs';
import { google } from 'googleapis';
const getOauth2Client = () => new google.auth.OAuth2(
  process.env.GOOGLE_DRIVE_CLIENT_ID,
  process.env.GOOGLE_DRIVE_CLIENT_SECRET,
  process.env.GOOGLE_DRIVE_REDIRECT_URL
);
const downloadFile = ({ id, access_token, path }) => {
  return new Promise((resolve, reject) => {
    const dest = fs.createWriteStream(path);
    const oauth2Client = getOauth2Client();
    oauth2Client.setCredentials({ access_token });
    const drive = google.drive({
      version: 'v3',
      auth: oauth2Client
    });
    drive.files.get(
      { fileId: id, alt: 'media' }, { responseType: 'stream' },
      (err, res) => {
        if (err) return reject(err); // bail out before touching res
        res.data
          .on('end', () => {
            console.log('Done');
          })
          .on('error', _e => {
            console.log('Error', _e);
            if (_e) reject(_e);
          })
          .pipe(dest);
        dest.on('finish', () => {
          console.log('Download finished');
          resolve(true);
        });
      }
    );
  });
};
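Calling it would look something like this (a sketch; the file id, access token, and path are placeholders):
const ok = await downloadFile({
  id: 'your-file-id',
  access_token: 'user-oauth-access-token',
  path: './mypdf.pdf'
});
console.log('downloaded:', ok);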
This is because in the renderer process, Google's gaxios module uses the Fetch API instead of Node's http. The Fetch API returns a web ReadableStream, unlike http, which returns a Node.js Readable. Currently there's no way to change the default adapter, but you can use this quick workaround to convert it.
// Transforms a web ReadableStream to a Node.js Readable
const { Readable } = require('stream');
function toNodeReadable(webStream) {
  const reader = webStream.getReader();
  const rs = new Readable();
  rs._read = async () => {
    const result = await reader.read();
    if (!result.done) {
      rs.push(Buffer.from(result.value));
    } else {
      rs.push(null);
    }
  };
  return rs;
}
Usage with your code:
syncFileFromDrive(fileId, filePath) {
  filePath = filePath.replace(userDataPath, '');
  filePath = `${userDataPath}/${filePath}`;
  filePath = filePath.replaceAll('//', '/');
  logger.info(`Sync file from drive: Syncing file to path: ${filePath}`);
  logger.info(`Sync file from drive: File id: ${fileId}`);
  const dest = fs.createWriteStream(filePath);
  let progress = 0;
  return new Promise((resolve, reject) => {
    this.drive.files.get({fileId, alt: 'media'}, {responseType: 'stream'}).then(res => {
      toNodeReadable(res.data)
        .on('end', () => {
          console.log('Done downloading file.');
          folderStructure.buildFileMenu();
          resolve(dest);
        })
        .on('error', err => {
          console.error('Error downloading file.');
          reject(err);
        })
        .on('data', d => {
          progress += d.length;
          if (process.stdout.isTTY) {
            process.stdout.clearLine();
            process.stdout.cursorTo(0);
            process.stdout.write(`Downloaded ${progress} bytes`);
          }
        })
        .pipe(dest);
    });
  });
}
My goal is to upload an image taken from a webcam to a Lambda function, which then uploads it to AWS S3.
The Lambda function seems to work when I test it; however, I can't work out exactly what needs to be sent through from the React Camera, or whether I am sending the right format for the upload.
import Camera from 'react-camera';
..
This is the JSX:
<Camera
  ref={(cam) => {
    this.camera = cam;
  }}
>
  <Button onClick={this.takePicture}>
    <i className="fas fa-camera"></i> Take photo
  </Button>
</Camera>
This is the React code that is called when they take the photo:
takePicture = () => {
  this.camera.capture()
    .then(blob => {
      console.log(blob);
      this.props.dispatch(uploadImage(blob))
    })
}
The uploadImage function in my action is:
export const uploadImage = (fileObj) => dispatch => {
  return fetch(url, {
    method: 'POST',
    headers: {
      'Accept': 'image/jpeg'
    },
    body: fileObj
  })
    .then((response) => response.json())
    .then(function (response) {
      if (response.status === 'success') {
        console.log(response);
        // ... Show feedback
        return response
      } else {
        // ... Show feedback
      }
    })
    .catch((error) => {
      console.error(error)
    });
}
I figure I need to upload a base64 image?
I don't understand how to get that from the blob.
Here is the Lambda Function code for reference:
var params = {
  Bucket: 'bucketName',
  Key: Date.now() + '.jpg',
  ContentType: 'image/jpeg',
  Body: event.body,
  ACL: "public-read"
};
return uploading = new Promise(function (resolve, reject) {
  return s3.upload(params, function (err, data) {
    if (err) {
      state.uploadError = err
      return reject({
        error: err,
        status: 'error',
        message: 'something went wrong'
      })
    }
    state.uploadData = data
    state.fileLocation = data.Location
    state.status = "success"
    state.message = "File has been uploaded to the fileLocation"
    return resolve(data)
  });
})
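One hedged side note on the Lambda: if the body arrives as a base64 string (which is what the answer below ends up sending), S3 needs the decoded bytes, so the Body line would become something like:
Body: Buffer.from(event.body, 'base64'), // decode the base64 payload back into binary before upload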
Question:
How do I make the format of the blob correct so that when it's POSTed through as the body it will be the correct image format?
Very well organized question and code... thank you!
Updated:
Use the filesystem module to read the file and specify the encoding.
const fs = require('fs');
takePicture = () => {
  this.camera.capture()
    .then(blob => {
      console.log(blob);
      const image = fs.readFileSync(blob.path);
      const b64Image = Buffer.from(image).toString('base64');
      this.props.dispatch(uploadImage(b64Image));
    })
}
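One caveat on the snippet above: require('fs') is only available in Node, so if this capture handler runs in the browser, a FileReader conversion (like the getBase64 helper earlier on this page) would be the rough equivalent, sketched here:
takePicture = () => {
  this.camera.capture()
    .then((blob) => {
      const reader = new FileReader();
      reader.onload = () => {
        // reader.result is a data URL; keep only the base64 part after the comma
        this.props.dispatch(uploadImage(reader.result.split(',')[1]));
      };
      reader.readAsDataURL(blob);
    });
}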