404 on large file uploads using google drive api - javascript

I am trying to upload files to Google Drive via their API. When the file is over roughly 60MB, I get a 404 or a "something went wrong" screen.
I've tried several methods and noticed that back in 2015 a lot of people had similar issues because Google did not support resumable uploads from the browser. I don't know whether that has changed, but it works fine for files under 60MB.
// const file = new File(['Hello, world!'], 'hello world.txt', { type: 'text/plain;charset=utf-8' });
const contentType = file.type || 'application/octet-stream';
const user = gapi.auth2.getAuthInstance().currentUser.get();
const oauthToken = user.getAuthResponse().access_token;

const initResumable = new XMLHttpRequest();
initResumable.open('POST', 'https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable', true);
initResumable.setRequestHeader('Authorization', 'Bearer ' + oauthToken);
initResumable.setRequestHeader('Content-Type', 'application/json');
initResumable.setRequestHeader('X-Upload-Content-Length', file.size);
initResumable.setRequestHeader('X-Upload-Content-Type', contentType);

initResumable.onreadystatechange = function () {
  if (initResumable.readyState === XMLHttpRequest.DONE && initResumable.status === 200) {
    const locationUrl = initResumable.getResponseHeader('Location');

    const reader = new FileReader();
    reader.onload = (e) => {
      const uploadResumable = new XMLHttpRequest();
      uploadResumable.open('PUT', locationUrl, true);
      uploadResumable.setRequestHeader('Content-Type', contentType);
      uploadResumable.setRequestHeader('X-Upload-Content-Type', contentType);
      uploadResumable.onreadystatechange = function () {
        if (uploadResumable.readyState === XMLHttpRequest.DONE && uploadResumable.status === 200) {
          console.log(uploadResumable.response);
        }
      };
      uploadResumable.send(reader.result);
    };
    reader.readAsArrayBuffer(file);
  }
};

// You need to stringify the request body containing any file metadata
initResumable.send(JSON.stringify({
  'name': file.name,
  'mimeType': contentType,
  'Content-Type': contentType,
  'Content-Length': file.size
}));
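
This isn't from the question, but for context on the resumable protocol: instead of PUT-ing the whole ArrayBuffer in one request, the session URL also accepts the file in chunks with a Content-Range header, which is the usual way to get very large files through a browser upload. Below is a rough sketch of that idea; the 8 MB chunk size (a multiple of 256 KiB, as the protocol requires) and the uploadChunk helper name are my own assumptions, not part of the original code.

// Hypothetical sketch: upload `file` to an existing resumable session in chunks.
// Assumes `locationUrl` came from the uploadType=resumable init request above.
const CHUNK_SIZE = 8 * 1024 * 1024; // must be a multiple of 256 KiB

function uploadChunk(locationUrl, file, start) {
  const end = Math.min(start + CHUNK_SIZE, file.size);
  const xhr = new XMLHttpRequest();
  xhr.open('PUT', locationUrl, true);
  // Tell Drive which byte range this request carries and the total size.
  xhr.setRequestHeader('Content-Range', 'bytes ' + start + '-' + (end - 1) + '/' + file.size);
  xhr.onreadystatechange = function () {
    if (xhr.readyState !== XMLHttpRequest.DONE) return;
    if (xhr.status === 308) {
      // 308 Resume Incomplete: Drive accepted this chunk, send the next one.
      uploadChunk(locationUrl, file, end);
    } else if (xhr.status === 200 || xhr.status === 201) {
      console.log('Upload complete', xhr.response);
    } else {
      console.error('Chunk failed with status ' + xhr.status);
    }
  };
  // Blob.slice avoids reading the whole file into memory at once.
  xhr.send(file.slice(start, end));
}

uploadChunk(locationUrl, file, 0);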

Related

Uploading file to google drive via drive API messes up umlauts

I am uploading a file to Google Drive with the following code snippet using the Drive API, but when I look at the contents of the file in Google Drive, the umlauts are messed up.
chrome.identity.getAuthToken({ interactive: true }, token => {
var metadata = {
name: 'testname.json',
mimeType: 'application/json'
};
var fileContent = {
title: "Test Title",
notes: "Ää",
last_changed: "1641862146889",
url: "https://www.example.org",
poster_url: "https://www.example.org"
};
var file = new Blob([JSON.stringify(fileContent)], { type: 'application/json' });
var form = new FormData();
form.append('metadata', new Blob([JSON.stringify(metadata)], { type: 'application/json' }));
form.append('file', file);
for (var pair of form.entries()) {
console.log(pair[0], pair[1].text());
}
var xhr = new XMLHttpRequest();
xhr.open('POST', 'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart');
xhr.setRequestHeader('Authorization', 'Bearer ' + token);
xhr.responseType = 'json';
xhr.onload = () => {
var fileId = xhr.response.id;
/* Do something with xhr.response */
console.log(xhr.response);
};
xhr.send(form);
});
Messed-up umlauts (notes property):
{"title":"Test Title","notes":"Ää","last_changed":"1641862146889","url":"https://www.example.org","poster_url":"https://www.example.org"}
Does someone know how to prevent this from happening? After some troubleshooting, I also think it is possible that the problem has something to do with Blob or FormData.
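
No accepted fix is quoted here, but one cheap experiment (my assumption, not something from the question): the Blob constructor encodes the JSON string as UTF-8, so if whatever later reads or previews the file assumes Latin-1, each umlaut comes out as two characters. Declaring the charset explicitly on both Blobs tells Drive and anything downstream how the bytes are encoded:

// Sketch only: same multipart upload, but with an explicit UTF-8 charset on the Blob types.
var file = new Blob([JSON.stringify(fileContent)], { type: 'application/json;charset=UTF-8' });
var form = new FormData();
form.append('metadata', new Blob([JSON.stringify(metadata)], { type: 'application/json;charset=UTF-8' }));
form.append('file', file);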

How to Upload txt file with Cypress for API Testing - XMLHTTPRequest?

I'm trying to test an endpoint that uploads a file and returns a 200 status code in Cypress. From what I've researched, cy.request cannot be used to upload a file as multipart/form-data, so we need to use XMLHttpRequest to upload such files. I have created the file below to test the API, but it doesn't work. Can someone please help me figure out what's wrong with my code? Thank you.
I added the code below under support/commands.ts (I will need a header to pass a token from the auth endpoint):
// Performs an XMLHttpRequest instead of a cy.request (able to send data as FormData - multipart/form-data)
Cypress.Commands.add('multipartFormRequest', (method,URL, formData,headers, done) => {
const xhr = new XMLHttpRequest();
xhr.open(method, URL);
xhr.setRequestHeader("accept", "application/json");
xhr.setRequestHeader("Content-Type", "multipart/form-data");
if (headers) {
headers.forEach(function(header) {
xhr.setRequestHeader(header.name, header.value);
});
}
xhr.onload = function (){
done(xhr);
};
xhr.onerror = function (){
done(xhr);
};
xhr.send(formData);
})
Test file to call multipartFormRequest:
const fileName = 'test_file.txt';
const method = 'POST';
const URL = "https://fakeurl.com/upload-file";
const headers = api.headersWithAuth(`${authToken}`);
const fileType = "application/text";
cy.fixture(fileName, 'binary').then((res) => {
const blob = Cypress.Blob.binaryStringToBlob(res, fileType);
const formData = new FormData();
formData.append('file', blob, fileName);
cy.multipartFormRequest(method, URL, headers, formData, function (response) {
expect(response.status).to.equal(200);
})
})
I'm getting this error message:
Now I'm getting a status code of 0 with the following code:
describe("Upload image", () => {
it("upload first image", () => {
const fileName = "image.jpeg";
const method = "POST";
const url = "https://api-demo.com/1";
const fileType = "image/jpeg";
cy.fixture(fileName, "binary")
.then((txtBin) => Cypress.Blob.binaryStringToBlob(txtBin))
.then((blob) => {
const formData = new FormData();
formData.append("image_data", blob, fileName);
formData.append("image_format", "jpeg");
cy.form_request(method, url, formData, function (response) {
expect(response.status).to.eq(200)
}
);
})
});
});
Cypress.Commands.add('form_request', (method, url, formData, done) => {
const xhr = new XMLHttpRequest();
xhr.open(method, url);
xhr.setRequestHeader("device", "331231");
xhr.setRequestHeader("city", "bangalore");
xhr.onload = function () {
done(xhr);
};
xhr.onerror = function () {
done(xhr);
};
xhr.send(formData);
})
Use
const blob = Cypress.Blob.binaryStringToBlob(res, fileType);
and remove the .then().
See the Cypress.Blob docs, History, Version 5.0.0 changes:
Return type of the arrayBufferToBlob, base64StringToBlob, binaryStringToBlob, and dataURLToBlob methods changed from Promise<Blob> to Blob.
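
In other words, with Cypress.Blob 5+ the helper returns a Blob synchronously, so the fixture callback can use it directly. A sketch of the adjusted test under that assumption, keeping the (method, URL, formData, headers, done) signature defined in support/commands.ts above (note that the original call passed headers and formData in the opposite order):

// Sketch: binaryStringToBlob returns a Blob directly in v5+, so no .then() around it.
cy.fixture(fileName, 'binary').then((res) => {
  const blob = Cypress.Blob.binaryStringToBlob(res, fileType);
  const formData = new FormData();
  formData.append('file', blob, fileName);
  // Arguments follow the custom command's signature: (method, URL, formData, headers, done)
  cy.multipartFormRequest(method, URL, formData, headers, (response) => {
    expect(response.status).to.equal(200);
  });
});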

Uploading a Folder to GDrive and get the Folder ID to use to upload files

Thanks for reading my question. I am working with the Google Drive API and can upload files from a text blob to Google Drive. However, I currently have to add the folder ID manually, which means other users get an error because they are trying to upload into my folder. How can I create or look up a folder ID, or even just upload to the root Google Drive directory? Any tips would be helpful.
Thanks
// Global vars
const SCOPE = 'https://www.googleapis.com/auth/drive.file';
const gdResponse = document.querySelector('#response');
const login = document.querySelector('#login');
const authStatus = document.querySelector('#auth-status');
const textWrap = document.querySelector('.textWrapper');
const addFolder = document.querySelector('#createFolder');
const uploadBtn = document.querySelector('#uploadFile');
// Save Button and Function
uploadBtn.addEventListener('click', uploadFile);
window.addEventListener('load', () => {
console.log('Loading init when page loads');
// Load the API's client and auth2 modules.
// Call the initClient function after the modules load.
gapi.load('client:auth2', initClient);
});
function initClient() {
const discoveryUrl =
'https://www.googleapis.com/discovery/v1/apis/drive/v3/rest';
// Initialize the gapi.client object, which app uses to make API requests.
// Get API key and client ID from API Console.
// 'scope' field specifies space-delimited list of access scopes.
gapi.client
.init({
apiKey: 'My-API-Key',
clientId:
'My-Client-ID',
discoveryDocs: [discoveryUrl],
scope: SCOPE,
})
.then(function () {
GoogleAuth = gapi.auth2.getAuthInstance();
// Listen for sign-in state changes.
GoogleAuth.isSignedIn.listen(updateSigninStatus);
// Actual upload of the file to GDrive
function uploadFile() {
let accessToken = gapi.auth.getToken().access_token; // Google Drive API Access Token
console.log('Upload Blob - Access Token: ' + accessToken);
let fileContent = document.querySelector('#content').value; // As a sample, upload a text file.
console.log('File Should Contain : ' + fileContent);
let file = new Blob([fileContent], { type: 'application/pdf' });
let metadata = {
name: 'Background Sync ' + date, // Filename
mimeType: 'text/plain', // mimeType at Google Drive
// For Testing Purpose you can change this File ID to a folder in your Google Drive
parents: ['Manually-entered-Folder-ID'], // Folder ID in Google Drive
// I'd like to have this automatically filled with a users folder ID
};
let form = new FormData();
form.append(
'metadata',
new Blob([JSON.stringify(metadata)], { type: 'application/json' })
);
form.append('file', file);
let xhr = new XMLHttpRequest();
xhr.open(
'post',
'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&fields=id'
);
xhr.setRequestHeader('Authorization', 'Bearer ' + accessToken);
xhr.responseType = 'json';
xhr.onload = () => {
console.log(
'Upload Successful to GDrive: File ID Returned - ' + xhr.response.id
); // Retrieve uploaded file ID.
gdResponse.innerHTML =
'Uploaded File. File Response ID : ' + xhr.response.id;
};
xhr.send(form);
}
I didn't include some of the unrelated code. I have something like this for uploading a folder, and unsurprisingly it's not working.
function createFolder() {
var fileMetadata = {
name: 'WordQ-Backups',
mimeType: 'application/vnd.google-apps.folder',
};
drive.files.create(
{
resource: fileMetadata,
fields: 'id',
},
function (err, file) {
if (err) {
// Handle error
console.error(err);
} else {
console.log('Folder Id: ', file.id);
}
}
);
}
I believe your goal is as follows.
You want to upload a file to the root folder or to a newly created folder.
For this, how about the following modification patterns?
Pattern 1:
When you want to put the file in the root folder, please try the following modification.
From:
parents: ['Manually-entered-Folder-ID'],
To:
parents: ['root'],
Alternatively, remove parents: ['Manually-entered-Folder-ID'], entirely. With this, the file is created in the root folder.
Pattern 2:
When you want to create a new folder and put the file into that folder, please try the following modification. Unfortunately, I'm not sure what drive refers to in your createFolder(), so I cannot tell what the issue with it is. In this pattern, the folder is therefore created with XMLHttpRequest.
Modified script:
function createFile(accessToken, folderId) {
let fileContent = document.querySelector('#content').value;
console.log('File Should Contain : ' + fileContent);
let file = new Blob([fileContent], { type: 'application/pdf' });
let metadata = {
name: 'Background Sync ' + date,
mimeType: 'text/plain',
parents: [folderId], // <--- Modified
};
let form = new FormData();
form.append(
'metadata',
new Blob([JSON.stringify(metadata)], { type: 'application/json' })
);
form.append('file', file);
let xhr = new XMLHttpRequest();
xhr.open(
'post',
'https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&fields=id'
);
xhr.setRequestHeader('Authorization', 'Bearer ' + accessToken);
xhr.responseType = 'json';
xhr.onload = () => {
console.log(
'Upload Successful to GDrive: File ID Returned - ' + xhr.response.id
);
gdResponse.innerHTML =
'Uploaded File. File Response ID : ' + xhr.response.id;
};
xhr.send(form);
}
function createFolder(accessToken) {
const folderName = "sample"; // <--- Please set the folder name.
let metadata = {
name: folderName,
mimeType: 'application/vnd.google-apps.folder'
};
let xhr = new XMLHttpRequest();
xhr.open('post', 'https://www.googleapis.com/drive/v3/files?fields=id');
xhr.setRequestHeader('Authorization', 'Bearer ' + accessToken);
xhr.setRequestHeader('Content-Type', 'application/json');
xhr.responseType = 'json';
xhr.onload = () => {
const folderId = xhr.response.id;
console.log(folderId);
createFile(accessToken, folderId);
};
xhr.send(JSON.stringify(metadata));
}
// At first, this function is run.
function uploadFile() {
let accessToken = gapi.auth.getToken().access_token;
createFolder(accessToken);
}
Reference:
Files: create

facebook video upload error 1363030

When I try to upload, I get the following error:
Error code:1363030,
msg: Your video upload timed out before it could be completed. This is probably because of a slow network connection or because the video you're trying to upload is too large. Please try again.
I'm using Facebook Javascript SDK 2.5.
What am I missing or doing wrong?
<script>
var files;
var fileData = '';
function handleFileSelect(evt) {
files = evt.target.files; // FileList object
var input = evt.target;
var reader = new FileReader();
reader.onload = function (e) {
fileData = e.target.result;
};
reader.readAsDataURL(input.files[0]);
// files is a FileList of File objects. List some properties.
var output = [];
for (var i = 0, f; f = files[i]; i++) {
output.push('<li class="list-group-item">', escape(f.name), '(', f.type || 'n/a', ') - ',
f.size, ' bytes','</li>');
}
document.getElementById('list').innerHTML = output.join('');
}
document.getElementById('files').addEventListener('change', handleFileSelect, false);
$(document).ready(function()
{
$("#upload").click(function(){
var token = $('#token').val();
FB.api(
"/me/videos",
"POST",
{
"access_token" : token,
"title" : 'test',
"source": fileData
},
function (response) {
if (response && !response.error) {
/* handle the result */
}
}
);
})
});
</script>
here is the sample site
Just hit this in Node.js. This Facebook error occurs if you don't specify the content type and file name of the attached file (i.e. if you pass it as an inline field value, not as an attached file).
Not sure how to do it via FB.api, but with request-promise module (and ES7 async functions via Babel) it looks like this:
import request from 'request-promise'
async function uploadVideoToFacebook (buf) {
let url = 'https://graph-video.facebook.com/v2.5/' + pageId + '/videos?access_token=' + pageToken
let formData = {
title: 'Video title',
description: 'Timeline message...',
source: {
value: buf,
options: {
filename: 'video.mp4',
contentType: 'video/mp4'
}
}
}
return await request({ method: 'POST', url, formData })
}
and with XMLHttpRequest on the client-side you would do something like:
var blob = new Blob([videoDataHere], { type: 'video/mp4' })
var formData = new FormData();
// Pass a file name so the video is sent as an attached file rather than an inline field value
formData.append('source', blob, 'video.mp4');
formData.append('message', 'Spartan Overlay');
var ajax = new XMLHttpRequest()
ajax.onreadystatechange = ...
ajax.open('POST', 'https://graph.facebook.com/' + userId + '/videos?access_token=' + accessToken, true)
ajax.send(formData)

Saving blob (might be data!) returned by AJAX call to Azure Blob Storage creates corrupt image

If I post a PDF to my vendor's API, they return a .png file as a blob (see update 2, as I am now unsure whether they are actually returning blob data).
I would like to push this into Azure Blob Storage. Using my code listed below, it pushes something in, but the file is corrupted. Example: downloading the .png from Azure Blob Storage and trying to open it with Paint gives the following error:
This is not a valid bitmap file, or its format is not currently
supported.
I have verified that the image is sent to me correctly as the vendor is able to open the .png on their side. I am wondering if I need to convert this to base64 or save it to a local Web directory before uploading it to Azure Blob Storage.
Here is my Angular front end Controller that calls my Node/Express backend for uploading to Azure once it receives the returned "image":
$.ajax({
url: 'http://myvendorsapi.net/uploadPDF',
type: "POST",
data: formdata,
mimeType: "multipart/form-data",
processData: false,
contentType: false,
crossDomain: true,
success: function (result) {
var containerName = 'container1';
var filename = 'Texture_0.png';
var file = result;
$http.post('/postAdvanced', { containerName: containerName, filename: filename, file: file }).success(function (data) {
console.log("success!");
}, function (err) {
//console.log(err);
});
},
error: function (error) {
console.log("Something went wrong!");
}
})
}
Here is my Node/Express backend that uploads the blob to Azure Blob Storage. It gives no error, but the file can't be opened/gives the error stated above when opened in Paint:
app.post('/postAdvanced', function (req, res, next) {
var containerName = req.body.containerName;
var filename = req.body.filename;
var file = req.body.file;
blobSvc.createBlockBlobFromText(containerName, filename, file, function (error, result, response) {
if (!error) {
res.send(result);
}
else {
console.log(error);
}
});
})
Update 1: The answer provided here allows me to pass in the URL of the vendor's API for some endpoints: Download file via Webservice and Push it to Azure Blob Storage via Node/Express
It works by writing the file from the endpoint to a temp folder. In my current scenario, I upload a PDF file and get back an image file that I need to upload to Azure Blob Storage. Is there a way to use that answer, but adjust it for a file that I already have (since it is returned to me) rather than streaming a file from a URL?
Update 2: When I console.log the returned "file", it looks like it may be raw data. I am not sure; it looks like this:
Is this actually data, and if so, how do I turn it into a file for upload?
UPDATE 3:
Since it appears that jQuery AJAX can't handle binary responses, I am able to "open" the blob using XMLHttpRequest as follows, but I can't seem to push it into Azure, as it gives me the following error:
TypeError: must start with number, buffer, array or string
Here is my request. Note that the file opens properly:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
var oData = new FormData(form);
var xhr = new XMLHttpRequest();
xhr.responseType = "arraybuffer";
xhr.open("POST", "http://myvendorsapi/Upload", true);
xhr.onload = function (oEvent) {
if (xhr.status == 200) {
var blob = new Blob([xhr.response], { type: "image/png" });
var objectUrl = URL.createObjectURL(blob);
window.open(objectUrl);
console.log(blob);
var containerName = boxContainerName;
var filename = 'Texture_0.png';
$http.post('/postAdvanced', { containerName: containerName, filename: filename, file: blob }).success(function (data) {
//console.log(data);
console.log("success!");
}, function (err) {
//console.log(err);
});
} else {
oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
}
};
xhr.send(oData);
ev.preventDefault();
}, false);
createBlockBlobFromText will work with either a string or a buffer. You might need a buffer to hold the binary content due to a known issue with jQuery.
For a workaround, there are several options:
Option 1: Read binary files using jQuery AJAX
Option 2: Use the native XMLHttpRequest
Option 3: Write the frontend with Node as well and browserify it.
Your frontend code may look like:
var request = require('request');
request.post('http://myvendorsapi.net/uploadPDF', function (error, response, body) {
if (!error && response.statusCode == 200) {
var formData = {
containerName: 'container1',
filename: 'Texture_0.png',
file: body
};
request.post({ uri: '/postAdvanced', formData: formData }, function optionalCallback(err, httpResponse, body) {
if (err) {
return console.error('upload failed:', err);
}
console.log('Upload successful! Server responded with:', body);
});
} else {
console.log('Get snapshot failed!');
}
});
Then the backend code may look like:
app.post('/postAdvanced', function (req, res, next) {
var containerName = req.body.containerName;
var filename = req.body.filename;
var file = req.body.file;
if (!Buffer.isBuffer(file)) {
// Convert 'file' to a binary buffer
}
var options = { contentType: 'image/png' };
blobSvc.createBlockBlobFromText(containerName, filename, file, options, function (error, result, response) {
if (!error) {
res.send(result);
} else {
console.log(error);
}
});
})
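As a side note, the conversion placeholder above could be filled in with something like the sketch below, assuming the client sends the bytes base64-encoded (the accepted approach further down takes the same route); if a raw binary string arrives instead, 'binary' would be the encoding to use:

if (!Buffer.isBuffer(file)) {
  // Assumption: the client posted the file content as a base64 string.
  file = Buffer.from(file, 'base64');
}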
Below is the code to upload the image as binary in Angular using FormData.
The server code is simply whatever you would use to handle a regular file upload via a form.
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
var oData = new FormData(form);
var xhr = new XMLHttpRequest();
xhr.responseType = "arraybuffer";
xhr.open("POST", "http://vendorapi.net/Upload", true);
xhr.onload = function (oEvent) {
if (xhr.status == 200) {
var blob = new Blob([xhr.response], { type: "image/png" });
//var objectUrl = URL.createObjectURL(blob);
//window.open(objectUrl);
//console.log(blob);
var formData = new FormData()
formData.append('file', blob);
formData.append('containerName', boxContainerName);
formData.append('filename', 'Texture_0.png');
$http.post('/postAdvancedTest', formData, {
transformRequest: angular.identity,
headers: {'Content-Type': undefined}
}).success(function (data) {
//console.log(data);
console.log("success!");
// Clear previous 3D render
$('#webGL-container').empty();
// Generated new 3D render
$scope.generate3D();
}, function (err) {
//console.log(err);
});
} else {
oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
}
};
xhr.send(oData);
ev.preventDefault();
}, false);
I have solved the issue (thanks to Yang's input as well). I needed to base64-encode the data on the client side before passing it to Node, which then decodes it back into a file. I also needed to use XMLHttpRequest to retrieve the binary data properly, as jQuery AJAX appears to have an issue with binary responses (see: http://www.henryalgus.com/reading-binary-files-using-jquery-ajax/).
Here is my front end:
var form = document.forms.namedItem("fileinfo");
form.addEventListener('submit', function (ev) {
var oData = new FormData(form);
var xhr = new XMLHttpRequest();
xhr.responseType = "arraybuffer";
xhr.open("POST", "http://vendorapi.net/Upload", true);
xhr.onload = function (oEvent) {
if (xhr.status == 200) {
var blob = new Blob([xhr.response], { type: "image/png" });
//var objectUrl = URL.createObjectURL(blob);
//window.open(objectUrl);
console.log(blob);
var blobToBase64 = function(blob, cb) {
var reader = new FileReader();
reader.onload = function() {
var dataUrl = reader.result;
var base64 = dataUrl.split(',')[1];
cb(base64);
};
reader.readAsDataURL(blob);
};
blobToBase64(blob, function(base64){ // encode
var update = {'blob': base64};
var containerName = boxContainerName;
var filename = 'Texture_0.png';
$http.post('/postAdvancedTest', { containerName: containerName, filename: filename, file: base64}).success(function (data) {
//console.log(data);
console.log("success!");
// Clear previous 3D render
$('#webGL-container').empty();
// Generated new 3D render
$scope.generate3D();
}, function (err) {
//console.log(err);
});
})
} else {
oOutput.innerHTML = "Error " + xhr.status + " occurred when trying to upload your file.<br \/>";
}
};
xhr.send(oData);
ev.preventDefault();
}, false);
Node Backend:
app.post('/postAdvancedTest', function (req, res) {
var containerName = req.body.containerName
var filename = req.body.filename;
var file = req.body.file;
var buf = Buffer.from(file, 'base64'); // decode the base64 string back into binary
var tmpBasePath = 'upload/'; // this folder holds the files downloaded from the vendor URL and should already exist in the root directory
var tmpFolder = tmpBasePath + containerName + '/';
// Create unique temp directory to store files
mkdirp(tmpFolder, function (err) {
if (err) console.error(err)
else console.log('Directory Created')
});
// This is the location of download files, e.g. 'upload/Texture_0.png'
var tmpFileSavedLocation = tmpFolder + filename;
fs.writeFile(tmpFileSavedLocation, buf, function (err) {
if (err) {
console.log("err", err);
} else {
//return res.json({ 'status': 'success' });
blobSvc.createBlockBlobFromLocalFile(containerName, filename, tmpFileSavedLocation, function (error, result, response) {
if (!error) {
console.log("Uploaded" + result);
res.send(containerName);
}
else {
console.log(error);
}
});
}
})
})
