Google Drive always exports blank PDF via API - JavaScript

I have a Google Slides presentation on my Google Drive and I want to export it programmatically to PDF. The export itself works, but the downloaded file is always blank, despite having the right number of pages.
Here's my code:
function exportFile(auth, id) {
  const drive = google.drive({
    version: "v3",
    auth: auth
  });
  drive.files.export(
    {
      fileId: id,
      mimeType: "application/pdf"
    },
    (err, res) => {
      if (err) {
        console.log(err);
      } else {
        fs.writeFile("local.pdf", res.data, function(err) {
          if (err) {
            return console.log(err);
          }
        });
      }
    }
  );
}
fs.readFile("credentials.json", (err, content) => {
  if (err) return console.log("Error loading client secret file:", err);
  // Authorize a client with credentials, then call the Google Drive API.
  authorize(JSON.parse(content), auth => {
    exportFile(auth, "1mtxWDrPCt8EL_UoSUbrLv38Cu8_8LUm0onSv0MPCIbk");
  });
});
The generated file has the correct number of slides (2), but the content is blank.
Any idea what I'm missing? Thanks a lot!

From your question, I understand that you are already able to export the file from Google Drive with the Drive API. So how about this modification?
Modified script:
Please modify exportFile() as follows, adding responseType to the request options.
function exportFile(auth, id) {
  const drive = google.drive({
    version: "v3",
    auth: auth
  });
  drive.files.export(
    {
      fileId: id,
      mimeType: "application/pdf"
    },
    { responseType: "arraybuffer" }, // Added
    (err, res) => {
      if (err) {
        console.log(err);
      } else {
        fs.writeFile("local.pdf", Buffer.from(res.data), function(err) { // Modified
          if (err) {
            return console.log(err);
          }
        });
      }
    }
  );
}
Note:
This assumes you are using the latest version of googleapis.
References:
Request-level options
google-api-nodejs-client
If this was not the direction you want, I apologize.

@Tanaike is a lifesaver, thank you so much! Based on your solution I came to this, which also works:
const writingFile = util.promisify(fs.writeFile);
const pdf = await drive.files.export(
  { fileId: id, mimeType: 'application/pdf' },
  { responseType: 'arraybuffer' }
);
await writingFile('some document.pdf', Buffer.from(pdf.data), 'binary');
For people preferring async / await instead of callbacks.
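If the exported file is large, a stream-based variant avoids buffering the whole PDF in memory. A minimal sketch, assuming the same authorized drive client and Node 15+ for stream/promises:

// Sketch: stream the export straight to disk instead of buffering it all.
// Assumes `drive` is an authorized google.drive({ version: "v3" }) client.
const fs = require("fs");
const { pipeline } = require("stream/promises");

async function exportFileToDisk(drive, id, dest) {
  const res = await drive.files.export(
    { fileId: id, mimeType: "application/pdf" },
    { responseType: "stream" }
  );
  // pipeline resolves once the write stream finishes and rejects on error.
  await pipeline(res.data, fs.createWriteStream(dest));
}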

Related

File content missing when I download from S3

I am using the Node.js AWS SDK for S3-related methods. I have a method to download a file from an S3 bucket.
I am downloading the file using the code below.
const downloadFileBase64 = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    const response = await s3
      .getObject(params, (err) => {
        if (err) {
          return err;
        }
      })
      .promise();
    return {
      data: response.Body.toString('base64'),
      fileName: payload.fileName
    };
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
Once I get the base64 content, I send it over email using SendGrid.
Issue: When I download small files, everything works fine. But when I download large files, parts of the file are missing across multiple pages. I pasted the base64 into a few online decoders and downloaded the file from there, and those show the same problem, so I concluded that the response coming back from S3 itself is corrupted. When I check the file in its S3 folder, it is intact.
In the screenshot of the PDF, a few pages have a random grey background and some text is missing.
I tried another method that returns the buffer directly, skipping the base64 conversion, as shown below.
const downloadFileBuffer = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    const response = await s3
      .getObject(params, (err) => {
        if (err) {
          return err;
        }
      })
      .promise();
    return {
      data: response.Body,
      fileName: payload.fileName
    };
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
Once I get the file content in this response, I temporarily store it in a folder on the server, then read it again and send it over email. But I still have the same issue.
const fileContent = await docs.downloadFileBuffer({ payload: req.payload.action.dire });
await fs.writeFileSync(`${temp}testinggg.pdf`, fileContent?.data);
const fileData = await fs.readFileSync(`${temp}testinggg.pdf`, { encoding: 'base64' });
Any help on this issue is really appreciated.
After days of research and trying different approaches, I found the issue. It was the .promise() used in s3.getObject(params, (err) => {}).promise(): passing a callback and calling .promise() on the same request appears to send the request twice, corrupting the downloaded body. Instead, I wrapped the callback in a Promise myself, as shown below. Now the file shows its full content without any missing data.
const downloadFileBuffer = async (payload) => {
  let params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  try {
    return new Promise((resolve, reject) => {
      s3.getObject(params, (err, response) => {
        if (err) {
          return reject(err); // return so we don't fall through to resolve
        }
        resolve({
          data: response.Body,
          fileName: payload.fileName
        });
      });
    });
  } catch (error) {
    return Boom.badRequest(error.message);
  }
};
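A simpler variant, under the same assumptions about s3 and the bucket layout, is to drop the callback entirely: in the v2 AWS SDK, getObject(params).promise() already returns a promise, and with no callback the request is only sent once.

// Sketch: no callback, so .promise() is the only thing sending the request.
const downloadFileBuffer = async (payload) => {
  const params = { Bucket: s3BucketName, Key: `${payload.folderName}/${payload.fileName}` };
  const response = await s3.getObject(params).promise();
  return { data: response.Body, fileName: payload.fileName };
};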

How to download a file with Node.js from the Google Drive API

I don't need anything special. I only want to download a file from Google Drive and then send it to the client who requested it.
app.get("/download", function(req, res) {
  const p38290token = new google.auth.OAuth2(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI);
  p38290token.setCredentials({ refresh_token: token.acc });
  const p38290Id = google.drive({
    version: "v3",
    auth: p38290token,
  });
  var dest = fs.createWriteStream("./test.png");
  try {
    p38290Id.files.get({
      fileId: "1daaxy0ymKbMro-e-JnexmGvM4WzW-3Hn",
      alt: "media"
    }, { responseType: "stream" },
      (err, res) => {
        res.data
          .on("end", () => {
            console.log("Done");
          })
          .on("error", err => {
            console.log("Error", err);
          })
          .pipe(dest); // I want to send this file to the client who requests "/download"
      }
    );
  } catch (error) {
  }
});
I want it so that when someone visits www.xyz.com/download, the file downloads automatically.
The issue seems to be with this line:
var dest = fs.createWriteStream("./test.png");
You are using a file system command which is meant to interact with files on the server. Your question makes it clear that you wish for express to deliver the contents of the file over to the client making the HTTP request.
For that you can just use the res parameter of the route callback function. You declare it on this line:
app.get("/download",function(req,res){
In your case I'd remove the dest variable completely and simply pipe the file to res like so (note that the inner callback parameter is also named res, shadowing Express's res, so rename one of them first):
.pipe(res);
Have a look at this answer as well.
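Putting it together, a minimal sketch of the route, renaming the inner callback parameter to driveRes to avoid the shadowing (the credentials, token, and file ID are the ones from the question):

app.get("/download", function(req, res) {
  const authClient = new google.auth.OAuth2(CLIENT_ID, CLIENT_SECRET, REDIRECT_URI);
  authClient.setCredentials({ refresh_token: token.acc });
  const drive = google.drive({ version: "v3", auth: authClient });
  drive.files.get(
    { fileId: "1daaxy0ymKbMro-e-JnexmGvM4WzW-3Hn", alt: "media" },
    { responseType: "stream" },
    (err, driveRes) => {
      if (err) return res.status(500).send("Download failed");
      // Suggest a filename so the browser saves instead of rendering inline.
      res.setHeader("Content-Disposition", 'attachment; filename="test.png"');
      driveRes.data.pipe(res);
    }
  );
});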

How to download dynamic files from Google Drive

Noob question, I'm just getting started with Google Drive API v3. How can I download a dynamic file from Google Drive when I only have the fileId? The file can be an image, a PDF, or a document.
I tried searching, but I couldn't find any reference or example related to this.
This is what I have so far, but it only downloads a specific file extension.
downloadFile(req, res) {
  const auth = new google.auth.JWT(
    client_email,
    null,
    private_key,
    SCOPES,
  );
  const { fileId } = req.params;
  const drive = google.drive({ version: 'v3', auth });
  var dest = fs.createWriteStream('./tmp/downloads/dummy.pdf')
  drive.files.get({
    fileId,
    alt: 'media',
  }, {
    responseType: 'stream'
  }).then((driveResponse) => {
    driveResponse.data.on('end', () => {
      console.log(`downloading fileID ${fileId}`);
    })
    .on('error', (err) => {
      console.log(err);
    })
    .on('data', (d) => {
      console.log(d);
    })
    .pipe(dest)
  })
  .catch((err) => {
    console.log(err);
  })
}
Is there a way to download dynamic files from Google Drive?
I believe your goal is as follows.
You want to download files from Google Drive using the service account and the file ID.
The files include both Google Docs files and files other than Google Docs files.
You want to achieve this using googleapis for Node.js.
Modification points:
Unfortunately, from "it only downloads a specific file extension", I cannot understand the details of your situation. But I guess that the reason for your issue might be that you are downloading both Google Docs files and non-Google Docs files.
When Google Docs files are downloaded, they must be downloaded with the "Files: export" method of the Drive API.
When files other than Google Docs files are downloaded, they must be downloaded with the "Files: get" method of the Drive API.
I thought that the above situation might be the reason for your issue.
In order to download both Google Docs files and other files, I propose the following flow:
Check the mimeType of the file.
Download the file using the method appropriate for that mimeType.
When the above points are reflected in your script, it becomes as follows.
Modified script:
From:
var dest = fs.createWriteStream('./tmp/downloads/dummy.pdf')
drive.files.get({
  fileId,
  alt: 'media',
}, {
  responseType: 'stream'
}).then((driveResponse) => {
  driveResponse.data.on('end', () => {
    console.log(`downloading fileID ${fileId}`);
  })
  .on('error', (err) => {
    console.log(err);
  })
  .on('data', (d) => {
    console.log(d);
  })
  .pipe(dest)
})
.catch((err) => {
  console.log(err);
})
To:
drive.files.get({ fileId, fields: "*" }, async (err, { data }) => {
  if (err) {
    console.log(err);
    return;
  }
  let filename = data.name;
  const mimeType = data.mimeType;
  let res;
  if (mimeType.includes("application/vnd.google-apps")) {
    const convertMimeTypes = {
      "application/vnd.google-apps.document": {
        type: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        ext: ".docx",
      },
      "application/vnd.google-apps.spreadsheet": {
        type: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        ext: ".xlsx",
      },
      "application/vnd.google-apps.presentation": {
        type: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
        ext: ".pptx",
      },
    };
    filename += convertMimeTypes[mimeType].ext;
    res = await drive.files.export(
      {
        fileId,
        mimeType: convertMimeTypes[mimeType].type,
      },
      { responseType: "stream" }
    );
  } else {
    res = await drive.files.get(
      {
        fileId,
        alt: "media",
      },
      { responseType: "stream" }
    );
  }
  const dest = fs.createWriteStream(filename);
  res.data
    .on("end", () => console.log("Done."))
    .on("error", (err) => {
      console.log(err);
      return process.exit();
    })
    .pipe(dest);
});
Note:
In this modification, I prepared 3 types of Google Docs files in convertMimeTypes. When you want to download other mimeTypes, please modify convertMimeTypes. In this case, for example, Google Docs files are downloaded as Microsoft Word files.
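For example, to export every Google editors file as PDF instead of its Office counterpart, each entry could map to application/pdf; a sketch:

// Sketch: export Docs, Sheets, and Slides as PDF instead of Office formats.
const convertMimeTypes = {
  "application/vnd.google-apps.document":     { type: "application/pdf", ext: ".pdf" },
  "application/vnd.google-apps.spreadsheet":  { type: "application/pdf", ext: ".pdf" },
  "application/vnd.google-apps.presentation": { type: "application/pdf", ext: ".pdf" },
};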
References:
Download files
Files: get
Files: export

Turning image into blob in React Native (Expo) and uploading to S3 bucket

I have a camera component with which I take a picture. I store the picture using Expo's FileSystem in the local cacheDirectory. It looks something like this:
onPictureSaved = async photo => {
  await FileSystem.moveAsync({
    from: photo.uri,
    to: `${FileSystem.cacheDirectory}test.jpg`
  });
};
My next step is converting the image in the local cacheDirectory into a blob and uploading it to S3 via the aws-sdk:
var params = {
  Bucket: "my-bucket",
  Key: 'test.jpg',
  Body: blob
};
s3.upload(params, function(err, data) {
  if (err) {
    console.log(err); // an error occurred
  } else {
    console.log(data); // successful response
  }
});
However, none of the methods or modules I've installed to accomplish this tiny step have worked at all. I can't use RNFS, react-native-fetch-blob, or any other modules that require linking, thanks to the Expo client. I don't want to detach Expo just for one thing. Is there any other way to accomplish this?
Take a look at https://github.com/expo/image-upload-example/issues/3#issuecomment-387263080. The latest Expo release supports blobs, so you can do something like the following:
uploadToS3 = async (fileUri, s3Bucket, s3Key) => {
  const response = await fetch(fileUri);
  const blob = await response.blob();
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: s3Bucket,
      Key: s3Key,
      Body: blob,
    };
    s3.upload(params, function(err, data) {
      if (err) {
        console.log('Something went wrong');
        console.log(err);
        reject(err);
      } else {
        console.log('Successfully uploaded image');
        resolve(data);
      }
    });
  });
};
Hope this helps!
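Usage might look like this, assuming s3 is a configured AWS.S3 instance and reusing the cache path from onPictureSaved above (the bucket name is a placeholder):

// Sketch: upload the cached camera picture after onPictureSaved has run.
const fileUri = `${FileSystem.cacheDirectory}test.jpg`;
uploadToS3(fileUri, "my-bucket", "test.jpg")
  .then(data => console.log("Uploaded to", data.Location))
  .catch(err => console.log("Upload failed", err));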

How to upload multiple files to the Google Drive API using a for loop (forEach)

I use the Google Drive API to upload multiple files.
I faced a problem of running out of RAM while uploading multiple files. I tried to use forEach (a for loop) to avoid uploading multiple files at the same time, but it doesn't work the way I expected: it always loops through the entire list of files and uploads them all at the same time.
I tried to use async/await syntax to block the loop, but it didn't work the way I expected either.
Here is my code:
const fs = require("fs");
const readline = require("readline");
const { google } = require("googleapis");

let files = ["file1.mp4", "file2.mp4"];
const SCOPES = ["https://www.googleapis.com/auth/drive.metadata.readonly"];
const TOKEN_PATH = "token.json";

fs.readFile("credentials.json", (err, content) => {
  if (err) return console.log("Error loading client secret file:", err);
  // Authorize a client with credentials, then call the Google Drive API.
  authorize(JSON.parse(content), uploadFiles);
});

function authorize(credentials, callback) {
  const { client_secret, client_id, redirect_uris } = credentials.installed;
  const oAuth2Client = new google.auth.OAuth2(
    client_id,
    client_secret,
    redirect_uris[0]
  );
  // Check if we have previously stored a token.
  fs.readFile(TOKEN_PATH, (err, token) => {
    if (err) return getAccessToken(oAuth2Client, callback);
    oAuth2Client.setCredentials(JSON.parse(token));
    callback(oAuth2Client);
  });
}

function getAccessToken(oAuth2Client, callback) {
  const authUrl = oAuth2Client.generateAuthUrl({
    access_type: "offline",
    scope: SCOPES
  });
  console.log("Authorize this app by visiting this url:", authUrl);
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  rl.question("Enter the code from that page here: ", code => {
    rl.close();
    oAuth2Client.getToken(code, (err, token) => {
      if (err) return console.error("Error retrieving access token", err);
      oAuth2Client.setCredentials(token);
      // Store the token to disk for later program executions
      fs.writeFile(TOKEN_PATH, JSON.stringify(token), err => {
        if (err) console.error(err);
        console.log("Token stored to", TOKEN_PATH);
      });
      callback(oAuth2Client);
    });
  });
}
async function uploadFiles(auth) {
  for (const file of files) {
    var fileMetadata = {
      name: file
    };
    var media = {
      body: fs.createReadStream("test/" + file)
    };
    const drive = google.drive({ version: "v3", auth }); // assign the client so drive is defined
    const result = await drive.files.create(
      {
        resource: fileMetadata,
        media: media,
        fields: "id"
      },
      function(err, fileid) {
        if (err) {
          // Handle error
          console.error(err);
        } else {
          console.log("File Id: ", fileid.data.id);
          console.log("Uploaded..:" + file);
        }
      }
    );
    console.log("Uploading file..:" + file);
  }
}
I just want to ask: why doesn't the loop upload the files one at a time?
I try to use forEach (for loop) for my code to avoid uploading multiple files at the same time
You can't this way: because you passed a callback as an argument to drive.files.create, it does not return a promise, so await has nothing to wait on and the loop fires all the uploads at once.
By the way, if you want to use async/await, you should wrap your function in a promisified one.
function myCreateFunc(fileInfos) {
  return new Promise((resolve, reject) => {
    drive.files.create(fileInfos, function callback(err, file) {
      if (err) {
        return reject(err); // return so we don't also resolve
      }
      resolve(file);
    });
  });
}
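With that wrapper, or simply by omitting the callback (googleapis returns a promise when no callback is passed), the loop really does wait for each upload before starting the next. A sketch under the question's setup:

// Sketch: sequential uploads; each create() resolves before the next starts,
// so only one read stream is in flight at a time.
async function uploadFiles(auth) {
  const drive = google.drive({ version: "v3", auth });
  for (const file of files) {
    const res = await drive.files.create({
      resource: { name: file },
      media: { body: fs.createReadStream("test/" + file) },
      fields: "id",
    });
    console.log("Uploaded", file, "as", res.data.id);
  }
}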
Even after a long time, I will post my answer, because I had the same problem.
Requirements:
Enable APIs and services for your project.
Create a service account and download the key.
If you need to upload your files into a folder, you need the folder ID.
Install googleapis using npm install googleapis.
Follow these steps to get your folder ID:
To find the folder ID, you need to give the service account user permission.
Share the folder with edit access and copy its link.
You will see something like this: https://drive.google.com/drive/u/1/folders/xxxxxXXXXXXxxxxxxxXXXXXXX
In this case the folder ID is xxxxxXXXXXXxxxxxxxXXXXXXX
const { google } = require("googleapis");
var fs = require("fs");

const KEYFILEPATH = "path to your keyfile.json";
const SCOPES = ["https://www.googleapis.com/auth/drive"];

const auth = new google.auth.GoogleAuth({
  keyFile: KEYFILEPATH,
  scopes: SCOPES,
});

const uploadFiles = async (auth) => {
  const driveService = google.drive({ version: "v3", auth });
  let count = 0;
  for (const file of fs.readdirSync(
    "full file path to your images/files folder"
  )) {
    // Log the file name.
    console.log(JSON.stringify(file));
    let fileMetadata = {
      name: file,
      parents: ["folder id"], // Optional
    };
    let media = {
      mimeType: "image/jpeg",
      body: fs.createReadStream(
        `full file path to your images/files folder/${file}`
      ),
    };
    const task = driveService.files.create({
      resource: fileMetadata,
      media: media,
      fields: "id",
    });
    try {
      await task;
      count = count + 1;
    } catch (e) {
      console.log(e);
      return null;
    }
  }
  // Log the total count of uploaded files.
  console.log("Count :", count);
  return;
};

uploadFiles(auth).catch(console.error);
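Awaiting each driveService.files.create call before starting the next means only one read stream is open at a time, which is what keeps memory usage flat compared with firing all the uploads at once.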
