How to create a disk from a snapshot in a Google Cloud Function - Node.js - JavaScript

I have been struggling to find a solution to this particular problem. I've gone through almost all of the documentation for the gcloud/compute Node module that is used in Google Cloud Functions.
Now my challenge is to create a new disk from an existing snapshot in a Google Cloud Function.
I have used the code below to create a disk, since they haven't provided any example of creating a disk from a snapshot. The following Cloud Function creates a new disk named disk1, which is an entirely fresh, new disk. I don't want that; I want to create a disk from an existing snapshot which already has some data and setup on it.
exports.tempFunction = (req, res) => {
  // Example input: {"message": "Hello!"}
  const Compute = require('@google-cloud/compute');
  const compute = new Compute();
  const zone = compute.zone('us-central1-a');
  const disk = zone.disk('disk1');
  const config = {
    // ...
    // os: 'ubuntu'
  };
  disk.create(config, function(err, disk, operation, apiResponse) {
    // `disk` is a Disk object.
    // `operation` is an Operation object that can be used to check the
    // status of the request.
    console.log(err);
    console.log(disk);
    console.log(operation);
    console.log(apiResponse);
    res.status(200).send("success");
  });
};
Any help in this regard is highly appreciated.
P.S. I also tried using the Cloud APIs directly, but since I want to use only Cloud Functions, I couldn't figure out how to get an access token for gcloud to use inside a Cloud Function.

Disk creation [1] can be customized by setting the disk resource fields [2] in the config object.
In this case, set the sourceSnapshot field in the config to the existing snapshot's partial or full URL. The code should look like this:
exports.tempFunction = (req, res) => {
  // Example input: {"message": "Hello!"}
  const Compute = require('@google-cloud/compute');
  const compute = new Compute();
  const zone = compute.zone('us-central1-a');
  const disk = zone.disk('disk1');
  const config = {
    sourceSnapshot: "projects/{YOUR-PROJECT}/global/snapshots/{YOUR_SNAPSHOT}"
  };
  disk.create(config, function(err, disk, operation, apiResponse) {
    // `disk` is a Disk object.
    // `operation` is an Operation object that can be used to check the
    // status of the request.
    console.log(err);
    console.log(disk);
    console.log(operation);
    console.log(apiResponse);
    res.status(200).send("success");
  });
};
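If you prefer promises to callbacks, a minimal sketch along these lines should also work with the same library (untested; zone.createDisk() returns the new Disk plus an Operation, and operation.promise() resolves once the disk is actually ready):
exports.tempFunction = async (req, res) => {
  const Compute = require('@google-cloud/compute');
  const compute = new Compute();
  const zone = compute.zone('us-central1-a');

  const config = {
    sourceSnapshot: 'projects/{YOUR-PROJECT}/global/snapshots/{YOUR_SNAPSHOT}',
  };

  try {
    // Without a callback, createDisk() returns [disk, operation, apiResponse]
    const [disk, operation] = await zone.createDisk('disk1', config);
    // Wait for the long-running operation to complete
    await operation.promise();
    res.status(200).send(`Disk ${disk.name} created`);
  } catch (err) {
    console.error(err);
    res.status(500).send(err.message);
  }
};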

Related

How to delete a Firebase Storage folder from a Firebase Cloud Function?

I couldn't find the deleteFiles() method in the Firebase API reference. My IDE tells me this method takes an optional DeleteFilesOptions argument, and I couldn't find any information on that type either. If someone could point me to this documentation, I would appreciate it.
That said, I've seen a number of posts that use this method, with this argument, to delete an entire Storage folder (and all of its files) from a Cloud Function. My question is: is this the correct way to do it (since the documentation here is missing)?
const functions = require("firebase-functions");
const admin = require("firebase-admin");

exports.deleteStorageFolder = functions.https.onCall(async (data, _context) => {
  const uid = data.userId;
  try {
    const bucket = admin.storage().bucket(); // returns the default bucket, which is good
    await bucket.deleteFiles({
      prefix: `images/users/${uid}`, // the path of the folder
    });
    return Promise.resolve(true);
  } catch (error) {
    throw new functions.https.HttpsError("unknown", "Failed to delete storage folder.", error);
  }
});
As @Doug already mentioned in the comments, "Firebase just provides wrappers around Cloud Storage. They are the same thing." Also, according to this documentation, "Cloud Storage for Firebase stores your files in a Google Cloud Storage bucket, making them accessible through both Firebase and Google Cloud. This allows you the flexibility to upload and download files from mobile clients via the Firebase SDKs for Cloud Storage."
That being said, I've tried replicating the code snippet you've provided using deleteFiles(), and it worked fine on my end:
// The Firebase Admin SDK to access Firestore.
const functions = require("firebase-functions");
const admin = require("firebase-admin");

const firebaseConfig = {
  // Your Firebase configuration...
};
admin.initializeApp(firebaseConfig);

const bucket = admin.storage().bucket();

async function deleteFolder() {
  await bucket.deleteFiles({
    prefix: `images/users/${uid}`, // the path of the folder; `uid` must be in scope
  });
}

deleteFolder();
Another option is to use Google Cloud Storage directly and skip Firebase Storage:
const {Storage} = require('@google-cloud/storage');

const storage = new Storage();
const bucket = storage.bucket("your-bucket-name");

bucket.deleteFiles({
  prefix: `images/users/${uid}` // `uid` must be in scope
}, function(err) {
  if (!err) {
    console.log("All files in the `images` directory have been deleted.");
  }
});
Just a note: following Doug's suggestion, you can try and test it out first in your local or test environment. For further reference, see delete() and deleteFiles().
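For completeness, here is a minimal sketch of removing a single object with delete(), assuming the same bucket variable as above and a hypothetical file path:
// Deletes one object rather than everything under a prefix
bucket.file('images/users/some-uid/avatar.png')
  .delete()
  .then(() => console.log('File deleted'))
  .catch(err => console.error('ERROR:', err));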

File retrieval from AWS S3 to Node.js server, and then to React client

I need to retrieve individual files from Node.js with Express.js. For that, I have installed aws-sdk as well as @aws-sdk/client-s3. I am able to successfully fetch the file using this simple endpoint:
const app = express(),
  { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3'),
  s3 = new S3Client({ region: process.env.AWS_REGION });

app.get('/file/:filePath', async (req, res) => {
  const path_to_file = req.params.filePath;
  try {
    const data = await s3.send(new GetObjectCommand({ Bucket: process.env.AWS_BUCKET, Key: path_to_file }));
    console.log("Success", data);
  } catch (err) {
    console.log("Error", err);
  }
});
...but I have no idea how to return the data correctly to the React.js frontend so that the file can then be downloaded. I tried to look up the documentation, but it looks too messy to me, and I can't even work out what the function returns. The .toString() method didn't help because it simply returns "[object Object]" and nothing else.
On React, I am using the file-saver library, which works with blobs and offers them for download under a filename defined by the user.
Node v15.8.0, React v16.4.0, #aws-sdk/client-s3 v3.9.0, file-saver v2.0.5
Thanks for your tips and advice! Any help is highly appreciated.
Generally, data from S3 is returned as a buffer. The file contents are part of the Body param in the response. You might be calling toString on the root object; you need to call .toString() on the Body param to get a string.
Here is some sample code that might work for your use case:
// Note: this sample uses the v2 aws-sdk, a different import style from the question
const AWS = require("aws-sdk");
const s3 = new AWS.S3();
const Bucket = "my-bucket";

async function getObject(key) {
  const data = await s3.getObject({ Bucket, Key: key }).promise();
  if (data.Body) {
    return data.Body.toString("utf-8");
  } else {
    return undefined;
  }
}
To return this from Express, you can add this to your route and pass the final data back once you have it:
res.end(data);
Consuming it in React should be the same as taking values from any other REST API.
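For example, a minimal sketch on the React side with file-saver might look like this (assuming the /file/:filePath endpoint from the question and that the server returns the raw file bytes; untested):
import { saveAs } from 'file-saver';

async function downloadFile(filePath) {
  // Fetch the file from the Express endpoint and read it as a Blob
  const response = await fetch(`/file/${encodeURIComponent(filePath)}`);
  const blob = await response.blob();
  // Prompt the browser to save it under a user-visible name
  saveAs(blob, filePath);
}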
I used Ravi's answer, but it caused a problem displaying the image object in the frontend. This worked fine:
const data = await s3.send(new GetObjectCommand({ Bucket: process.env.AWS_BUCKET, Key: path_to_file }));
data.Body.pipe(res);
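If the browser needs to know what it is receiving, you can forward the object metadata before piping; a small sketch under the same assumptions:
// Forward content type and length so the client can handle the body correctly
res.setHeader('Content-Type', data.ContentType);
res.setHeader('Content-Length', data.ContentLength);
data.Body.pipe(res);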

How to save data from App Engine to Datastore in Google Cloud - JavaScript

I'm new to GCP and it's difficult for me to understand its documentation. I deployed my web app on App Engine. When I run my app locally, I save some data to a JSON file and it works perfectly. Now I need to save that JSON, which comes from the client side, somewhere on Google Cloud.
Based on my research, I need to store my data in Datastore. I need a clear example and explanation to learn how to store data from App Engine in Datastore on GCP.
Basically, I'm looking for a way to store my JSON so I can pass it to another app later.
I appreciate any help or suggestions.
const port = process.env.PORT || 8000;

app.use(express.static(__dirname + '/www'));
app.use(express.json({ limit: '1mb' }));

app.listen(port);
console.log('working on port ' + port);

app.post('/api', (request, response) => {
  const result = JSON.stringify(request.body);
  // creating my JSON file
  fs.appendFile('Result.json', result + "\n", (err) => {
    if (err) throw err;
    response.sendStatus(200); // respond so the request doesn't hang
  });
});
So, first of all, the basics. You'll need something like the following to initialize the client:
// Imports the Google Cloud client library
const {Datastore} = require('@google-cloud/datastore');

// Creates a client
const datastore = new Datastore();
Then to create a basic entity:
async function quickstart() {
  // The kind for the new entity
  const kind = 'Task';
  // The name/ID for the new entity
  const name = 'sampletask1';
  // The Cloud Datastore key for the new entity
  const taskKey = datastore.key([kind, name]);

  // Prepares the new entity
  const task = {
    key: taskKey,
    data: {
      description: 'Buy milk',
    },
  };

  // Saves the entity
  await datastore.save(task);
  console.log(`Saved ${task.key.name}: ${task.data.description}`);
}

quickstart();
So now that you can create a basic entity, you have different options. If the JSON object is not too big, you can put it as a value in the entity (store it as text), as in the sketch below.
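A minimal sketch of that approach (the kind, key name, and jsonPayload argument are hypothetical; excludeFromIndexes lifts the 1,500-byte limit on indexed string properties):
async function saveJson(jsonPayload) {
  // Store the whole JSON document as one unindexed string property;
  // unindexed string properties can hold up to ~1 MB instead of 1,500 bytes
  const entity = {
    key: datastore.key(['Payload', 'result1']),
    excludeFromIndexes: ['body'],
    data: {
      body: JSON.stringify(jsonPayload),
    },
  };
  await datastore.save(entity);
}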
Or the better approach is to store it as an array using something like this:
testArrayValue() {
  // [START datastore_array_value]
  const task = {
    tags: ['fun', 'programming'],
    collaborators: ['alice', 'bob'],
  };
  // [END datastore_array_value]

  return this.datastore.save({
    key: this.incompleteKey,
    data: task,
  });
}
Depending on your JSON you might even want to create nested arrays but the logic is the same.
You can also use Cloud Storage instead and simply treat the JSON file as an object: store it in the /tmp directory of GAE and upload it to the bucket. Then, on the other side, download it to the /tmp directory of that app and process it as a JSON file. Here are the basics on how to get started with Cloud Storage.
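A minimal sketch of the upload half, assuming a hypothetical bucket name and object path:
const {Storage} = require('@google-cloud/storage');
const fs = require('fs');

const storage = new Storage();

async function uploadJson(result) {
  // /tmp is the only writable directory on App Engine standard
  const tmpPath = '/tmp/Result.json';
  fs.writeFileSync(tmpPath, JSON.stringify(result));
  // Copy the file into the bucket so the other app can download it later
  await storage.bucket('your-bucket-name').upload(tmpPath, {
    destination: 'results/Result.json',
  });
}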

Create a file in a cloud storage bucket from within a trigger

I would like to be able to create a file in a project bucket as part of a Firestore Cloud trigger.
When there is a change to a document in a specific collection, I need to be able to take data from that document and write it to a file in a bucket in Cloud Storage.
Example
exports.myFunction = functions.firestore
  .document('documents/{docId}')
  .onUpdate((change, context) => {
    const after = change.after.data() as Document;
    // CREATE AND WRITE TO file IN BUCKET HERE
  });
I have found many examples of how to upload files. I have explored
admin.storage().bucket().file(path)
createWriteStream()
write()
but I can't seem to find documentation on how exactly to achieve the above.
Is this possible from within a trigger, and if so, where can I find documentation on how to do this?
Here is why I want to do this (just in case I am approaching this all wrong): we have an application where our users are able to generate purchase orders for work they have done. When they initiate a generate from the software, we need to create a timestamped document [pdf] (in a secure location, but one that is accessible to authenticated users) representing this purchase order. The data to create it will come from the document that triggers the change.
As @Doug Stevenson said, you can use Node streams.
You can see how to do this in this sample from the GCP getting started samples repo.
You need to provide a file name and the file buffer in order to stream it to GCS:
function sendUploadToGCS(req, res, next) {
  if (!req.file) {
    return next();
  }

  const gcsname = Date.now() + req.file.originalname;
  const file = bucket.file(gcsname);

  const stream = file.createWriteStream({
    metadata: {
      contentType: req.file.mimetype,
    },
    resumable: false,
  });

  stream.on('error', err => {
    req.file.cloudStorageError = err;
    next(err);
  });

  stream.on('finish', async () => {
    req.file.cloudStorageObject = gcsname;
    await file.makePublic();
    req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
    next();
  });

  stream.end(req.file.buffer);
}
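Since the question is about writing from inside the trigger rather than from an HTTP upload, here is a minimal sketch of the same idea using file.save(), which wraps the write stream for you. The bucket, object path, and JSON payload are assumptions; you would swap in your PDF bytes instead (untested):
const admin = require('firebase-admin');

exports.myFunction = functions.firestore
  .document('documents/{docId}')
  .onUpdate(async (change, context) => {
    const after = change.after.data();
    // Write to the project's default bucket under a hypothetical path
    const file = admin.storage().bucket().file(`orders/${context.params.docId}.json`);
    // save() accepts a string or Buffer and handles the stream internally
    await file.save(JSON.stringify(after), {
      contentType: 'application/json',
    });
  });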

Get image URL from Firebase Storage (Admin SDK)

I have to upload an image to Firebase Storage. I'm not using the web version of Storage (I shouldn't use it); I am using the Firebase Admin SDK.
No problem: I upload the file without difficulty and get the result in the variable file.
And if I open the Firebase Storage console, the image is there. All right.
return admin.storage().bucket().upload(filePath, {
  destination: 'demo/images/restaurantCover.jpg',
  metadata: { contentType: 'image/jpeg' },
  public: true
}).then(file => {
  console.log(`file --> ${JSON.stringify(file, null, 2)}`);
  let url = file["0"].metadata.mediaLink; // image url
  return resolve(res.status(200).send({ data: file })); // huge data
});
Now, I have some questions.
Why so much information, and so many objects, in the response to the upload() method? Reviewing the immense object, I found a property called mediaLink inside metadata, and it is the download URL of the image. But...
Why is the URL different from the one shown by Firebase? Why can't I find the downloadURL property?
How can I get the Firebase URL?
firebase: https://firebasestorage.googleapis.com/v0/b/myfirebaseapp.appspot.com/o/demo%2Fimages%2Fthumb_restaurant.jpg?alt=media&token=bee96b71-2094-4492-96aa-87469363dd2e
mediaLink: https://www.googleapis.com/download/storage/v1/b/myfirebaseapp.appspot.com/o/demo%2Fimages%2Frestaurant.jpg?generation=1530193601730593&alt=media
If I use the mediaLink URL, is there any problem with the URLs being different? (read, update from iOS and web clients)
Looking at the Google Cloud Storage: Node.js Client documentation, they have a link to sample code which shows exactly how to do this. Also, see the File class documentation example (below):
// Imports the Google Cloud client library
const Storage = require('@google-cloud/storage');

// Creates a client
const storage = new Storage();

/**
 * TODO(developer): Uncomment the following lines before running the sample.
 */
// const bucketName = 'Name of a bucket, e.g. my-bucket';
// const filename = 'File to access, e.g. file.txt';

// Gets the metadata for the file
storage
  .bucket(bucketName)
  .file(filename)
  .getMetadata()
  .then(results => {
    const metadata = results[0];
    console.log(`File: ${metadata.name}`);
    console.log(`Bucket: ${metadata.bucket}`);
    console.log(`Storage class: ${metadata.storageClass}`);
    console.log(`Self link: ${metadata.selfLink}`);
    console.log(`ID: ${metadata.id}`);
    console.log(`Size: ${metadata.size}`);
    console.log(`Updated: ${metadata.updated}`);
    console.log(`Generation: ${metadata.generation}`);
    console.log(`Metageneration: ${metadata.metageneration}`);
    console.log(`Etag: ${metadata.etag}`);
    console.log(`Owner: ${metadata.owner}`);
    console.log(`Component count: ${metadata.component_count}`);
    console.log(`Crc32c: ${metadata.crc32c}`);
    console.log(`md5Hash: ${metadata.md5Hash}`);
    console.log(`Cache-control: ${metadata.cacheControl}`);
    console.log(`Content-type: ${metadata.contentType}`);
    console.log(`Content-disposition: ${metadata.contentDisposition}`);
    console.log(`Content-encoding: ${metadata.contentEncoding}`);
    console.log(`Content-language: ${metadata.contentLanguage}`);
    console.log(`Metadata: ${metadata.metadata}`);
    console.log(`Media link: ${metadata.mediaLink}`);
  })
  .catch(err => {
    console.error('ERROR:', err);
  });
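If you want a stable download URL without relying on the Firebase-minted token, a minimal sketch using getSignedUrl() looks like this (assuming the same bucketName and filename as above; note that signing requires the service account to have signing permission):
// Generates a time-limited URL that you control, independent of the
// token-based firebasestorage.googleapis.com URL minted by the Firebase SDKs
storage
  .bucket(bucketName)
  .file(filename)
  .getSignedUrl({ action: 'read', expires: '01-01-2500' })
  .then(([url]) => {
    console.log(`Signed URL: ${url}`);
  })
  .catch(err => {
    console.error('ERROR:', err);
  });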
