No 'Access-Control-Allow-Origin' header is present when creating BlobServiceWithSas - javascript

This is the first time I have used the Azure Storage JS API. I have followed the instructions in this Microsoft tutorial.
I generate the SAS token on the Node server successfully, but I still get an authentication failed error. I'm using the libraries provided by Microsoft Azure. How can I fix this?
function test() {
    Restangular.all('cdn/sas').post({container: 'photos'}).then(function (sas) {
        var blobUri = 'https://hamsar.blob.core.windows.net';
        var blobService = AzureStorage.createBlobServiceWithSas(blobUri, sas.token);
        blobService.listContainersSegmented(null, function (error, results) {
            if (error) {
                // List container error
            } else {
                // Deal with container object
            }
        });
    }, function (error) {
        console.log("Error generating SAS: ", error);
    });
}
Error messages:

According to your error message, you are creating a Service SAS token. If you want to list all the container names in your storage account, you need to use an Account SAS token instead.
Notice: you could also use blobService.listBlobsSegmented with your Service SAS token, but make sure the token has permission to list blobs and that you pass the container name.
Like this:
blobService.listBlobsSegmented('mycontainer', null, function (error, results) {
    // handle error / results.entries here
});
If you want to list all the containers, I suggest you follow the code below to generate an Account SAS.
Code like this:
// Server side: these constants come from the azure-storage package.
var azure = require('azure-storage');
var AccountSasConstants = azure.Constants.AccountSasConstants;

var getPolicyWithFullPermissions = function () {
    var startDate = new Date();
    var expiryDate = new Date();
    startDate.setTime(startDate.getTime() - 1000);
    expiryDate.setTime(expiryDate.getTime() + 24 * 60 * 60 * 1000);
    var sharedAccessPolicy = {
        AccessPolicy: {
            Services: AccountSasConstants.Services.BLOB +
                AccountSasConstants.Services.FILE +
                AccountSasConstants.Services.QUEUE +
                AccountSasConstants.Services.TABLE,
            ResourceTypes: AccountSasConstants.Resources.SERVICE +
                AccountSasConstants.Resources.CONTAINER +
                AccountSasConstants.Resources.OBJECT,
            Permissions: AccountSasConstants.Permissions.READ +
                AccountSasConstants.Permissions.ADD +
                AccountSasConstants.Permissions.CREATE +
                AccountSasConstants.Permissions.UPDATE +
                AccountSasConstants.Permissions.PROCESS +
                AccountSasConstants.Permissions.WRITE +
                AccountSasConstants.Permissions.DELETE +
                AccountSasConstants.Permissions.LIST,
            Protocols: AccountSasConstants.Protocols.HTTPSORHTTP,
            Start: startDate,
            Expiry: expiryDate
        }
    };
    return sharedAccessPolicy;
};

var sharedAccessSignature = azure.generateAccountSharedAccessSignature(
    environmentAzureStorageAccount,
    environmentAzureStorageAccessKey,
    getPolicyWithFullPermissions()
);
Then you could use the Account SAS to list the account's containers, as in the sketch below.
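A minimal client-side sketch, assuming your server returns the Account SAS string generated above to the browser (the blobUri and the accountSasToken variable are placeholders based on your original code):
// Sketch: pass the Account SAS token to createBlobServiceWithSas and list containers.
// Assumes the AzureStorage browser bundle is loaded on the page.
var blobUri = 'https://hamsar.blob.core.windows.net';
var blobService = AzureStorage.createBlobServiceWithSas(blobUri, accountSasToken);
blobService.listContainersSegmented(null, function (error, results) {
    if (error) {
        console.log('List container error: ', error);
    } else {
        console.log('Containers: ', results.entries);
    }
});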
For more details about the difference between a Service SAS and an Account SAS, you could refer to this article.

Related

How can we record a Twilio outbound call initiated from browser to phone?

I am using the Twilio JavaScript client to make calls to any number, but I am not able to find any solution for recording them.
How do we record an outbound call from the browser to a phone? Is there any server-side or client-side solution for this?
I was able to place a call successfully using the quickstart.js file with the code below:
document.getElementById('button-call').onclick = function () {
    try {
        if (document.getElementById('CallTo').value != '') {
            var params = {
                To: "+" + document.getElementById('CallTo').value,
                record: 'record-from-ringing-dual'
            };
            log('Calling ' + params.To + '...');
            console.log('Calling ' + params.To + '...');
            if (device) {
                var outgoingConnection = device.connect(params);
                outgoingConnection.on('ringing', function () {
                    log('Ringing...');
                    document.getElementById('hdnCallIDs').value = outgoingConnection.parameters.CallSid;
                    log(document.getElementById('hdnCallIDs').value);
                });
            }
        } else {
            log('Enter Dialing number...');
        }
    }
    catch (err) {
        log(err.message);
    }
};
Recording is controlled by the TwiML returned from the Voice URL of your TwiML Application: put a Dial verb with its record attribute set in that response. You instruct the Twilio clients to use a specific TwiML Application SID when creating their access tokens, so that Voice URL is what drives the outbound call. A sketch of such a Voice URL handler is below.
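A minimal sketch of a Voice URL handler in Node/Express, assuming the browser client passes To in the params it gives to device.connect; the /voice route path and the caller ID are placeholders:
// Hypothetical Express handler for the TwiML Application's Voice URL.
const express = require('express');
const { VoiceResponse } = require('twilio').twiml;

const app = express();
app.use(express.urlencoded({ extended: false }));

app.post('/voice', (req, res) => {
    const twiml = new VoiceResponse();
    // record on <Dial> turns on call recording; 'record-from-ringing-dual'
    // records both legs starting from the ringing state.
    const dial = twiml.dial({
        callerId: '+15005550006',               // placeholder caller ID
        record: 'record-from-ringing-dual'
    });
    dial.number(req.body.To);                   // the number sent from the browser client
    res.type('text/xml');
    res.send(twiml.toString());
});
If you need to know when the recording is ready server-side, you can also set a recordingStatusCallback URL on the same Dial attributes.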

Azure blob storage sas token generation using js

I have this function running in an Azure Function to get a SAS token for a browser application to upload to Azure Blob Storage:
var azure = require('azure-storage');

module.exports = function (context, req) {
    if (req.body.container) {
        // The following values can be used for permissions:
        // "a" (Add), "r" (Read), "w" (Write), "d" (Delete), "l" (List)
        // Concatenate multiple permissions, such as "rwa" = Read, Write, Add
        context.res = generateSasToken(
            context,
            req.body.container,
            req.body.blobName,
            req.body.permissions
        );
    } else {
        context.res = {
            status: 400,
            body: "Specify a value for 'container'"
        };
    }
    context.done(null, context);
};

function generateSasToken(context, container, blobName, permissions) {
    var connString = process.env.AzureWebJobsStorage;
    var blobService = azure.createBlobService(connString);
    // Create a SAS token that expires in an hour
    // Set start time to five minutes ago to avoid clock skew.
    var startDate = new Date();
    startDate.setMinutes(startDate.getMinutes() - 5);
    var expiryDate = new Date(startDate);
    expiryDate.setMinutes(startDate.getMinutes() + 60);
    permissions = azure.BlobUtilities.SharedAccessPermissions.READ +
        azure.BlobUtilities.SharedAccessPermissions.WRITE +
        azure.BlobUtilities.SharedAccessPermissions.DELETE +
        azure.BlobUtilities.SharedAccessPermissions.LIST;
    var sharedAccessPolicy = {
        AccessPolicy: {
            Permissions: permissions,
            Start: startDate,
            Expiry: expiryDate
        }
    };
    var sasToken = blobService.generateSharedAccessSignature(
        container,
        blobName,
        sharedAccessPolicy
    );
    context.log(sasToken);
    return {
        token: sasToken,
        uri: blobService.getUrl(container, blobName, sasToken, true)
    };
}
I am then calling this URL from the client and trying to upload with this code:
const search = new URLSearchParams(`?${token}`);
const sig = encodeURIComponent(search.get('sig'));
const qs = `?sv=${search.get('sv')}&ss=b&srt=sco&sp=rwdlac&se=${search.get('sv')}&st=${search.get('st')}&spr=https&sig=${sig}`;
return `${url}/${containerName}/${filename}${qs}`;
This generates a URL like this:
https://mystorage.blob.core.windows.net/mycontainer/latest.png?sv=2018-03-28&ss=b&srt=sco&sp=rwdlac&se=2018-03-28&st=2019-01-30T19:11:10Z&spr=https&sig=g0sceq3EkiAQTvyaZ07C+C4SZQz9FaGTV4Zwq4HkAnc=
Which returns this error:
403 (Server failed to authenticate the request. Make sure the value of Authorization header is formed correctly including the signature.)
If I generate the SAS token from the Azure portal it works; that generated URL looks like this:
https://mystorage.blob.core.windows.net/mycontainer/latest.png?sv=2018-03-28&ss=b&srt=sco&sp=rwdlac&se=2019-01-31T03:01:43Z&st=2019-01-30T19:01:43Z&spr=https&sig=ayE4gt%2FDfDzjv5DjMaD7AS%2F176Bi4Q6DWJNlnDzl%2FGc%3D
but my URL looks like this:
https://mystorage.blob.core.windows.net/mycontainer/latest.png?sv=2018-03-28&ss=b&srt=sco&sp=rwdlac&se=2019-01-31T03:34:21Z&st=2019-01-30T19:34:21Z&spr=https&sig=Dx8Vm4XPnD1rn9uyzIAXZEfcdbWb0HjmOq%2BIq42Q%2FOM%3D
I have no idea what to do to get this working.
Your Azure Function code is correct, and
var sasToken = blobService.generateSharedAccessSignature(
    container,
    blobName,
    sharedAccessPolicy
);
is exactly the sasToken you need to upload the blob. There is no need to process the token again (it actually mishandles it) as you have done in the second code snippet.
It's expected that the SAS token from the Azure portal (an Account SAS) is different from the one generated in your code (a Service SAS). Have a look at the doc.
To conclude:
1. Make sure the connection string belongs to the storage account you want to connect to. To avoid trouble you could directly replace var connString = process.env.AzureWebJobsStorage; with var connString = "connectionStringGotFromPortal";.
2. If 1 is confirmed, your Azure Function code is correct and returns the token as expected:
{
    token: sasToken,
    uri: blobService.getUrl(container, blobName, sasToken, true)
};
3. Based on the second code snippet you provide, on the client side you only need
return `${url}/${containerName}/${filename}?${token}`;
if the token is identical to what the function returns.
The problem is that in your server-side code you're creating a Service SAS, then taking only the signature portion of it (sig) and building an Account SAS on the client.
Since the parameters used to create the token have now changed (the original token didn't have parameters like ss, srt, etc., but when you create your own URL you insert them), the modified SAS URL fails with a 403 error. The server recomputes the signature from the URL parameters and compares it with the signature passed in the URL; because the two signatures don't match, you get the 403.
Since you're already returning the SAS URL of the blob, there's no need to build the URL on the client. Simply use the uri returned from your API layer and upload to that, for example as in the sketch below.
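A minimal browser-side sketch, assuming response is the parsed JSON body your function returns and file is a File/Blob from an input element; the x-ms-blob-type header is required when putting a block blob over REST:
// Sketch: PUT the file directly to the SAS URI returned by the Azure Function.
async function uploadWithSasUri(response, file) {
    const res = await fetch(response.uri, {
        method: 'PUT',
        headers: {
            'x-ms-blob-type': 'BlockBlob',          // required for the Put Blob operation
            'Content-Type': file.type || 'application/octet-stream'
        },
        body: file
    });
    if (!res.ok) {
        throw new Error('Upload failed: ' + res.status);
    }
}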
As Jerry Liu's answer explained, your Azure Function generates the correct token and already gives you the correct uri to use, which includes your blob name and token.
On the client side you can also use azure-sdk-for-js:
// Assuming the v10-era @azure/storage-blob package, which exports these names.
import { Aborter, AnonymousCredential, BlobURL, BlockBlobURL, StorageURL } from '@azure/storage-blob';

// This is the response from your api with token and uri
const uri = response.uri;
const pipeline = StorageURL.newPipeline(new AnonymousCredential());
// Your uri already includes the full blob url with SAS signature
const blockBlobURL = BlockBlobURL.fromBlobURL(new BlobURL(uri, pipeline));
const uploadBlobResponse = await blockBlobURL.upload(
    Aborter.none,
    file,
    file.size,
    { blobHTTPHeaders: { blobContentType: `${mime}; charset=utf-8` } }
);

Node azure blobService.generateSharedAccessSignature() returns an incorrect token

I'm trying to generate a shared access signature with the Azure Node SDK, but the returned token is not working / is missing some params.
I'm creating the token with:
app.get('/node-api/get-azure-token', (req, res, next) => {
    var blobService = azure.createBlobService(myStorageAccount, myAccessKey);
    var container = 'my-azure-container-name';
    var startDate = new Date();
    var expiryDate = new Date(startDate);
    expiryDate.setMinutes(startDate.getMinutes() + 10);
    startDate.setMinutes(startDate.getMinutes() - 5);
    var sharedAccessPolicy = {
        AccessPolicy: {
            ServiceVersion: '2016-05-31',
            Permissions: azure.BlobUtilities.SharedAccessPermissions.READ +
                azure.BlobUtilities.SharedAccessPermissions.WRITE +
                azure.BlobUtilities.SharedAccessPermissions.DELETE +
                azure.BlobUtilities.SharedAccessPermissions.LIST +
                azure.BlobUtilities.SharedAccessPermissions.ADD +
                azure.BlobUtilities.SharedAccessPermissions.CREATE,
            Start: startDate,
            Expiry: expiryDate,
            Protocols: 'https,http'
        }
    };
    const key = blobService.generateSharedAccessSignature(container, req.query.bloburi, sharedAccessPolicy);
    res.send(key);
});
The error 'Server failed to authenticate the request. Make sure the value of Authorization header is formed correctly including the signature.' is thrown when using the generated SAS token.
Has anyone come across this issue?
I just figured it out! The permissions in the AccessPolicy object need to be listed in the order shown here: https://learn.microsoft.com/en-us/rest/api/storageservices/Constructing-a-Service-SAS?redirectedfrom=MSDN (see the sketch below).
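A minimal sketch of the same policy with the permissions reordered to the documented order (read, add, create, write, delete, list); the azure variable and the start/expiry dates are as in the question:
// Permissions listed in the order the service expects: r, a, c, w, d, l.
var sharedAccessPolicy = {
    AccessPolicy: {
        ServiceVersion: '2016-05-31',
        Permissions: azure.BlobUtilities.SharedAccessPermissions.READ +
            azure.BlobUtilities.SharedAccessPermissions.ADD +
            azure.BlobUtilities.SharedAccessPermissions.CREATE +
            azure.BlobUtilities.SharedAccessPermissions.WRITE +
            azure.BlobUtilities.SharedAccessPermissions.DELETE +
            azure.BlobUtilities.SharedAccessPermissions.LIST,
        Start: startDate,
        Expiry: expiryDate,
        Protocols: 'https,http'
    }
};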
We adapted your code to our needs and it works.
We grant 100 minutes of read permission (we use this to keep images private) and we moved the storage account name, access key, and container name into environment variables in the .env file.
var azure = require('azure-storage');

var GetSharedAccessToken = function (blobName) {
    const blobService = azure.createBlobService(
        process.env.EXPRESS_API_AZURE_STORAGE_ACCOUNT_NAME,
        process.env.EXPRESS_API_AZURE_STORAGE_ACCOUNT_ACCESS_KEY
    );
    const containerName = process.env.EXPRESS_API_AZURE_STORAGE_CONTAINER_NAME;
    return new Promise((resolve, reject) => {
        try {
            var startDate = new Date();
            var expiryDate = new Date(startDate);
            expiryDate.setMinutes(startDate.getMinutes() + 100);
            startDate.setMinutes(startDate.getMinutes() - 100);
            var sharedAccessPolicy = {
                AccessPolicy: {
                    Permissions: azure.BlobUtilities.SharedAccessPermissions.READ,
                    Start: startDate,
                    Expiry: expiryDate
                }
            };
            var token = blobService.generateSharedAccessSignature(containerName, blobName, sharedAccessPolicy);
            var sasUrl = blobService.getUrl(containerName, blobName, token);
            resolve(sasUrl);
        }
        catch (error) {
            reject(error);
        }
    });
};
The blobName in our case is the Azure path of the image, and we pass it dynamically when the user clicks on one of the images, for example as shown below.
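A small usage sketch; the blob path 'uploads/photo1.png' is just a placeholder:
// Resolve a short-lived, read-only URL for a private image.
GetSharedAccessToken('uploads/photo1.png')
    .then(function (sasUrl) {
        console.log('Temporary image URL: ', sasUrl);
    })
    .catch(function (error) {
        console.error('Could not generate SAS URL: ', error);
    });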
Obviously, make sure you set your container's access level to "Private (no anonymous access)" under "Change access level".

s3.copyObject does not apply ServerSideEncryption to object in target bucket?

I'm attempting to perform cross-account backups of any objects from one bucket on ACCOUNT-A to a backup bucket on ACCOUNT-B and I want the objects in the backup bucket to be encrypted using AES256. But the encryption doesn't seem to be getting applied to the objects that land in the backup bucket.
The Setup
ACCOUNT-A has a source bucket called assets.myapp.com
ACCOUNT-B has a target bucket called backup-assets.myapp.com
An s3.ObjectCreated:* bucket event on the assets.myapp.com bucket triggers a Lambda function to copy the newly created object to the backup-assets.myapp.com bucket under ACCOUNT-B.
The Lambda function attempts to apply ServerSideEncryption: 'AES256' to the objects once they land in the backup-assets.myapp.com bucket.
The Lambda Function Code
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });

exports.backupObject = function backupS3Object(event, context) {
    if (event.Records === null) {
        return context.fail('NOTICE:', 'No records to process.');
    }
    async.each(event.Records, function (record, iterate) {
        var sourceBucket = record.s3.bucket.name;
        var targetBucket = 'backup-' + record.s3.bucket.name;
        var key = record.s3.object.key;
        s3.copyObject({
            Bucket: targetBucket,
            CopySource: sourceBucket + '/' + key,
            Key: key,
            ACL: 'private',
            ServerSideEncryption: 'AES256',
            MetadataDirective: 'COPY',
            StorageClass: 'STANDARD_IA'
        }, function (error, data) {
            if (error) return iterate(error);
            console.log('SSE: ' + data.ServerSideEncryption);
            console.log('SUCCESS: Backup of ' + sourceBucket + '/' + key);
            return iterate();
        });
    }, function (error) {
        if (error) {
            return context.fail('ERROR:', 'One or more objects could not be copied.');
        }
        return context.done();
    });
};
CloudWatch Logs Report Success
When the function runs, the object is successfully copied, and the CloudWatch log for my Lambda function reports the ServerSideEncryption used as AES256.
However, the S3 Console Disagrees
But the problem is that when I inspect the Properties > Details of the copied object in the backup-assets.myapp.com bucket under ACCOUNT-B it reports Server Side Encryption: None.
Any idea why the SSE doesn't seem to be applied to the object when it lands in the backup-assets.myapp.com bucket? Or is it actually being applied and I've just discovered a display bug in the S3 Console?
BONUS QUESTION
When I attempt to apply SSE:AES256 to any given object manually using the console, I get the following error:
The additional properties (RRS/SSE) were not enabled or disabled due to errors for the following objects in backup-assets.myapp.com: copied-object-one.txt.
Thanks in advance for any help.
Figured this out.
The problem was with the ACL parameter of the copyObject method.
If you want to use ServerSideEncryption: 'AES256' on the objects that land in the target bucket, you must set the ACL to bucket-owner-full-control so that the backup bucket's owner can apply the encryption. This is not documented anywhere (that I found), but I've done extensive testing now (not by choice) and determined that this does work. The working Lambda function code is below:
var async = require('async');
var aws = require('aws-sdk');
var s3 = new aws.S3({ apiVersion: '2006-03-01' });

exports.backupObject = function backupS3Object(event, context) {
    if (event.Records === null) {
        return context.done('NOTICE: No records to process.');
    }
    async.each(event.Records, function (record, iterate) {
        var sourceBucket = record.s3.bucket.name;
        var targetBucket = 'backup-' + record.s3.bucket.name;
        var key = record.s3.object.key;
        s3.copyObject({
            Bucket: targetBucket,
            CopySource: sourceBucket + '/' + key,
            Key: key,
            ACL: 'bucket-owner-full-control',
            ServerSideEncryption: 'AES256',
            MetadataDirective: 'COPY',
            StorageClass: 'STANDARD_IA'
        }, function (error, data) {
            if (error) return iterate(error);
            console.log('SUCCESS: Backup of ' + sourceBucket + '/' + key);
            return iterate();
        });
    }, function (error) {
        return context.done(error);
    });
};
I'm not sure whether this is possible using the cross-region, cross-account replication method discussed in the comments on the question above. There doesn't seem to be any way to declare SSE when performing a replication.
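If you want to confirm the encryption actually stuck on the copied object rather than trusting the console, here is a small sketch using the same aws-sdk client; the bucket and key names are the ones from the question:
// Verify server-side encryption on the copied object via a HEAD request.
s3.headObject({
    Bucket: 'backup-assets.myapp.com',
    Key: 'copied-object-one.txt'
}, function (error, data) {
    if (error) return console.error(error);
    // Expect 'AES256' if SSE-S3 was applied during the copy.
    console.log('ServerSideEncryption: ' + data.ServerSideEncryption);
});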

How do I Receive Messages Using Sinch Javascript Instant Messaging SDK?

While trying to build a web application with the Sinch Instant Messaging SDK, I ran into an issue of not being able to receive instant messages using the latest JavaScript Instant Messaging SDK found here. I have also been following this tutorial to help build my app, which I think uses a different version of the SDK in which instant messages can be received. However, the SDK version in the tutorial does not let me use generated userTickets for authentication, while the latest SDK version does.
So I was wondering whether there is a way to either use generated userTickets with the SDK from the tutorial, or receive instant messages using the latest SDK.
With the latest SDK I have tried setting supportActiveConnection to true during configuration in order to receive messages using the code from the tutorial, with no success. Here are some of the relevant code snippets from the tutorial for receiving messages:
sinchClient = new SinchClient({
    applicationKey: 'APP_KEY',
    capabilities: {
        messaging: true
    },
    supportActiveConnection: true,
});

var loginObject = {
    username: username,
    password: password
};

sinchClient.start(loginObject, function () {
    global_username = username;
    showPickRecipient();
}).fail(handleError);

var messageClient = sinchClient.getMessageClient();

var eventListener = {
    onIncomingMessage: function (message) {
        if (message.senderId == global_username) {
            $('div#chatArea').append('<div>' + message.textBody + '</div>');
        } else {
            $('div#chatArea').append('<div style="color:red;">' + message.textBody + '</div>');
        }
    }
};

messageClient.addEventListener(eventListener);
The authentication ticket is generated by a Python back end via the following function and handler:
import base64
import hashlib
import hmac
import json
from datetime import datetime

def getAuthTicket(username):
    userTicket = {
        'identity': {'type': 'username', 'endpoint': username},
        'expiresIn': 3600,
        'applicationKey': APPLICATION_KEY,
        'created': datetime.utcnow().isoformat()
    }
    userTicketJson = json.dumps(userTicket).replace(" ", "")
    userTicketBase64 = base64.b64encode(userTicketJson)
    # TicketSignature = Base64 ( HMAC-SHA256 ( ApplicationSecret, UTF8 ( UserTicketJson ) ) )
    digest = hmac.new(base64.b64decode(APPLICATION_SECRET),
                      msg=userTicketJson,
                      digestmod=hashlib.sha256).digest()
    signature = base64.b64encode(digest)
    # UserTicket = TicketData + ":" + TicketSignature
    signedUserTicket = userTicketBase64 + ':' + signature
    return {"userTicket": signedUserTicket}

class TicketHandler(BaseHandler):
    def get(self):
        self.response.write(getAuthTicket(self.username))
Then, on the client side, I make a GET request to the ticket handler.
$.get('/ticket', function (authTicket) {
    sinchClient.start(eval("(" + authTicket + ")"))
        .then(function () {
            console.log("success");
        })
        .fail(function (error) {
            console.log("fail");
        });
});
The error I get when I try to start the Sinch client using the sinch.min.js file found in the tutorial is "no valid identity or authentication ticket".
