Asterisk: create user with template via AMI - javascript

I need to modify sip.conf with AMI, adding a new user to it. Everything works fine, and I can create a user like this without problems:
[1000]
secret=pass12
But I need to create a user with a template, like
[1000](mytemp)
secret=pass12
and I don't know how to do this. Neither Google nor the Digium forum has been any help.
P.S. I use the JavaScript asterisk-manager package to interact with Asterisk, and here is my code, which adds the extension:
var amiAction = {
    action: 'UpdateConfig',
    reload: 'yes',
    srcfilename: 'sip.conf',
    dstfilename: 'sip.conf',
    'action-000000': 'newcat',
    'cat-000000': '1000',
    'action-000001': 'append',
    'cat-000001': '1000',
    'var-000001': 'secret',
    'value-000001': 'pass12'
};
ami.action(amiAction, function(err, resp) {
    console.log(err, resp);
});

var amiAction = {
    action: 'UpdateConfig',
    reload: 'chan_sip',
    srcfilename: 'sip.conf',
    dstfilename: 'sip.conf',
    'action-000000': 'newcat',
    'cat-000000': '1000',
    'options-000000': 'inherit=template-name'
};

I'm sure you've tried this, but:
'cat-000000': '1000 [(mytemp)]',
... should work just fine. If it does not, what error message does it throw?
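If neither suggestion on its own does the trick, here is a minimal sketch (not tested against your setup) that combines the inherit option with the secret assignment in a single UpdateConfig action. It assumes your Asterisk version honors the inherit option for NewCat and that a template named mytemp already exists in sip.conf:
// Hedged sketch: create [1000](mytemp) and append its secret in one call.
// Assumes the same asterisk-manager client ("ami") from the question above.
var amiAction = {
    action: 'UpdateConfig',
    reload: 'chan_sip',
    srcfilename: 'sip.conf',
    dstfilename: 'sip.conf',
    'action-000000': 'newcat',
    'cat-000000': '1000',
    'options-000000': 'inherit=mytemp', // assumption: inherit is supported by this Asterisk version
    'action-000001': 'append',
    'cat-000001': '1000',
    'var-000001': 'secret',
    'value-000001': 'pass12'
};
ami.action(amiAction, function(err, resp) {
    console.log(err, resp);
});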

Related

Google cloud dataflow job creation error: "Cannot set worker pool zone. Please check whether the worker_region experiments flag is valid"

I'm trying to create a Dataflow job to index a BigQuery table into Elasticsearch with the Node package google-cloud/dataflow.v1beta3.
The job works fine when it's created and launched from the Google Cloud console, but I get the following error when I try it in Node:
Error: 3 INVALID_ARGUMENT: (b69ddc3a5ef1c40b): Cannot set worker pool zone. Please check whether the worker_region experiments flag is valid. Causes: (b69ddc3a5ef1cd76): An internal service error occurred.
I tried to specify the experiments params in various ways but I always end up with the same error.
Has anyone managed to get a similar Dataflow job working? Or do you have any information about Dataflow experiments?
Here is the code:
const { JobsV1Beta3Client } = require('@google-cloud/dataflow').v1beta3
const dataflowClient = new JobsV1Beta3Client()
const response = await dataflowClient.createJob({
    projectId: 'myGoogleCloudProjectId',
    location: 'europe-west1',
    job: {
        launch_parameter: {
            jobName: 'indexation-job',
            containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
            parameters: {
                inputTableSpec: 'bigQuery-table-gs-adress',
                connectionUrl: 'elastic-endpoint-url',
                index: 'elastic-index',
                elasticsearchUsername: 'username',
                elasticsearchPassword: 'password'
            }
        },
        environment: {
            experiments: ['worker_region']
        }
    }
})
Thank you very much for your help.
After many attempts I managed yesterday to find out how to specify the worker region.
It looks like this:
await dataflowClient.createJob({
    projectId,
    location,
    job: {
        name: 'jobName',
        type: 'Batch',
        containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
        pipelineDescription: {
            inputTableSpec: 'bigquery-table',
            connectionUrl: 'elastic-url',
            index: 'elastic-index',
            elasticsearchUsername: 'username',
            elasticsearchPassword: 'password',
            project: projectId,
            appName: 'BigQueryToElasticsearch'
        },
        environment: {
            workerPools: [
                { region: 'europe-west1' }
            ]
        }
    }
})
It's not working yet, since I still need to find the correct way to provide the other parameters, but the Dataflow job now shows up in the Google Cloud console.
For anyone who is struggling with this issue, I finally found how to launch a Dataflow job from a template.
There is a function launchFlexTemplate that works the same way as the job creation in the Google Cloud console.
Here is the final function working correctly:
const { FlexTemplatesServiceClient } = require('@google-cloud/dataflow').v1beta3
const dataflowClient = new FlexTemplatesServiceClient()
const response = await dataflowClient.launchFlexTemplate({
    projectId: 'google-project-id',
    location: 'europe-west1',
    launchParameter: {
        jobName: 'job-name',
        containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
        parameters: {
            apiKey: 'elastic-api-key', // mandatory but not used if you provide username and password
            connectionUrl: 'elasticsearch endpoint',
            index: 'elasticsearch index',
            elasticsearchUsername: 'username',
            elasticsearchPassword: 'password',
            inputTableSpec: 'bigquery source table', // projectId:datasetId.table
            // parameters to upsert the elasticsearch index
            propertyAsId: 'table column used for the elastic _id',
            usePartialUpdate: true,
            bulkInsertMethod: 'INDEX'
        }
    }
})
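As a hedged follow-up to the snippet above: with the generated Node clients, the promise is usually assumed to resolve to an array whose first element is the actual response object, so inspecting the launched job might look something like the sketch below (the exact response shape is an assumption, check your client version):
// Assumption: launchFlexTemplate resolves to [LaunchFlexTemplateResponse, ...]
// and that response carries the launched job's metadata.
const [launchResult] = response
if (launchResult && launchResult.job) {
    console.log('Launched Dataflow job:', launchResult.job.id)
}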

Wordpress custom block not showing in block inserter

I'm trying to create a custom block for a site, but the block is not appearing in the editor's block inserter. I've gone through multiple tutorials and changed my code a lot, but it simply won't work.
What I've checked:
- The block is added through a plugin, but it also doesn't work when moved to the theme.
- I know the plugin is working correctly, as I can use other WP hooks/actions with no issues within the plugin.
- I have tried using both 'init' and 'enqueue_block_assets', but neither works.
- I have verified that all the file locations and paths are correct, as I have echoed them out to check.
- I have changed to the default theme and it still does not appear.
Any help would be appreciated.
Here is the JS block source (which is compiled):
import { registerBlockType } from '@wordpress/blocks'

registerBlockType('ghs/landing-page-block', {
    title: 'Landing Page',
    apiVersion: 2,
    category: 'design',
    icon: 'smiley',
    description: 'Layout for the GHS landing page',
    keywords: ['GHS', 'landing', 'page', 'front'],
    edit: () => {
        return (<div>hello</div>)
    },
    save: () => {
        return (<div>hello</div>)
    }
});
and the php registering it:
add_action('init', function() {
    $asset_file = include( WP_PLUGIN_DIR . '/ghs-custom-blocks/assets/js/landing-page-block.asset.php');
    wp_register_script('ghs-landing-page',
        WP_PLUGIN_DIR . '/ghs-custom-blocks/assets/js/landing-page-block.js',
        $asset_file['dependencies'],
        $asset_file['version']);
    register_block_type('ghs/landing-page-block', [
        'api_version' => 2,
        'editor_script' => 'ghs-landing-page',
    ]);
});
Solved it: it was because I was using WP_PLUGIN_DIR instead of plugin_dir_url(__FILE__). It meant the JS request URL used the filesystem path from the server root instead of the URL of the WP installation, so the editor script never loaded.

Express/Mongoose is only saving partial request data to database

I think this is related to how I've defined my schemas, but I can't seem to find where the bug is... I have an almost identical file set up that's working perfectly and I've unfortunately not been able to find a duplicate of this issue anywhere.
When sending an API request to my local Express instance via Postman, only the 'title' request body value is stored in the database. I am sending the following simple request to my route as application/json (though the same happens when using x-www-form-urlencoded):
{
    "postTitle": "title goes here",
    "postContent": "body goes here",
    "isPublished": true
}
This is clearly being registered in express, as if I log the object I can see this data (plus timestamps and an _id):
{ _id: 5b07d9c0b8124e0599079c04,
  postTitle: 'title goes here',
  postContent: 'body goes here',
  isPublished: true,
  createdAt: 2018-05-25T09:39:12.869Z,
  updatedAt: 2018-05-25T09:39:12.869Z,
  __v: 0 }
However, when I send a GET request to my route for this object using its ID, I receive the following in response:
{ "_id": "5b07d9c0b8124e0599079c04" }
Likewise, if I send a request to list all objects, I receive the following response:
{
    "posts": [
        {
            "_id": "5b07d9c0b8124e0599079c04"
        },
        {
            "_id": "5b07d9c0b8124e0599079c03"
        },
        {
            "_id": "5b07d9914f10ce058f137eba"
        }
    ]
}
Weirdly, sometimes the post title is included in the response, and sometimes it isn't.
My schema is as follows:
var postSchema = new Schema({
    postTitle: String,
    postContent: String,
    isPublished: Boolean
},
{
    timestamps: true
});
My post API route for POST requests is as follows:
router.post('/posts', (req, res, next) => {
    var postTitle = req.body.postTitle;
    var postContent = req.body.postContent;
    var isPublished = req.body.isPublished;
    var newPost = new Post({
        postTitle: postTitle,
        postContent: postContent,
        isPublished: isPublished
    });
    newPost.save(function (error) {
        if (error) {
            console.log(error)
        }
        res.send({
            success: true,
            message: 'Post saved successfully!'
        })
    })
});
(If you're not using Router, you'll have 'app.post' instead of 'router.post'.) Again, this is a bit long-winded, but everything here works fine.
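As a side note for anyone debugging a similar persistence issue, a small variation of the save callback (a sketch, not the original code) returns the saved document so you can see exactly what was written to the database:
// Sketch: Mongoose passes the saved document to the save callback,
// so echoing it back makes it easy to verify what was actually persisted.
newPost.save(function (error, savedPost) {
    if (error) { return res.status(500).send({ success: false, error: error.message }); }
    res.send({ success: true, post: savedPost });
});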
My GET route is as follows:
router.get('/posts', (req, res) => {
    Post.find({}, 'title content published', function (error, posts) {
        if (error) { console.error(error); }
        res.send({
            posts: posts
        })
    }).sort({_id:-1})
});
OK, so by going through my code in detail I've figured out where I was going wrong and fixed the issue. However, in my searching I found very little in the way of results, and I'm pretty new to Express, so I'm going to outline the cause of the issue and how I resolved it, to potentially save someone else a bunch of time if they make the same mistake.
The issue results from the way I was retrieving the data and serving it in response to GET requests; the GET route above, which lists all of the objects, is the example.
I was entirely focused on the POST request and assuming it was a problem with the database. It turns out that, in order to make my schemas and routes less confusing, I had changed the names of the relevant fields. What I'd forgotten to do, however, was update the projection in my GET route to use the new field names:
Post.find({}, 'postTitle postContent isPublished', function (error, posts) {
I had left it as:
Post.find({}, 'title content published', function (error, posts) {
The reason the title sometimes displayed is that I had been undoing changes back and forth while trying to spot the issue.
I know this is a super basic mistake, but I was stuck on it for the best part of a day, and the only other relevant discussion I found ended with the OP saying that it magically fixed itself.
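For reference, here is a minimal sketch of the corrected GET route (assuming a Mongoose version that still accepts callbacks): the projection now matches the schema field names, and the sort is chained on the query before it executes.
// Sketch only: projection uses the real schema fields (postTitle, postContent, isPublished),
// and .sort() is applied before the query runs via .exec().
router.get('/posts', (req, res) => {
    Post.find({}, 'postTitle postContent isPublished')
        .sort({ _id: -1 })
        .exec(function (error, posts) {
            if (error) { return console.error(error); }
            res.send({ posts: posts });
        });
});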

Meteor - Method adding user custom field do not work

I am trying to add a custom "isContributor" field to Meteor.users through a method, but somehow it is not adding the field. (This is even before adding any safety check on who is allowed to do this update.)
On the client side I have the following event:
Template.Articles.events({
    'click #BeContributor': function() {
        userId = Meteor.userId();
        Meteor.call('setContributorState', userId);
    }
});
and in server/main.js the following:
Meteor.methods({
    setContributorState: function(userId) {
        Meteor.users.update(userId, {
            $set: {
                isContributor: true
            }
        });
    }
});
Somehow it does not add the field for my user, and there are no console or server errors. I guess I missed something in terms of the rights needed to add the field. Any ideas?
Thanks in advance.
--- EDIT ---
Actually, the method was working, but I did not publish the result back, so I could not see in MeteorToys that the field had been updated. With the following publication it works:
Meteor.publish(null, function(){
    return Meteor.users.find({_id: this.userId}, {fields: {isContributor: 1}});
});
found at Publishing custom Meteor.user() fields
If you are going to modify the logged-in user's contributor status, you do not need to pass any parameter; passing the user id from the client is a security hole. The server already knows who is making the request:
Meteor.methods({
    setContributorState: function() {
        Meteor.users.update({_id: this.userId}, {
            $set: {
                isContributor: true
            }
        });
    }
});
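For completeness, a minimal sketch of the matching client-side call (based on the event handler from the question); no argument is passed, since the server resolves this.userId itself:
// Client side: no userId argument; the server-side method uses this.userId.
Template.Articles.events({
    'click #BeContributor': function() {
        Meteor.call('setContributorState', function(error) {
            if (error) { console.error(error); }
        });
    }
});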

Using Sendgrid with node.js attachments are empty / broken

Although I have verified that the file does exist and is accessible, the email that is sent attaches an empty file. I have tried three different ways to attach the file, all of which return success in the send JSON response. The code I'm currently using is as follows; the rk object is simply a namespace.
console.log(call.recording);
var email = new rk.sendgrid.Email({
    to: '4namlet@gmail.com',
    from: rk.config.email_address,
    subject: 'RoadKid Feedback',
    text: 'Someone left feedback on your driver.'//,
    //files: [
    //    {
    //        contentType: 'audio/mpeg',
    //        url: call.recording
    //    }
    //]
});
email.addFile({
    filename: 'feedback.mp3',
    contentType: 'audio/mpeg',
    url: call.recording
});
rk.sendgrid.send(email, function(err, json) {
    if (err) { return console.error(err); }
    console.log(json);
});
An example url value is:
http://api.twilio.com/2010-04-01/Accounts/AC4a36110ce12a9cd68a947c87a3a6ab36/Recordings/RE568ecf17e4960105cd131507d49e182f.mp3
Turns out SendGrid was working fine (thanks guys for the quick response!). It was a scoping issue: for some reason call.recording was getting clobbered or evaluated oddly. After the console.log I added:
var recording_url = call.recording;
and then used
url: recording_url
in addFile, and it all worked. :-/
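Putting the fix together, here is a minimal sketch of the working version (assuming the same rk namespace and the older sendgrid Email/addFile API used in the question):
// Capture the recording URL in a local variable before building the email,
// so it cannot be clobbered later.
var recording_url = call.recording;

var email = new rk.sendgrid.Email({
    to: '4namlet@gmail.com',
    from: rk.config.email_address,
    subject: 'RoadKid Feedback',
    text: 'Someone left feedback on your driver.'
});
email.addFile({
    filename: 'feedback.mp3',
    contentType: 'audio/mpeg',
    url: recording_url
});
rk.sendgrid.send(email, function(err, json) {
    if (err) { return console.error(err); }
    console.log(json);
});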
