I'm trying to use the last.fm API in javascript but I can't get it working. I found this GitHub reference which I thought would be useful: https://github.com/fxb/javascript-last.fm-api
var cache = new LastFMCache();
but this ^ causes an error: "ReferenceError: LastFMCache is not defined." I've tried putting the files from https://github.com/fxb/javascript-last.fm-api in the same directory as the file I'm running. I'm running it with Node.js; I don't know if that's the problem, although running other files with Node.js has worked fine before. Any idea how I can remedy this? I can't tell if I'm missing some code or if my files are in the wrong directory. Thanks for your help!
You are using a client-side (browser) JavaScript library, which is why LastFMCache is not defined when you run the script under Node.js.
If you browse npm, you'll find plenty of Last.fm modules; the most popular is simple-lastfm.
Here is the example taken from its docs:
var Lastfm = require('simple-lastfm');

var lastfm = new Lastfm({
    api_key: 'xxx',
    api_secret: 'xxx',
    username: 'xxx',
    password: 'xxx'
});

lastfm.getSessionKey(function(result) {
    console.log("session key = " + result.session_key);
    if (result.success) {
        lastfm.scrobbleNowPlayingTrack({
            artist: 'Ratatat',
            track: 'Seventeen Years',
            callback: function(result) {
                console.log("in callback, finished: ", result);
            }
        });
    } else {
        console.log("Error: " + result.error);
    }
});
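If you only need simple read calls and would rather not add a wrapper library at all, you can also hit the Last.fm REST API directly from Node. A minimal sketch using Node's built-in https module (the endpoint and parameters follow the public Last.fm API docs; YOUR_API_KEY is a placeholder you must replace):

// Minimal sketch: fetch artist info straight from the Last.fm REST API.
var https = require('https');

var url = 'https://ws.audioscrobbler.com/2.0/' +
          '?method=artist.getinfo&artist=Ratatat&api_key=YOUR_API_KEY&format=json';

https.get(url, function (res) {
    var body = '';
    res.on('data', function (chunk) { body += chunk; });
    res.on('end', function () {
        var json = JSON.parse(body);
        console.log(json.artist && json.artist.name);
    });
}).on('error', function (err) {
    console.error(err);
});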
I'm trying to create a Dataflow job to index a BigQuery table into Elasticsearch with the Node package google-cloud/dataflow.v1beta3.
The job works fine when it's created and launched from the Google Cloud console, but I get the following error when I try it in Node:
Error: 3 INVALID_ARGUMENT: (b69ddc3a5ef1c40b): Cannot set worker pool zone. Please check whether the worker_region experiments flag is valid. Causes: (b69ddc3a5ef1cd76): An internal service error occurred.
I tried to specify the experiments params in various ways, but I always end up with the same error.
Has anyone managed to get a similar Dataflow job working? Or do you have any information about Dataflow experiments?
Here is the code:
const { JobsV1Beta3Client } = require('@google-cloud/dataflow').v1beta3

const dataflowClient = new JobsV1Beta3Client()

const response = await dataflowClient.createJob({
    projectId: 'myGoogleCloudProjectId',
    location: 'europe-west1',
    job: {
        launch_parameter: {
            jobName: 'indexation-job',
            containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
            parameters: {
                inputTableSpec: 'bigQuery-table-gs-adress',
                connectionUrl: 'elastic-endpoint-url',
                index: 'elastic-index',
                elasticsearchUsername: 'username',
                elasticsearchPassword: 'password'
            }
        },
        environment: {
            experiments: ['worker_region']
        }
    }
})
Thank you very much for your help.
After many attempts, I managed yesterday to find out how to specify the worker region.
It looks like this:
await dataflowClient.createJob({
    projectId,
    location,
    job: {
        name: 'jobName',
        type: 'Batch',
        containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
        pipelineDescription: {
            inputTableSpec: 'bigquery-table',
            connectionUrl: 'elastic-url',
            index: 'elastic-index',
            elasticsearchUsername: 'username',
            elasticsearchPassword: 'password',
            project: projectId,
            appName: 'BigQueryToElasticsearch'
        },
        environment: {
            workerPools: [
                { region: 'europe-west1' }
            ]
        }
    }
})
It's not working yet; I still need to find the correct way to provide the other parameters, but the Dataflow job is now created in the Google Cloud console.
For anyone who is struggling with this issue, I finally found out how to launch a Dataflow job from a template.
There is a function launchFlexTemplate that works the same way as the job creation in the Google Cloud console.
Here is the final function, working correctly:
const { FlexTemplatesServiceClient } = require('@google-cloud/dataflow').v1beta3

const dataflowClient = new FlexTemplatesServiceClient()

const response = await dataflowClient.launchFlexTemplate({
    projectId: 'google-project-id',
    location: 'europe-west1',
    launchParameter: {
        jobName: 'job-name',
        containerSpecGcsPath: 'gs://dataflow-templates-europe-west1/latest/flex/BigQuery_to_Elasticsearch',
        parameters: {
            apiKey: 'elastic-api-key', // mandatory but not used if you provide username and password
            connectionUrl: 'elasticsearch endpoint',
            index: 'elasticsearch index',
            elasticsearchUsername: 'username',
            elasticsearchPassword: 'password',
            inputTableSpec: 'bigquery source table', // projectId:datasetId.table
            // parameters to upsert the elasticsearch index
            propertyAsId: 'table column used for the elastic _id',
            usePartialUpdate: true,
            bulkInsertMethod: 'INDEX'
        }
    }
})
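If you then want to check the launched job's status from Node as well, the same package exposes a jobs client. This is only a sketch under assumptions: the field names (response.job.id, currentState) and the array-style return value are what I would expect from the generated v1beta3 client, so verify them against the actual response in your environment.

const { JobsV1Beta3Client } = require('@google-cloud/dataflow').v1beta3

// Sketch: poll the state of a job created by the launch call above.
// Assumption: the generated client resolves to an array whose first element is the job.
const jobsClient = new JobsV1Beta3Client()

async function checkJob(jobId) {
    const [job] = await jobsClient.getJob({
        projectId: 'google-project-id',
        location: 'europe-west1',
        jobId: jobId
    })
    console.log(job.currentState)
}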
I am creating an application using ASP.NET MVC and JavaScript in which I want to create folders inside my existing Google Drive folder.
Below is my code, which I got from Stack Overflow:
function createFolder() {
    var body = {
        'title': document.getElementById('txtFolderName').value,
        'mimeType': "application/vnd.google-apps.folder"
    };
    var request = gapi.client.drive.files.insert({
        'resource': body
    });
    request.execute(function (resp) {
        console.log('Folder ID: ' + resp.id);
    });
}
I am getting the error below:
index.html:61 Uncaught TypeError: Cannot read properties of undefined (reading 'files')
on the following line:
var request = gapi.client.drive.files.insert({
Here gapi.client.drive appears to be undefined.
Below is my code to authenticate and load the Google API client:
function authenticate(callback) {
    return gapi.auth2.getAuthInstance()
        .signIn({ scope: "https://www.googleapis.com/auth/documents https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.file" })
        .then(function () {
            console.log("Sign-in successful");
            callback == undefined ? '' : callback();
        },
        function (err) {
            console.error("Error signing in", err);
        });
}

function loadClient() {
    gapi.client.setApiKey("APIKEY");
    return gapi.client.load("https://docs.googleapis.com/$discovery/rest?version=v1")
        .then(function () {
            console.log("GAPI client loaded for API");
        },
        function (err) {
            console.error("Error loading GAPI client for API", err);
        });
}
What is the problem here?
And what do I need to do if I want to create a folder inside another folder?
Thanks in advance.
In your script, https://docs.googleapis.com/$discovery/rest?version=v1 in gapi.client.load("https://docs.googleapis.com/$discovery/rest?version=v1") loads the discovery document for Google Docs API v1. I think this is the reason for your error message Uncaught TypeError: Cannot read properties of undefined (reading 'files'): the Drive client is never loaded, so gapi.client.drive is undefined.
Your goal is to create a folder inside a specific folder. In this case, please use the Drive API. Your current script for creating the folder uses Drive API v2 calls, so please modify the load as follows.
From:
return gapi.client.load("https://docs.googleapis.com/$discovery/rest?version=v1")
To:
return gapi.client.load("https://www.googleapis.com/discovery/v1/apis/drive/v2/rest")
With this modification, I think your createFolder() will work. But in your current createFolder(), the folder is created in the root folder. When you want to create the folder inside a specific folder, please modify the request body as follows.
var body = {
'title': document.getElementById('txtFolderName').value,
'mimeType': "application/vnd.google-apps.folder",
'parents': [{'id': '###folderId###'}]
};
Note:
As additional information, if you want to use Drive API v3, please modify it as follows.
From
return gapi.client.load("https://docs.googleapis.com/$discovery/rest?version=v1")
To
return gapi.client.load("https://www.googleapis.com/discovery/v1/apis/drive/v3/rest")
And, please modify createFolder() as follows.
var body = {
'name': document.getElementById('txtFolderName').value,
'mimeType': "application/vnd.google-apps.folder",
'parents': ['###folderId###']
};
var request = gapi.client.drive.files.create({ 'resource': body });
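Putting the v3 pieces together, a complete createFolder() might look like the sketch below. It simply combines the request body and create call shown above; '###folderId###' is a placeholder for the parent folder's ID.

// Sketch: create a folder inside a parent folder using Drive API v3 via gapi.
function createFolder() {
    var body = {
        'name': document.getElementById('txtFolderName').value,
        'mimeType': 'application/vnd.google-apps.folder',
        'parents': ['###folderId###'] // parent folder ID (placeholder)
    };
    var request = gapi.client.drive.files.create({ 'resource': body });
    request.execute(function (resp) {
        console.log('Folder ID: ' + resp.id);
    });
}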
References:
Files: insert of Drive API v2
Files: create of Drive API v3
I am trying to use the Cordova social sharing plugin for sharing video on social sites. So far, I have successfully captured video using the following code:
var options = {
    limit: 1,
    duration: 15
};

$cordovaCapture.captureVideo(options).then(function (videoData) {
    $scope.videoUrl = videoData[0].fullPath;
}, function (err) {
    // An error occurred. Show a message to the user
    //alert("video error : " + err);
});
I can successfully get the captured video file's URL, but unfortunately I cannot share the videos to social media sites. I have tried both of the following methods:
$cordovaSocialSharing
.share(message, subject, file, link)
and
$cordovaSocialSharing
.shareViaTwitter(message, image, link)
Now my questions are:
Is there any way to share video through this approach?
If not, please let me know if there is any other possible way to do this.
N.B.: I have already searched Google a lot.
Thanks in advance.
My problem was passing a bad filePath, so I found a solution like the one below:
import { CaptureError, MediaFile, MediaCapture, CaptureVideoOptions, Transfer, SocialSharing } from "ionic-native";
declare let cordova: any;

private options = {
    message: '', // not supported on some apps (Facebook, Instagram)
    subject: '', // for email
    files: [''], // an array of filenames either locally or remotely
    url: ''
};

videoOptions: CaptureVideoOptions = { limit: 1 };
videoData: any;

captureVideo() {
    MediaCapture.captureVideo(this.videoOptions)
        .then(
            (data: MediaFile[]) => {
                this.videoData = data[0];
                const fileTransfer = new Transfer();
                // Copy the captured video into the app's storage directory before sharing it.
                fileTransfer.download(this.videoData.fullPath, cordova.file.applicationStorageDirectory + 'fileDir/filename.mp4').then((entry) => {
                    this.options.message = "Your message";
                    this.options.subject = "Your subject";
                    this.options.files = [entry.toURL()];
                    this.options.url = "https://www.google.com.tr/";
                    SocialSharing.shareWithOptions(this.options);
                }, (error) => {
                });
            },
            (err: CaptureError) => {
            }
        );
}
As you can see above, I just copy my video file to applicationStorageDirectory before sharing it.
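For the AngularJS/ngCordova setup from the question, the same idea (copy the captured file into app storage first, then share the copy) might look roughly like the sketch below. This is only a sketch under assumptions: it assumes $cordovaFileTransfer and the cordova-plugin-file plugin are available in your project, and 'fileDir/filename.mp4' is just an example target path.

// Rough sketch, ngCordova flavour: capture, copy to app storage, then share the copy.
$cordovaCapture.captureVideo({ limit: 1, duration: 15 }).then(function (videoData) {
    var source = videoData[0].fullPath;
    var target = cordova.file.applicationStorageDirectory + 'fileDir/filename.mp4'; // example path
    return $cordovaFileTransfer.download(source, target, {}, true);
}).then(function (entry) {
    // Share the copied file; the share call expects a reachable local URL.
    return $cordovaSocialSharing.share('Your message', 'Your subject', entry.toURL(), 'https://www.google.com.tr/');
}).catch(function (err) {
    console.error('capture or share failed', err);
});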
I have a pdf file located here:
/server/.files/users/test.pdf
When I display a link on a page, I'd like for the user to be able to click the link, and for the pdf to be rendered on the screen.
I've read through this SO post in particular, and others, but cannot seem to get things to work: SO Link
I tried using an Iron Router route server-side, but every time I try even something simple I get the following error:
Error: Meteor.userId can only be invoked in method calls. Use this.userId in publish functions.
    at Object.Meteor.userId (packages/accounts-base/accounts_server.js:19:1)
    at Object.Meteor.user (packages/accounts-base/accounts_server.js:24:1)
    at [object Object].Router.onBeforeAction.except (app/both/3-router/routes.js:10:15)
    at packages/iron:router/lib/router.js:277:1
    at [object Object]._.extend.withValue (packages/meteor/dynamics_nodejs.js:56:1)
    at [object Object].hookWithOptions (packages/iron:router/lib/router.js:276:1)
    at boundNext (packages/iron:middleware-stack/lib/middleware_stack.js:251:1)
    at runWithEnvironment (packages/meteor/dynamics_nodejs.js:108:1)
    at packages/meteor/dynamics_nodejs.js:121:1
    at [object Object].dispatch (packages/iron:middleware-stack/lib/middleware_stack.js:275:1)
Line #10 in my router.js file is the first if statement here:
Router.onBeforeAction(function () {
    if (!Meteor.user() || Meteor.loggingIn()) {
        this.redirect('welcome.view');
    } else {
        Meteor.call("userFileDirectory", function (error, result) {
            if (error)
                throw error;
            else
                console.log(result);
        });
        this.next();
    }
}, { except: ['welcome.view'] });
I tried this:
Router.map(function() {
    this.route('serverFile', {
        where: 'server',
        path: /^\/uploads_url_prefix\/(.*)$/,
        action: function() {
            var filePath = process.env.PWD + '/.files/users/' + this.params[1];
            var data = fs.readFileSync(filePath);
            this.response.writeHead(200, {
                'Content-Type': 'image'
            });
            this.response.write(data);
            this.response.end();
        }
    });
});
But I'm not sure what to put in the path.
With process.env.PWD you are in the directory of your Meteor project, so you should be able to access your file like this:
var file = process.env.PWD + "/server/.files/users/test.pdf"
To use the fs package of Node you also need to require it, and you need to be on the server:
Router.route('/pdf', function() {
    var filePath = process.env.PWD + "/server/.files/users/test.pdf";
    var fs = Meteor.npmRequire('fs');
    var data = fs.readFileSync(filePath);
    this.response.write(data);
    this.response.end();
}, {
    where: 'server'
});
Make sure to add this package (https://atmospherejs.com/meteorhacks/npm) to your project:
meteor add meteorhacks:npm
I tested it and it works like a charm!
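One detail worth adding: if the browser downloads the PDF instead of rendering it inline, sending a PDF Content-Type header may help. Here is a small sketch, a variation of the route above with only the header added (same path and file):

// Sketch: same server-side route, but tell the browser the response is a PDF.
Router.route('/pdf', function () {
    var fs = Meteor.npmRequire('fs');
    var filePath = process.env.PWD + "/server/.files/users/test.pdf";
    var data = fs.readFileSync(filePath);
    this.response.writeHead(200, { 'Content-Type': 'application/pdf' });
    this.response.write(data);
    this.response.end();
}, {
    where: 'server'
});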
So far I have been able to pull down streaming real-time data from Twitter. How do I use this data? I am trying to insert it into a collection, but I am getting this error:
Error: Meteor code must always run within a Fiber. Try wrapping callbacks that you pass to non-Meteor libraries with Meteor.bindEnvironment.
I tried wrapping my code with a fiber, but it didn't work, or I am not wrapping the right part of the code. Also, I'm not sure if this is the proper way to use streaming data in Meteor.
Posts = new Meteor.Collection('posts');

if (Meteor.isClient) {
    Meteor.call("tweets", function(error, results) {
        console.log(results); // results.data should be a JSON object
    });
}

if (Meteor.isServer) {
    Meteor.methods({
        tweets: function() {
            Twit = new TwitMaker({
                consumer_key: '...',
                consumer_secret: '...',
                access_token: '...',
                access_token_secret: '...'
            });
            sanFrancisco = ['-122.75', '36.8', '-121.75', '37.8'];
            stream = Twit.stream('statuses/filter', { locations: sanFrancisco });
            stream.on('tweet', function (tweet) {
                userName = tweet.user.screen_name;
                userTweet = tweet.text;
                console.log(userName + " says: " + userTweet);
                Posts.insert({post: tweet});
            });
        }
    });
}
Code that mutates the database needs to run in a fiber, which is what the error is about. Code that runs in a callback from a library other than Meteor is not (necessarily) run in a fiber, so you'll need to wrap the callback function, or at least the part of it that interacts with the database, so that it runs in a fiber.
Meteor.bindEnvironment, which the error mentions, is not currently documented, but it is generally considered the most reliable way of wrapping callbacks. For reference, it is defined here:
https://github.com/meteor/meteor/blob/master/packages/meteor/dynamics_nodejs.js#L63
Something like this is probably the easiest way of making this work:
tweets: function() {
...
// You have to define this wrapped function inside a fiber .
// Meteor.methods always run in a fiber, so we should be good here.
// If you define it inside the callback, it will error out at the first
// line of Meteor.bindEnvironment.
var wrappedInsert = Meteor.bindEnvironment(function(tweet) {
Posts.insert(tweet);
}, "Failed to insert tweet into Posts collection.");
stream.on('tweet', function (tweet) {
var userName = tweet.user.screen_name;
var userTweet = tweet.text;
console.log(userName + " says: " + userTweet);
wrappedInsert(tweet);
});
}
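To actually see those inserted tweets on the client, you would still need to publish the collection (unless the autopublish package is present). A minimal sketch, assuming a publication name of 'posts' (just an example name):

// Sketch: publish the Posts collection so the client sees the inserted tweets.
if (Meteor.isServer) {
    Meteor.publish('posts', function () {
        return Posts.find({}, { sort: { _id: -1 }, limit: 50 });
    });
}

if (Meteor.isClient) {
    Meteor.subscribe('posts');
    // Posts.find().fetch() now updates reactively as the stream inserts documents.
}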
This works for me. The essential part is to call the Meteor.bindEnvironment-wrapped function from inside the Twit callback.
Meteor.methods({
    consumeTwitter: function () {
        var Twit = Meteor.npmRequire('twit');
        var T = new Twit({
            consumer_key: 'xxx', // API key
            consumer_secret: 'yyy', // API secret
            access_token: 'xxx',
            access_token_secret: 'xxx'
        });

        // search twitter for all tweets containing the word 'banana'
        var now = new Date().getTime();
        var wrappedInsert = Meteor.bindEnvironment(function(tweet) {
            Tweets.insert(tweet);
        }, "Failed");

        T.get('search/tweets',
            {
                q: 'banana since:2011-11-11',
                count: 4
            },
            function(err, data, response) {
                var statuses = data['statuses'];
                for (var i in statuses) {
                    wrappedInsert(statuses[i]);
                }
            }
        );
    }
});
I had written a lengthy post about Building Twitter Monitoring Apps with MeteorJS from Scratch, including the Meteor.bindEnvironment part; an extract is below.
var Twit = Meteor.npmRequire('twit');
var conf = JSON.parse(Assets.getText('twitter.json'));
var T = new Twit({
    consumer_key: conf.consumer.key,
    consumer_secret: conf.consumer.secret,
    access_token: conf.access_token.key,
    access_token_secret: conf.access_token.secret
});

//
// filter the twitter public stream by the word 'iwatch'.
//
var stream = T.stream('statuses/filter', { track: conf.keyword });
stream.on('tweet', Meteor.bindEnvironment(function (tweet) {
    console.log(tweet);
    Tweets.insert(tweet);
}));
There are only two Meteor-specific functions added here:
Meteor.npmRequire(), which loads the twit npm package, and
Meteor.bindEnvironment(), which binds a function to the current values of all the environment variables.
Have fun!