Embedding multiple videos using oEmbed and JavaScript

I'm trying to embed multiple videos on a web page using Vimeo's oEmbed. The idea is to simply enter the URL in the CMS, which then generates a div for each item containing the code below.
This JavaScript does what I want, but it only works for the first item. When I check the console there's only one response, which contains the JSON metadata for the first item/video.
This is probably not the best method, but it gets the job done; all I need is to make it work for multiple items. Any ideas how I can do that?
Thank you
<div class="vimeo-video" id="[[+ID]]-video"></div>
<div class="vimeo-info" id="[[+ID]]-info"></div>
<script>
const getJSON = async url => {
  try {
    const response = await fetch(url);
    if (!response.ok) // check if response worked (no 404 errors etc...)
      throw new Error(response.statusText);
    const data = await response.json(); // get JSON from the response
    return data; // returns a promise, which resolves to this data value
  } catch (error) {
    return error;
  }
}
console.log("Fetching data...");
getJSON("https://vimeo.com/api/oembed.json?url=[[+myVideoURL]]").then(data => {
  document.getElementById("[[+ID]]-video").innerHTML = data.html;
  document.getElementById("[[+ID]]-info").innerHTML = '<h2>' + data.title + '</h2>' + data.description;
  console.log(data);
}).catch(error => {
  console.error(error);
});
</script>

In case somebody with basic JavaScript skills like me goes through something similar: the problem was a rookie's mistake, I had to use var instead of const.
The reason is that the CMS outputs this script block once per video, so every block after the first re-declares getJSON in the same global scope. var variables can be re-declared, but const variables cannot, so those later blocks error out before they can run. Here's the working code:
var getJSON = async (url) => {
  try {
    var response = await fetch(url);
    if (!response.ok)
      // check if response worked (no 404 errors etc...)
      throw new Error(response.statusText);
    var data = await response.json(); // get JSON from the response
    return data; // returns a promise, which resolves to this data value
  } catch (error) {
    return error;
  }
};
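A minimal illustration of why the const version only embedded the first video (two identical script blocks, as the CMS would output them; the error text is roughly what Chrome reports):

<script>
  const getJSON = async url => { /* ... */ }; // first block: runs fine
</script>
<script>
  // second block: fails to parse with something like
  // "Uncaught SyntaxError: Identifier 'getJSON' has already been declared",
  // so nothing in this block executes and the second video is never embedded
  const getJSON = async url => { /* ... */ };
</script>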

Related

function stops when fetch() fails

So I have the normal thing you would do to find out whether a file exists and proceed accordingly:
let response = await fetch(url);
if (response.ok) {
  // it exists
} else {
  // it doesn't
}
The problem is, of course, that if it fails it gives me a TypeError: Failed to fetch and my function stops.
Is there a way I can suppress the error?
I cannot switch away from fetch; I've tried everything else and fetch is the best option in my context.
Thanks in advance.
You will need to use try...catch, and it is quite easy to do; have a look at the try...catch documentation.
Take a look at the sample code below:
let response;
try {
  response = await fetch(url); // fetch only rejects on network failures, not on 404s
} catch (err) {
  alert("Error Message : " + err.message);
}
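Putting both together, a minimal sketch of the "does this file exist" check; the helper name urlExists is just for illustration:

// Returns true if the URL responds with a 2xx status, false on 404s or network failures
async function urlExists(url) {
  try {
    const response = await fetch(url);
    return response.ok; // 404/500 responses don't throw, they just set ok to false
  } catch (err) {
    return false; // "TypeError: Failed to fetch" (network error) lands here and is suppressed
  }
}
// usage: if (await urlExists(url)) { /* it exists */ } else { /* it doesn't */ }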
Is there a way I can suppress the error?
You should wrap your function that may cause the error in try...catch.
const fetch = (isFine) => new Promise(resolve => {
  if (isFine)
    resolve("Normal data");
  else
    throw new Error("Error msg...!");
});
const myFunction = async (isFine) => {
  try {
    const response = await fetch(isFine);
    console.log(response);
  } catch (err) {
    console.log("Oops..." + err.message);
  }
}
myFunction(true);
myFunction(false);

Return paginated output recursively with Fetch API

Summary
I'd like to collate paginated output into an array using JavaScript's Fetch API recursively. Having started out with promises, I thought an async/await function would be more suitable.
Attempt
Here's my approach:
global.fetch = require("node-fetch");
async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    // If another page exists, merge it into the array
    // Else return the complete array of paginated output
    if (next_page) {
      data = data.concat(fetchRequest(next_page));
    } else {
      console.log(data);
      return data;
    }
  } catch (err) {
    return console.error(err);
  }
}
// Live demo endpoint to experiment with
fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9");
For this demo, it should result in 2 requests which yield a single array of 20 objects. Although the data is returned, I can't fathom how to collate it together into an array. Any guidance would be really appreciated. Thanks for your time.
Solution #1
Thanks to #ankit-gupta:
async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page;
    if (/<([^>]+)>; rel="next"/g.test(response.headers.get("link"))) {
      next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    }
    // If another page exists, merge its output into the array recursively
    if (next_page) {
      data = data.concat(await fetchRequest(next_page));
    }
    return data;
  } catch (err) {
    return console.error(err);
  }
}
fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9").then(data =>
  console.log(data)
);
For each page, subsequent calls are made recursively and concatenated together into one array. Would it be possible to chain these calls in parallel, using Promise.all, similar to this answer?
On a side note, any ideas why Stack Overflow Snippets fails on the second fetch?
You need to wrap next_page in a condition, otherwise it will lead to a TypeError on the last call (since /<([^>]+)>; rel="next"/g.exec(response.headers.get("link")) will be null).
Before concatenating data, you need the promise to be resolved, hence the await on the recursive call.
Making some minor changes to your code gives the correct output:
global.fetch = require("node-fetch");
async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page;
    if (/<([^>]+)>; rel="next"/g.exec(response.headers.get("link")))
      next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    // If another page exists, merge it into the array
    // Else return the complete array of paginated output
    if (next_page) {
      let temp_data = await fetchRequest(next_page);
      data = data.concat(temp_data);
    }
    return data;
  } catch (err) {
    return console.error(err);
  }
}
// Live demo endpoint to experiment with
fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9").then(data => {
  console.log(data);
});
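On the Promise.all question: parallel requests only help when the page URLs can be computed up front; with a rel="next" chain, each request depends on the previous response. A rough sketch, assuming the endpoint exposes a total-count header such as X-Total-Count (which jsonplaceholder sends for _page queries):

async function fetchAllPagesInParallel(baseUrl) {
  // The first page tells us the page size and (via the header) the total item count
  const first = await fetch(`${baseUrl}?_page=1`);
  const firstData = await first.json();
  const total = Number(first.headers.get("x-total-count")) || firstData.length;
  const pageCount = Math.ceil(total / firstData.length);
  // The remaining pages no longer depend on each other, so fetch them in parallel
  const rest = await Promise.all(
    Array.from({ length: pageCount - 1 }, (_, i) =>
      fetch(`${baseUrl}?_page=${i + 2}`).then(r => r.json())
    )
  );
  return firstData.concat(...rest);
}
fetchAllPagesInParallel("https://jsonplaceholder.cypress.io/posts").then(data => console.log(data.length));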

Return each response from a loop

I have a function using axios that deletes multiple records based on the ids returned for a specific user.
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for `+emailAddress+``);
    for (const rId of recordIds) {
      const response = await axios.delete(`${process.env.API_URL}/`+userId+`/records/`+recordIds+``, {'headers': {Authorization: 'Bearer '+accessToken+''}});
    }
    return response;
  }
  catch(e) {
    console.error(``+emailAddress+` produced the Record Delete Error = ` + e);
  }
}
This isn't working, and I'm unsure why. I would like to see the response for each axios.delete call, but I'm not sure how to get that. Currently it returns response as undefined.
Why your code didn't work:
const response is declared inside the loop's block scope and is not accessible outside of it.
Even if it were declared before the loop (using let) and assigned inside the loop, you would still only be able to return the last response.
You can push each response to an array (responses) and return the array:
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for ${emailAddress}`);
    const responses = [];
    for (const rId of recordIds) {
      // delete one record id (rId) per request, not the whole recordIds array
      const response = await axios.delete(`${process.env.API_URL}/${userId}/records/${rId}`, { headers: { Authorization: `Bearer ${accessToken}` } });
      responses.push(response);
    }
    return responses;
  }
  catch (e) {
    console.error(`${emailAddress} produced the Record Delete Error = ${e}`);
  }
}
However, in this case multiple parallel requests would be better, since you don't actually need to delete one by one. I would use Array.map() to iterate the recordIds array, and return a promise for each one, then wait for all responses using Promise.all(), which would also return an array of responses:
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for ${emailAddress}`);
    return Promise.all(recordIds.map(rId =>
      axios.delete(`${process.env.API_URL}/${userId}/records/${rId}`, { headers: { Authorization: `Bearer ${accessToken}` } })
    ));
  }
  catch (e) {
    console.error(`${emailAddress} produced the Record Delete Error = ${e}`);
  }
}
This isn't working, and I'm unsure why. I would like to see the response for each axios.delete call, but I'm not sure how to get that. Currently it returns response as undefined.
There are several issues in your code:
const response is declared within the loop block, but you try to return it after the loop. const and let are strictly block scoped, so referring to response after the loop throws a ReferenceError, which your catch block swallows, and the function ends up returning undefined.
You assign to response on every iteration. Even if the issue above weren't in place, it still wouldn't work correctly, since you would only end up with the response of the last loop run. You'd have to collect the responses from all runs, e.g. into a list.

Reread a response body from JavaScript's fetch

fetch() returns promise which (if successful) resolves to a Response object. A very common thing to do is immediately call Response.json() to convert the response body to a JSON object.
If the response body isn't valid JSON, then the Response.json() promise fails with an error. The message is something along the lines of:
Unexpected token X in JSON at position 0
That's not very helpful when trying to diagnose the problem; ideally I'd like to be able to see the content from the server (which is often an error message).
However, it appears that you can only read the stream at Response.body once (at least in Chrome). (There's even a read-only Response.bodyUsed flag.) That has already happened when Response.json() tries to convert the body to JSON, so the body appears to be lost forever in the event of a JSON parsing failure.
Is there any way to recover the original response body... short of manually reading it (and then converting to JSON) when the original fetch Promise resolves?
Use Response.clone() to clone the Response:
let clone = response.clone();
Alternatively, use Response.body.getReader(), which returns a reader for the underlying ReadableStream, to read the Response as a stream, and TextDecoder() to convert the Uint8Array chunks to text.
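A rough sketch of that stream-based approach, reading the body once as text which you can then JSON.parse yourself:

async function readBodyAsText(response) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let text = "";
  let chunk;
  while (!(chunk = await reader.read()).done) {
    text += decoder.decode(chunk.value, { stream: true }); // decode each Uint8Array chunk as it arrives
  }
  text += decoder.decode(); // flush any bytes the decoder buffered
  return text;
}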
I had to deal with an API that occasionally botched the JSON response. Before returning response.json() I made a clone of the response object; using a catch block, I can determine if the error is a SyntaxError and proceed to fix the error using the text result of the response clone.
A little like this:
var brokenJson = function (url) {
  var responseCopy;
  return fetch(url)
    .then(function (response) {
      responseCopy = response.clone();
      return response.json();
    }).catch(function (err) {
      if (err instanceof SyntaxError) {
        return responseCopy.text()
          .then(function (data) {
            return fixJson(data);
          });
      } else {
        throw err;
      }
    }).then(function (json) {
      // do things
    });
};
fixJson is just a function that fixes the received data. In my case, when it was broken JSON, it was always broken the same way; I think it had an extra leading { or trailing }, can't recall.
Re-reading the question, you're more likely to want to log the error to the console rather than fix the JSON. Easy rewrite:
var brokenJson = function (url) {
  var responseCopy;
  return fetch(url)
    .then(function (response) {
      responseCopy = response.clone();
      return response.json();
    }).catch(function (err) {
      if (err instanceof SyntaxError) {
        return responseCopy.text()
          .then(function (text) {
            console.error(text);
            throw err;
          });
      } else {
        throw err;
      }
    }).then(function (json) {
      // do things
    });
};
Assigning the response.json() to a variable and returning it worked for me; clone() was again saying it's locked.
fetch("http://localhost:3000/watchlist")
  .then(response => {
    var res = response.json(); // res is a promise; returning it lets the next .then receive the parsed data
    return res;
  })
  .then(data => {
    console.log(data);
    this.setState({ data });
  });
I used JSON.parse(response.resp.data) as somehow clone didn't work.

Did I set my map variable properly to a fetched json?

I have some code below that transforms a fetched JSON into a string, removes the braces, and parses it into a map variable:
let result = '';
let map = [];
fetch(link)
  .then(function(response) {
    return response.json();
  }).then(function(json) {
    result = JSON.stringify(json); // json here is like [{'a':1,'a2':2},{'b':11,'b2':12}]
    result = result.substring(1, result.length - 1);
    map = JSON.parse(result);
  }).catch(function(ex) {
    console.log('parsing failed', ex);
  });
I've tried to simply set map = json, but it gives out the same error, that I have duplicate keys for the map.
If I hard-code my map variable to, let's say, [{'id':1,code: 007},{'id':2, code:014}], it works. I've tried to log result after changing it to a string and it's exactly like the example. So in other words, setting map = json should work in the first place.
I think I might be missing something. What is it?
EDIT #1:
fetch(link)
  .then(function(response) {
    return response.json();
  }).then(function(json) {
    setData(json);
    document.getElementById("result").innerHTML = json;
  }).catch(function(ex) {
    console.log('parsing failed', ex);
  });
function setData(json) {
  map = json;
}
I've tried the solution given by Naomik, except without the response.ok part.
I'm still receiving the same error with map = json. Any help with this?
You can't set a variable like that in an asynchronous handler and expect it to be set outside of the handler. More about that here: "How do I return the response from an asynchronous call?"
I would've just marked your question as a duplicate of that one, but there are other issues with your code, so I'm going to address those now.
In your second .then call, you are attempting to process json, but you're doing it incorrectly:
// Why exactly are you re-stringifying the data we just parsed ?
result = JSON.stringify(json);
// ... wtf ?
result = result.substring(1, result.length - 1);
// Same as above, any vars you set here cannot be read outside of the callback.
// This won't work like you expect (see the related question I linked above)
map = JSON.parse(result);
Remember, JSON is just a string that represents your data, nothing else.
JSON.parse will take a JSON string and turn it into data.
JSON.stringify will take data and turn it into a JSON string.
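A quick round-trip, for illustration:

const data = JSON.parse('[{"a":1},{"b":2}]'); // JSON string -> array of objects
const text = JSON.stringify(data);            // array of objects -> '[{"a":1},{"b":2}]'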
This example might help you a bit
fetch(link).then(function(response) {
  if (response.ok)
    // read JSON response and parse it
    return response.json()
  else
    // response is not OK, throw error
    throw Error(response.status + " " + response.statusText)
})
.then(function(data) {
  console.log("Your data has arrived!")
  console.log("Do something here with your data")
  doSomething(data)
})
.catch(function(err) {
  console.error(err.message)
})
function doSomething(data) {
  console.log("Here's your data:", data);
}
