I'm trying to parse an XML file, build an object from that information, and push the results to an array. This runs on a Node.js server, so I use promises to keep the work sequential. In the parseImportantInfos method all parsed objects are printed correctly to the console, but after I resolve these objects to the getLocations method and console.log them again, only the first situationrecord of each situation is resolved.
The XML looks like:
...
<situation>
<situationrecord version="123">...</situationrecord>
<situationrecord version="123">...</situationrecord>
</situation>
<situation>
<situationrecord version="456">...</situationrecord>
<situationrecord version="456">...</situationrecord>
</situation>
...
<!-- sometimes there is only one situationrecord -->
<situation>
<situationrecord version="789">...</situationrecord>
</situation>
This is the part of my code where the parsed objects get "lost".
let getLocations = function(xmlArr) {
return new Promise(function(resolve, reject) {
for (var x in xmlArr) {
var $ = cheerio.load(xmlArr[x]);
var promiseList = [];
$('situation').each(function() {
var newPromise = parseImportantInfos($, this)
.then(function(obj) {
console.log(obj); // <---- Here are parsed situations missing, only the first situationrecord of a situation is received
if (obj != "") {
constructionsites.push(obj);
}
});
promiseList.push(newPromise);
});
Promise.all(promiseList)
.then(function() {
resolve(constructionsites);
});
}
});
};
let parseImportantInfos = function($, record) {
return new Promise(function(resolve, reject) {
$(record).find('situationRecord').each(function() {
var startLocationCode = $(this).find('alertCMethod2SecondaryPointLocation').find('specificLocation').text();
var endLocationCode = $(this).find('alertCMethod2PrimaryPointLocation').find('specificLocation').text();
var managementType = $(this).find('roadOrCarriagewayOrLaneManagementType').text();
if (startLocationCode != '' && endLocationCode != '' && managementType != 'roadClosed') {
createSituationRecord($, this, startLocationCode, endLocationCode)
.then(function(obj) {
console.log(obj); // <----------- This log prints all parsed infos correct
resolve(obj);
})
.catch((err) => {
reject("There was an error while parsing the xml");
console.log('err', err.stack);
});
} else {
resolve("");
}
});
});
};
I already debugged the project but can't figure out why only part of the objects are resolved.
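For context, a promise can only settle once, so the resolve(obj) calls made inside the .each() loop of parseImportantInfos only take effect for the first record that finishes; every later call is ignored. A minimal sketch of one way around this, assuming the same cheerio selectors and the createSituationRecord helper from the code above, is to collect one promise per record and resolve with the whole batch:
let parseImportantInfos = function($, situation) {
  // Hypothetical restructuring: gather every record's promise instead of
  // calling resolve() repeatedly inside .each()
  var recordPromises = [];
  $(situation).find('situationRecord').each(function() {
    var startLocationCode = $(this).find('alertCMethod2SecondaryPointLocation').find('specificLocation').text();
    var endLocationCode = $(this).find('alertCMethod2PrimaryPointLocation').find('specificLocation').text();
    var managementType = $(this).find('roadOrCarriagewayOrLaneManagementType').text();
    if (startLocationCode != '' && endLocationCode != '' && managementType != 'roadClosed') {
      recordPromises.push(createSituationRecord($, this, startLocationCode, endLocationCode));
    }
  });
  // Settles exactly once, with an array of every parsed record in this situation
  return Promise.all(recordPromises);
};
getLocations would then receive an array per situation, so the obj != "" check would become something like constructionsites.push(...obj) (or a concat) instead.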
Related
I am trying to fetch JSON data from the WordPress Developer Reference site. I need to search for a keyword without knowing whether it's a function, class, hook, or method, which is part of the URL I need to fetch, so I'm using Promise.all to cycle through all the possible URLs. It works: if response.status is above 299 it throws the error immediately, and if the response is ok it continues to .then. Fine, but occasionally the API returns an ok status even though the JSON is just an empty array. So I need to check whether the JSON data is an empty array, which I can't seem to do in the first part; I can only check in the second part as far as I know. And if it throws the error, it doesn't continue trying the other URLs. Any suggestions?
var keyword = 'AtomParser';
const refs = ['function', 'hook', 'class', 'method'];
// Store the promises
let promises = [];
// Cycle through each type until we find one we're looking for
for (let t = 0; t < refs.length; t++) {
const url =
'https://developer.wordpress.org/wp-json/wp/v2/wp-parser-' +
refs[t] +
'?search=' +
keyword;
// console.log(url);
promises.push(fetch(url));
}
Promise.all(promises)
.then(function(response) {
console.log(response[0]);
// Get the status
console.log('Status code: ' + response[0].status);
if (response[0].status <= 299) {
// The API call was successful!
return response[0].json();
} else {
throw new Error('Broken link status code: ' + response[0].status);
}
})
.then(function(data) {
// This is the HTML from our response as a text string
console.log(data);
// Make sure we have data
if (data.length == 0) {
throw new Error('Empty Array');
}
// ref
const reference = data[0];
// Only continue if not null or empty
if (reference !== null && reference !== undefined && data.length > 0) {
// Success
// Return what I want from the reference
}
})
.catch(function handleError(error) {
console.log('Error' + error);
});
Is there some way to get the JSON data in the first part so I can check if it's in an array while I'm checking the response status?
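For reference, one way to get at the body while still checking the status is to map every response to its parsed JSON inside the first handler and pass the results on together; the answers below build on the same idea. A sketch reusing the promises array from the code above:
Promise.all(promises)
  .then(function (responses) {
    return Promise.all(
      responses.map(function (response) {
        if (response.status > 299) {
          throw new Error('Broken link status code: ' + response.status);
        }
        // Parse the body here so the next .then() can inspect it
        return response.json();
      })
    );
  })
  .then(function (results) {
    // results[i] is the parsed JSON for refs[i]; empty arrays can be filtered out
    var nonEmpty = results.filter(function (data) { return data.length > 0; });
    console.log(nonEmpty);
  })
  .catch(function (error) {
    console.log('Error ' + error);
  });
Note that a single failed request still rejects the whole Promise.all, which is exactly the limitation the answers below address.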
I would recommend encapsulating the success / failure logic for individual requests, then you can determine all the resolved and rejected responses based on the result of that encapsulation.
For example
const checkKeyword = async (ref, keyword) => {
const params = new URLSearchParams({ search: keyword });
const res = await fetch(
`https://developer.wordpress.org/wp-json/wp/v2/wp-parser-${encodeURIComponent(
ref
)}?${params}`
);
if (!res.ok) {
throw new Error(`${res.status}: ${await res.text()}`);
}
const data = await res.json();
if (data.length === 0) {
throw new Error(`Empty results for '${ref}'`);
}
return { ref, data };
};
Now you can use something like Promise.any() or Promise.allSettled() to find the first successful request or all successful requests, respectively
const keyword = "AtomParser";
const refs = ["function", "hook", "class", "method"];
const promises = refs.map((ref) => checkKeyword(ref, keyword));
// First success
Promise.any(promises)
.then(({ ref, data }) => {
console.log(ref, data);
})
.catch(console.error);
// All successes
Promise.allSettled(promises)
.then((responses) =>
responses.reduce(
(arr, { status, value }) =>
status === "fulfilled" ? [...arr, value] : arr,
[]
)
)
.then((results) => {
// results has all the successful responses
});
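One detail worth knowing: if every request fails, Promise.any() rejects with an AggregateError (Node 15+ or a modern browser), so the .catch above receives a single object wrapping all of the individual failures. A small sketch of reporting them:
Promise.any(promises).catch((aggregateError) => {
  // AggregateError exposes the individual rejections on .errors
  for (const err of aggregateError.errors) {
    console.error(err.message);
  }
});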
For whatever reason I couldn't get Phil's answer to work, so I ended up doing the following which works fine for me. (This is for a discord bot in case you're wondering what the other stuff is all about).
var keyword = 'AtomParser';
const refs = ['function', 'hook', 'class', 'method'];
// Store the successful result or error
let final: any[] = [];
let finalError = '';
// Cycle through each type until we find one we're looking for
for (let t = 0; t < refs.length; t++) {
const url =
'https://developer.wordpress.org/wp-json/wp/v2/wp-parser-' +
refs[t] +
'?search=' +
keyword;
console.log(url);
// Try to fetch it
await fetch(url)
.then(function (response) {
console.log(response);
// Get the status
console.log('Status code: ' + response.status);
if (response.status > 299) {
finalError = '`' + refs[t] + '` does not exist.';
throw new Error(finalError);
} else {
// The API call was successful!
return response.json();
}
})
.then(function (data) {
// This is the HTML from our response as a text string
console.log(data);
// Make sure we have data
if (data.length == 0) {
finalError = "Sorry, I couldn't find `" + keyword + '`';
throw new Error(finalError);
}
// Only continue if not null or empty
if (data[0] !== null && data[0] !== undefined && data.length > 0) {
for (let d = 0; d < data.length; d++) {
// Add it to the final array
final.push(data[d]);
}
}
})
.catch(function handleError(error) {
console.log(error);
});
}
if (final.length > 0) {
for (let f = 0; f < final.length; f++) {
// ref
const reference = final[f];
// Get the link
const link = reference.link;
// Get the title
var title = reference.title.rendered;
title = title.replace('&gt;', '>'); // decode any escaped '>' in the title
// Get the excerpt
var excerpt = reference.excerpt.rendered;
excerpt = excerpt.replace('<p>', '');
excerpt = excerpt.replace('</p>', '');
excerpt = excerpt.replace('<b>', '**');
excerpt = excerpt.replace('</b>', '**');
console.log(excerpt);
message.reply(
new discord.Embed({
title: `${title}`,
url: link,
description: `${excerpt}\n\n`,
footer: {
text: `WordPress Developer Code Reference\nhttps://developer.wordpress.org/`,
},
})
);
}
} else if (finalError != '') {
message.reply(finalError);
} else {
message.reply('Something went wrong...');
}
wp module
Phil's answer puts you on the right track but I want to expand on some of his ideas. Use of URLSearchParams is great, but you can improve on it by using the high-level URL API and forgo encodeURIComponent and constructing the search params manually. Notice I'm putting this code in its own wp module so I can separate concerns more easily. We don't want all of this code leaking into your main program.
// wp.js
import { fetch } from "whatwg-fetch" // or your chosen implementation
const baseURL = "https://developer.wordpress.org"
async function search1(path, query) {
const u = new URL(path, baseURL)
u.searchParams.set("search", query)
const result = await fetch(u)
if (!result.ok) throw Error(`Search failed (${result.status}): ${u}`)
return result.json()
}
search1 searches one path, but we can write search to search all the necessary paths. I don't think there's any reason to get fancy with each path here, so just write them out -
// wp.js (continued)
function search(query) {
const endpoints = [
"/wp-json/wp/v2/wp-parser-function",
"/wp-json/wp/v2/wp-parser-hook",
"/wp-json/wp/v2/wp-parser-class",
"/wp-json/wp/v2/wp-parser-method"
]
return Promise
.all(endpoints.map(e => search1(e, query)))
.then(results => results.flat())
}
export { search }
main module
Notice we only exported search as search1 is internal to the wp module. Let's see how we can use it in our main module now -
// main.js
import { search } from "./wp.js"
for (const result of await search("database"))
if(result.guid.rendered)
console.log(`${result.title.rendered}\n${result.guid.rendered}\n`)
In this example, we first search for "database" -
wp_should_replace_insecure_home_url()
https://developer.wordpress.org/reference/functions/wp_should_replace_insecure_home_url/
wp_delete_signup_on_user_delete()
https://developer.wordpress.org/reference/functions/wp_delete_signup_on_user_delete/
get_post_datetime()
https://developer.wordpress.org/reference/functions/get_post_datetime/
wp_ajax_health_check_get_sizes()
https://developer.wordpress.org/reference/functions/wp_ajax_health_check_get_sizes/
wp_should_replace_insecure_home_url
https://developer.wordpress.org/reference/hooks/wp_should_replace_insecure_home_url/
comments_pre_query
https://developer.wordpress.org/reference/hooks/comments_pre_query/
users_pre_query
https://developer.wordpress.org/reference/hooks/users_pre_query/
WP_Object_Cache
http://developer.wordpress.org/reference/classes/wp_object_cache/
wpdb
http://developer.wordpress.org/reference/classes/wpdb/
WP_REST_Menu_Items_Controller::prepare_item_for_database()
https://developer.wordpress.org/reference/classes/wp_rest_menu_items_controller/prepare_item_for_database/
WP_REST_Global_Styles_Controller::prepare_item_for_database()
https://developer.wordpress.org/reference/classes/wp_rest_global_styles_controller/prepare_item_for_database/
WP_REST_Menus_Controller::prepare_item_for_database()
https://developer.wordpress.org/reference/classes/wp_rest_menus_controller/prepare_item_for_database/
WP_REST_Templates_Controller::prepare_item_for_database()
https://developer.wordpress.org/reference/classes/wp_rest_templates_controller/prepare_item_for_database/
WP_REST_Application_Passwords_Controller::prepare_item_for_database()
https://developer.wordpress.org/reference/classes/wp_rest_application_passwords_controller/prepare_item_for_database/
wpdb::db_server_info()
https://developer.wordpress.org/reference/classes/wpdb/db_server_info/
WP_REST_Attachments_Controller::insert_attachment()
https://developer.wordpress.org/reference/classes/wp_rest_attachments_controller/insert_attachment/
WP_Debug_Data::get_database_size()
https://developer.wordpress.org/reference/classes/wp_debug_data/get_database_size/
WP_REST_Meta_Fields::update_multi_meta_value()
https://developer.wordpress.org/method/wp_rest_meta_fields/update_multi_meta_value/
another search example
Now let's search for "image" -
for (const result of await search("image"))
if(result.guid.rendered)
console.log(`${result.title.rendered}\n${result.guid.rendered}\n`)
get_adjacent_image_link()
https://developer.wordpress.org/reference/functions/get_adjacent_image_link/
get_next_image_link()
https://developer.wordpress.org/reference/functions/get_next_image_link/
get_previous_image_link()
https://developer.wordpress.org/reference/functions/get_previous_image_link/
wp_robots_max_image_preview_large()
https://developer.wordpress.org/reference/functions/wp_robots_max_image_preview_large/
wp_getimagesize()
https://developer.wordpress.org/reference/functions/wp_getimagesize/
is_gd_image()
https://developer.wordpress.org/reference/functions/is_gd_image/
wp_show_heic_upload_error()
https://developer.wordpress.org/reference/functions/wp_show_heic_upload_error/
wp_image_src_get_dimensions()
https://developer.wordpress.org/reference/functions/wp_image_src_get_dimensions/
wp_image_file_matches_image_meta()
https://developer.wordpress.org/reference/functions/wp_image_file_matches_image_meta/
_wp_check_existing_file_names()
https://developer.wordpress.org/reference/functions/_wp_check_existing_file_names/
edit_custom_thumbnail_sizes
https://developer.wordpress.org/reference/hooks/edit_custom_thumbnail_sizes/
get_header_image_tag_attributes
https://developer.wordpress.org/reference/hooks/get_header_image_tag_attributes/
image_editor_output_format
https://developer.wordpress.org/reference/hooks/image_editor_output_format/
wp_image_src_get_dimensions
https://developer.wordpress.org/reference/hooks/wp_image_src_get_dimensions/
wp_get_attachment_image
https://developer.wordpress.org/reference/hooks/wp_get_attachment_image/
image_sideload_extensions
https://developer.wordpress.org/reference/hooks/image_sideload_extensions/
wp_edited_image_metadata
https://developer.wordpress.org/reference/hooks/wp_edited_image_metadata/
wp_img_tag_add_loading_attr
https://developer.wordpress.org/reference/hooks/wp_img_tag_add_loading_attr/
wp_image_file_matches_image_meta
https://developer.wordpress.org/reference/hooks/wp_image_file_matches_image_meta/
get_custom_logo_image_attributes
https://developer.wordpress.org/reference/hooks/get_custom_logo_image_attributes/
Custom_Image_Header
http://developer.wordpress.org/reference/classes/custom_image_header/
WP_Image_Editor_Imagick
http://developer.wordpress.org/reference/classes/wp_image_editor_imagick/
WP_Embed
http://developer.wordpress.org/reference/classes/wp_embed/
WP_Image_Editor
http://developer.wordpress.org/reference/classes/wp_image_editor/
WP_Customize_Background_Image_Setting
http://developer.wordpress.org/reference/classes/wp_customize_background_image_setting/
WP_Customize_Header_Image_Setting
http://developer.wordpress.org/reference/classes/wp_customize_header_image_setting/
WP_Image_Editor_GD
http://developer.wordpress.org/reference/classes/wp_image_editor_gd/
WP_Customize_Header_Image_Control
http://developer.wordpress.org/reference/classes/wp_customize_header_image_control/
WP_REST_Server::add_image_to_index()
https://developer.wordpress.org/reference/classes/wp_rest_server/add_image_to_index/
WP_REST_URL_Details_Controller::get_image()
https://developer.wordpress.org/reference/classes/wp_rest_url_details_controller/get_image/
WP_Image_Editor::get_default_quality()
https://developer.wordpress.org/reference/classes/wp_image_editor/get_default_quality/
WP_Theme_JSON::get_blocks_metadata()
https://developer.wordpress.org/reference/classes/wp_theme_json/get_blocks_metadata/
WP_Image_Editor_Imagick::pdf_load_source()
https://developer.wordpress.org/reference/classes/wp_image_editor_imagick/pdf_load_source/
WP_Image_Editor_Imagick::write_image()
https://developer.wordpress.org/reference/classes/wp_image_editor_imagick/write_image/
WP_Image_Editor_Imagick::maybe_exif_rotate()
https://developer.wordpress.org/reference/classes/wp_image_editor_imagick/maybe_exif_rotate/
WP_Image_Editor_Imagick::make_subsize()
https://developer.wordpress.org/reference/classes/wp_image_editor_imagick/make_subsize/
WP_Image_Editor_GD::make_subsize()
https://developer.wordpress.org/reference/classes/wp_image_editor_gd/make_subsize/
empty search result
Searching for "zzz" will yield no results -
for (const result of await search("zzz"))
if(result.guid.rendered)
console.log(`${result.title.rendered}\n${result.guid.rendered}\n`)
<empty result>
This is a piece of code which writes data to a BLE device and reads data back from it. Data is written to the device as a buffer; the value in 'mydata' (AAAD0000) is the command that is written in order to read the data.
The function chara3() contains the write and, inside its callback, the read that returns the data for the command that was passed.
My requirement: in the 'mydata' value mentioned earlier, the last two zeros are the memory address. I need to read the data at memory addresses from zero to 59, that is AAAD0000 to AAAD0059, so of course I need a loop. Reading the zeroth location works fine and I get the output, but when I tried to put it inside a loop the code became a mess: the read part does not execute.
Can anyone suggest a better way to read the data from the zeroth memory location to the 59th (AAAD0000 to AAAD0059)?
The sequence should be:
- the first command writes to the device
- then the data is read back
- the memory location is incremented by 1
- this repeats up to 59
var mydata = 'AAAD0000';
function chara3() {
var buff2 = new Buffer(mydata, 'hex');
SensorCharacteristic.write(buff2, false, function(error) { //[0x002d]
console.log('Writing command SUCCESSFUL',mydata);
if (!error) {
SensorCharacteristic.read((error, data) => {
console.log("i just entered");
if (data.toString('hex') != '0000') {
console.log('Temperature History: ', data.toString('hex'));
enter();
}
else {
console.log('contains null value');
} //else
});
}
function enter()
{
mydata = (parseInt(mydata, 16) + 00000001).toString(16);
}
}); //.write
} //chara3
There's no error, but some part of the code is not executing.
You can use recursion by wrapping your methods in a promise:
async function chara3(mydata = 'AAAD0000') {
if (mydata === 'AAAD0059') {
return;
}
var buff2 = new Buffer(mydata, 'hex');
return new Promise((resolve) => {
SensorCharacteristic.write(buff2, false, function (error) { //[0x002d]
console.log('Writing command SUCCESSFUL', mydata);
if (!error) {
SensorCharacteristic.read(async (error, data) => {
console.log("i just entered");
if (data.toString('hex') != '0000') {
console.log('Temperature History: ', data.toString('hex'));
let next = await chara3(enter(mydata))
return resolve(next);
}
else {
console.log('contains null value');
return resolve();
} //else
});
}
}); //.write
});
} //chara3
function enter(mydata) {
return (parseInt(mydata, 16) + 1).toString(16).toUpperCase();
}
Also, if you can convert your methods SensorCharacteristic.write and SensorCharacteristic.read into promises, you can simply map over the addresses:
async function chara3(mydata) {
var buff2 = new Buffer(mydata, 'hex');
await SensorCharacteristic.write(buff2, false);
console.log('Writing command SUCCESSFUL', mydata);
let data = await SensorCharacteristic.read();
if (data.toString('hex') != '0000') {
console.log('Temperature History: ', data.toString('hex'));
} else {
console.log('contains null value');
}
};
let promiseArray = Array(60).fill().map((_, i) => (parseInt('AAAD0000', 16) + i).toString(16)).map(chara3);
Promise.all(promiseArray).then(() => console.log('done'));
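If write and read take Node-style (err, result) callbacks, as noble's characteristic methods do, util.promisify is one way to get the promise-returning versions assumed above. A sketch; the helper names and the sequential loop are illustrative, not part of the original answer:
const { promisify } = require('util');

// Bind first so `this` still points at the characteristic inside write/read
const writeAsync = promisify(SensorCharacteristic.write.bind(SensorCharacteristic));
const readAsync = promisify(SensorCharacteristic.read.bind(SensorCharacteristic));

async function readRange() {
  // A single BLE characteristic usually can't handle 60 concurrent write/read
  // pairs, so a sequential loop may be safer than Promise.all here.
  for (let i = 0; i < 60; i++) {
    const address = (parseInt('AAAD0000', 16) + i).toString(16).toUpperCase();
    await writeAsync(Buffer.from(address, 'hex'), false);
    const data = await readAsync();
    console.log(address, data.toString('hex'));
  }
}

readRange().then(() => console.log('done')).catch(console.error);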
I tried to configure a backend factory to obtain data using the Amazon Products API.
Here is the script I execute in Node.js:
var amazon = require('amazon-product-api');
var client = amazon.createClient({
awsId: 'ID',
awsSecret: 'sectret',
awsTag: 'tag',
});
// SERVER
// var Promise = require('bluebird');
var koa = require('koa');
var router = require('koa-router')();
router.get('/node/:index', function* () {
this.body = yield client.browseNodeLookup({
browseNodeId: this.params.index,
});
});
router.get('/', function* (ctx, next) {
var node = yield client.browseNodeLookup({
browseNodeId: 1063498,
});
this.body = node;
});
var app = koa();
app.use(router.routes()).use(router.allowedMethods());
app.listen(8005);
On the frontend I use bluebird's Promise.map() to map over an array of Amazon product nodes. At the end of the function, I expect the links (strings) in the array to have been transformed into objects (obtained from the API).
Here is the function:
someFn(links) { // links is an array of node IDs
return Promise.map(links, (link) => {
var link = link;
if (typeof link === "object" || level > 1) {
return link;
} else {
return loadUrl({
url: 'http://localhost:8005/node/'+link,
action: 'json',
}).then((res) => {
if (isJSON(res)) {
return new Category(res); // transform string to object
} else {
return link;
}
});
}
});
}
Amazon allows at most 10 queries, which is why I need to run the function a few times or loop it to obtain an object for each string in the array.
The idea is to wait for a successful answer on the backend, or to repeat the query (yield client.browseNodeLookup).
Or just pass the array of node IDs and get the JSON for each as the result.
I don't have much experience with Node.js servers and routing configuration, so can you help me configure it properly?
I still haven't found a solution that uses the backend for this task, but I've updated the loadUrl function. The function re-runs itself until it retrieves a successful answer:
function loadUrl(options) {
var options = options;
return new Promise((resolve, reject) => {
$.post('/', options).done((result) => {
if (result != 'Internal Server Error') {
resolve(result);
} else {
(function run(x) {
var x = x || 0;
// 300 - calls limit
// for me 100 is enough
if (x <= 300) {
setTimeout(function() {
$.post('/', options).done((result) => {
if (result != 'Internal Server Error') {
resolve(result);
} else {
console.log('call', x);
run(x+1);
}
});
}, 100);
} else {
resolve(result);
}
})();
}
});
});
}
As I understand it, $.post() has a time limit for the answer, and that's why I had the problem. The data is obtained by the backend factory, but my frontend script wasn't prepared to wait for it and had already stopped.
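The same retry idea could also live on the backend, so the frontend only asks once. A sketch, assuming client.browseNodeLookup returns a promise (as its use with yield in the question suggests) and that bluebird's Promise.delay is available; the helper name and retry count are illustrative:
var Promise = require('bluebird');

function browseNodeWithRetry(browseNodeId, attempts) {
  return client.browseNodeLookup({ browseNodeId: browseNodeId })
    .catch(function (err) {
      if (attempts <= 1) throw err;
      // Back off briefly before retrying, to stay under Amazon's rate limit
      return Promise.delay(200).then(function () {
        return browseNodeWithRetry(browseNodeId, attempts - 1);
      });
    });
}

router.get('/node/:index', function* () {
  this.body = yield browseNodeWithRetry(this.params.index, 5);
});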
I am using the sw-precache module with sw-toolbox. When I navigate to the web app URL, the API response is cached in the toolbox cache storage. If I then turn off the internet and refresh the URL, the service worker serves the API with a 200 OK response from the service worker cache, but if I navigate to some other URL and then come back to the main web app, all the dynamically cached API responses in the toolbox cache storage get deleted. I am using gulp to create the service worker file; the gulp code to generate it is below:
gulp.task('generate-service-worker', function(callback) {
var path = require('path');
var swPrecache = require('sw-precache');
var rootDir = './src';
swPrecache.write(path.join(rootDir, 'sw.js'), {
staticFileGlobs: [rootDir + '/**/*.{js,html,css,png,jpg,gif}'],
stripPrefix: rootDir,
importScripts: [
'sw-toolbox.js',
'toolbox-routes.js'
]
}, callback);
});
sw-toolbox.js contains the code from the bower build of the sw-toolbox library. toolbox-routes.js has the following four calls:
toolbox.router.get(/^https:\/\/.*\/city/, toolbox.cacheFirst, {});
toolbox.router.get(/^https:\/\/.*\/splash/, toolbox.cacheFirst, {});
toolbox.router.get(/^https:\/\/.*amazonaws\.com\/.*/, toolbox.cacheFirst, {});
toolbox.router.get(/^https:\/\/.*\/api\/booking/, toolbox.networkFirst, {});
The city API, for which I get a 404 error, looks something like this:
https://www.abcd.com/city?includes=venue,car
I also get a warning in the console that says: "The FetchEvent for "https://www.abcd.com/city?includes=venue,car" resulted in a network error response: the promise was rejected."
The service worker file is this:
'use strict';
importScripts("sw-toolbox.js", "toolbox-routes.js");
/* eslint-disable quotes, comma-spacing */
var PrecacheConfig = [
["/app/directives/datepicker/datepicker.html", "fa21126be162d099882132226c9681d2"],
["/app/directives/datepicker/datepicker.js", "5ec9f32a36e10feeb03c40d984e74058"],
["/app/directives/map/map.js", "cfabf0230663391d8c78f46a2b198f89"],
["/index.html", "98b8890a31f58d2362953593bbda427f"],
["/index.notification.js", "0cf8aa857d28c309f7b37b97141c1853"],
["/sw-toolbox.js", "66531e5962e4dccb0526a2b4cd6364a4"],
["/toolbox-routes.js", "e7e168157a8afc939ecfb44da773270d"]
];
/* eslint-enable quotes, comma-spacing */
var CacheNamePrefix = 'sw-precache-v1--' + (self.registration ? self.registration.scope : '') + '-';
var IgnoreUrlParametersMatching = [/^utm_/];
var addDirectoryIndex = function(originalUrl, index) {
var url = new URL(originalUrl);
if (url.pathname.slice(-1) === '/') {
url.pathname += index;
}
return url.toString();
};
var getCacheBustedUrl = function(url, param) {
param = param || Date.now();
var urlWithCacheBusting = new URL(url);
urlWithCacheBusting.search += (urlWithCacheBusting.search ? '&' : '') +
'sw-precache=' + param;
return urlWithCacheBusting.toString();
};
var isPathWhitelisted = function(whitelist, absoluteUrlString) {
// If the whitelist is empty, then consider all URLs to be whitelisted.
if (whitelist.length === 0) {
return true;
}
// Otherwise compare each path regex to the path of the URL passed in.
var path = (new URL(absoluteUrlString)).pathname;
return whitelist.some(function(whitelistedPathRegex) {
return path.match(whitelistedPathRegex);
});
};
var populateCurrentCacheNames = function(precacheConfig,
cacheNamePrefix, baseUrl) {
var absoluteUrlToCacheName = {};
var currentCacheNamesToAbsoluteUrl = {};
precacheConfig.forEach(function(cacheOption) {
var absoluteUrl = new URL(cacheOption[0], baseUrl).toString();
var cacheName = cacheNamePrefix + absoluteUrl + '-' + cacheOption[1];
currentCacheNamesToAbsoluteUrl[cacheName] = absoluteUrl;
absoluteUrlToCacheName[absoluteUrl] = cacheName;
});
return {
absoluteUrlToCacheName: absoluteUrlToCacheName,
currentCacheNamesToAbsoluteUrl: currentCacheNamesToAbsoluteUrl
};
};
var stripIgnoredUrlParameters = function(originalUrl,
ignoreUrlParametersMatching) {
var url = new URL(originalUrl);
url.search = url.search.slice(1) // Exclude initial '?'
.split('&') // Split into an array of 'key=value' strings
.map(function(kv) {
return kv.split('='); // Split each 'key=value' string into a [key, value] array
})
.filter(function(kv) {
return ignoreUrlParametersMatching.every(function(ignoredRegex) {
return !ignoredRegex.test(kv[0]); // Return true iff the key doesn't match any of the regexes.
});
})
.map(function(kv) {
return kv.join('='); // Join each [key, value] array into a 'key=value' string
})
.join('&'); // Join the array of 'key=value' strings into a string with '&' in between each
return url.toString();
};
var mappings = populateCurrentCacheNames(PrecacheConfig, CacheNamePrefix, self.location);
var AbsoluteUrlToCacheName = mappings.absoluteUrlToCacheName;
var CurrentCacheNamesToAbsoluteUrl = mappings.currentCacheNamesToAbsoluteUrl;
function deleteAllCaches() {
return caches.keys().then(function(cacheNames) {
return Promise.all(
cacheNames.map(function(cacheName) {
return caches.delete(cacheName);
})
);
});
}
self.addEventListener('install', function(event) {
event.waitUntil(
// Take a look at each of the cache names we expect for this version.
Promise.all(Object.keys(CurrentCacheNamesToAbsoluteUrl).map(function(cacheName) {
return caches.open(cacheName).then(function(cache) {
// Get a list of all the entries in the specific named cache.
// For caches that are already populated for a given version of a
// resource, there should be 1 entry.
return cache.keys().then(function(keys) {
// If there are 0 entries, either because this is a brand new version
// of a resource or because the install step was interrupted the
// last time it ran, then we need to populate the cache.
if (keys.length === 0) {
// Use the last bit of the cache name, which contains the hash,
// as the cache-busting parameter.
// See https://github.com/GoogleChrome/sw-precache/issues/100
var cacheBustParam = cacheName.split('-').pop();
var urlWithCacheBusting = getCacheBustedUrl(
CurrentCacheNamesToAbsoluteUrl[cacheName], cacheBustParam);
var request = new Request(urlWithCacheBusting, {
credentials: 'same-origin'
});
return fetch(request).then(function(response) {
if (response.ok) {
return cache.put(CurrentCacheNamesToAbsoluteUrl[cacheName],
response);
}
console.error('Request for %s returned a response status %d, ' +
'so not attempting to cache it.',
urlWithCacheBusting, response.status);
// Get rid of the empty cache if we can't add a successful response to it.
return caches.delete(cacheName);
});
}
});
});
})).then(function() {
return caches.keys().then(function(allCacheNames) {
return Promise.all(allCacheNames.filter(function(cacheName) {
return cacheName.indexOf(CacheNamePrefix) === 0 &&
!(cacheName in CurrentCacheNamesToAbsoluteUrl);
}).map(function(cacheName) {
return caches.delete(cacheName);
}));
});
}).then(function() {
if (typeof self.skipWaiting === 'function') {
// Force the SW to transition from installing -> active state
self.skipWaiting();
}
})
);
});
if (self.clients && (typeof self.clients.claim === 'function')) {
self.addEventListener('activate', function(event) {
event.waitUntil(self.clients.claim());
});
}
self.addEventListener('message', function(event) {
if (event.data.command === 'delete_all') {
console.log('About to delete all caches...');
deleteAllCaches().then(function() {
console.log('Caches deleted.');
event.ports[0].postMessage({
error: null
});
}).catch(function(error) {
console.log('Caches not deleted:', error);
event.ports[0].postMessage({
error: error
});
});
}
});
self.addEventListener('fetch', function(event) {
if (event.request.method === 'GET') {
var urlWithoutIgnoredParameters = stripIgnoredUrlParameters(event.request.url,
IgnoreUrlParametersMatching);
var cacheName = AbsoluteUrlToCacheName[urlWithoutIgnoredParameters];
var directoryIndex = 'index.html';
if (!cacheName && directoryIndex) {
urlWithoutIgnoredParameters = addDirectoryIndex(urlWithoutIgnoredParameters, directoryIndex);
cacheName = AbsoluteUrlToCacheName[urlWithoutIgnoredParameters];
}
var navigateFallback = '';
// Ideally, this would check for event.request.mode === 'navigate', but that is not widely
// supported yet:
// https://code.google.com/p/chromium/issues/detail?id=540967
// https://bugzilla.mozilla.org/show_bug.cgi?id=1209081
if (!cacheName && navigateFallback && event.request.headers.has('accept') &&
event.request.headers.get('accept').includes('text/html') &&
/* eslint-disable quotes, comma-spacing */
isPathWhitelisted([], event.request.url)) {
/* eslint-enable quotes, comma-spacing */
var navigateFallbackUrl = new URL(navigateFallback, self.location);
cacheName = AbsoluteUrlToCacheName[navigateFallbackUrl.toString()];
}
if (cacheName) {
event.respondWith(
// Rely on the fact that each cache we manage should only have one entry, and return that.
caches.open(cacheName).then(function(cache) {
return cache.keys().then(function(keys) {
return cache.match(keys[0]).then(function(response) {
if (response) {
return response;
}
// If for some reason the response was deleted from the cache,
// raise and exception and fall back to the fetch() triggered in the catch().
throw Error('The cache ' + cacheName + ' is empty.');
});
});
}).catch(function(e) {
console.warn('Couldn\'t serve response for "%s" from cache: %O', event.request.url, e);
return fetch(event.request);
})
);
}
}
});
Thanks a ton in advance!
I have a Node.js application that, upon initialisation, reads two tables from an SQL database and reconstructs their relationship in memory. They're used for synchronously looking up data that changes (very) infrequently.
Problem: Sometimes I can't access the data, even though the application reports successfully loading it.
Code:
constants.js
module.exports = {
ready: function () { return false; }
};
var log = sysLog('core', 'constants')
, Geo = require('../models/geo.js');
var _ready = false
, _countries = []
, _carriers = [];
function reload() {
_ready = false;
var index = Object.create(null);
return Geo.Country.find().map(function (country) {
var obj = country.toPlainObject()
, id = obj.id;
delete obj.id;
index[id] = obj;
return Object.freeze(obj);
}).then(function (countries) {
log.debug('Loaded ' + countries.length + ' countries');
_countries = countries;
return Geo.Carrier.Descriptor.find().map(function (carrier) {
var obj = carrier.toPlainObject();
if (obj.country) {
obj.country = index[obj.country];
}
return Object.freeze(obj);
}).then(function (carriers) {
log.debug('Loaded ' + carriers.length + ' carriers');
_carriers = carriers;
});
}).finally(function () {
_ready = true;
});
}
reload().catch(function (err) {
log.crit({ message: 'Could not load constants', reason: err });
process.exit(-42);
}).done();
module.exports = {
reload : reload,
ready : function () { return _ready; },
countries : function () { return _countries; },
carriers : function () { return _carriers; }
};
utils.js
var log = sysLog('core', 'utils')
, constants = require('./constants');
module.exports = {
getCountryByISO: function(iso) {
if (!iso) {
return;
}
if ('string' != typeof iso) {
throw new Error('getCountryByISO requires a string');
}
if (!constants.ready()) {
throw new UnavailableError('Try again in a few seconds');
}
switch (iso.length) {
case 2:
return _.findWhere(constants.countries(), { 'iso2' : iso.toUpperCase() });
case 3:
return _.findWhere(constants.countries(), { 'iso3' : iso.toUpperCase() });
default:
throw new Error('getCountryByISO requires a 2 or 3 letter ISO code');
}
},
getCarrierByCode: function(code) {
if (!code) {
return;
}
if ('string' != typeof code) {
throw new Error('getCarrierByCode requires a string');
}
if (!constants.ready()) {
throw new UnavailableError('Try again in a few seconds');
}
return _.findWhere(constants.carriers(), { 'code' : code });
},
getCarrierByHandle: function(handle) {
if (!handle) {
return;
}
if ('string' != typeof handle) {
throw new Error('getCarrierByHandle requires a string');
}
if (!constants.ready()) {
throw new UnavailableError('Try again in a few seconds');
}
return _.findWhere(constants.carriers(), { 'handle' : handle });
}
};
Use case
if (data.handle) {
carrier = utils.getCarrierByHandle(data.handle);
if (_.isEmpty(carrier)) {
throw new InternalError('Unknown carrier', { handle: data.handle });
}
}
What's going on: All errors are logged; as soon as I see an error (i.e. "Unknown carrier") in the logs, I check the SQL database to see if it should've been recognised. That has always been the case so far, so I check the debug log to see if data was loaded. I always see "Loaded X countries" and "Loaded Y carriers" with correct values and no sign of "Could not load constants" or any other kind of trouble.
This happens around 10% of the time I start the application and the problem persists (i.e. didn't seem to go away after 12+ hours) and seems to occur regardless of input, leading me to think that the data isn't referenced correctly.
Questions:
Is there something wrong in constants.js or am I doing something very obviously wrong? I've tried setting it up for cyclical loading (even though I am not aware of that happening in this case).
Why can't I (sometimes) access my data?
What can I do to figure out what's wrong?
Is there any way I can work around this? Is there anything else I could to achieve the desired behaviour? Hard-coding the data in constants.js is excluded.
Additional information:
constants.reload() is never actually called from outside of constants.js.
constants.js is required only in utils.js.
utils.js is required in app.js (application entry); all files required before it do not require it.
SQL access is done through an in-house library built on top of knex.js and bluebird; so far it's been very stable.
Versions:
Node.js v0.10.33
underscore 1.7.0
bluebird 2.3.11
knex 0.6.22
}).finally(function () {
_ready = true;
});
Code in a finally will always get called, regardless of whether an error was thrown up the promise chain. Additionally, your reload().catch(/* ... */) clause will never be reached, because finally swallows the error.
Geo.Country.find() or Geo.Carrier.Descriptor.find() could throw an error, and _ready would still be set to true, and the problem of your countries and carriers not being set would persist.
This problem would not have occurred if you had designed your system without a ready call, as I described in my previous post. Hopefully this informs you that the issue here is really beyond finally swallowing a catch. The real issue is relying on side-effects; the modification of free variables results in brittle systems, especially when asynchrony is involved. I highly recommend against it.
Try this
var log = sysLog('core', 'constants');
var Geo = require('../models/geo.js');
var index;
var _countries;
var _carriers;
function reload() {
index = Object.create(null);
_countries = Geo.Country.find().map(function (country) {
var obj = country.toPlainObject();
var id = obj.id;
delete obj.id;
index[id] = obj;
return Object.freeze(obj);
});
_carriers = _countries.then(function(countries) {
return Geo.Carrier.Descriptor.find().map(function (carrier) {
var obj = carrier.toPlainObject();
if (obj.country) {
obj.country = index[obj.country];
}
return Object.freeze(obj);
});
});
return _carriers;
}
reload().done();
module.exports = {
reload : reload,
countries : function () { return _countries; },
carriers : function () { return _carriers; }
};
constants.reload() is never actually called from outside of
constants.js.
That's your issue. constants.reload() reads from a database, which is an asynchronous process. Node's require() is a synchronous process. At the time constants.js is required in utils.js and the module.exports value is returned, your database query is still running. And at whatever point app.js reaches the call to a method from the utils module, that query could still be running, resulting in the error.
You could say that requiring utils.js has the side-effect of requiring constants.js, which has the side-effect of executing a database query, which has the side-effect of concurrently modifying the free variables _countries and _carriers.
Initialize _countries and _carriers as unresolved promises. Have reload() resolve them. Make the utils.js API async.
promises.js:
// ...
var Promise = require('bluebird');
var countriesResolve
, carriersResolve;
var _ready = false
, _countries = new Promise(function (resolve) {
countriesResolve = resolve;
})
, _carriers = new Promise(function (resolve) {
carriersResolve = resolve;
});
function reload() {
_ready = false;
var index = Object.create(null);
return Geo.Country.find().map(function (country) {
// ...
}).then(function (countries) {
log.debug('Loaded ' + countries.length + ' countries');
countriesResolve(countries);
return Geo.Carrier.Descriptor.find().map(function (carrier) {
// ...
}).then(function (carriers) {
log.debug('Loaded ' + carriers.length + ' carriers');
carriersResolve(carriers);
});
}).finally(function () {
_ready = true;
});
}
reload().catch(function (err) {
log.crit({ message: 'Could not load constants', reason: err });
process.exit(-42);
}).done();
module.exports = {
reload : reload,
ready : function () { return _ready; },
countries : function () { return _countries; },
carriers : function () { return _carriers; }
};
utils.js
getCarrierByHandle: function(handle) {
// ...
return constants.carriers().then(function (carriers) {
return _.findWhere(carriers, { 'handle' : handle });
});
}
Use case:
utils.getCarrierByHandle(data.handle).then(function (carrier) {
if (_.isEmpty(carrier)) {
throw new InternalError('Unknown carrier', { handle: data.handle });
}
}).then(function () {
// ... next step in application logic
});
This design will also eliminate the need for a ready method.
Alternatively, you could call constants.reload() on initialization and hang all possibly-dependent operations until it completes. This approach would also obsolete the ready method.
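A minimal sketch of that alternative, assuming an app object with a listen() method and the same log helper as above (the require path is illustrative):
var constants = require('./core/constants');

constants.reload().then(function () {
  // Only start serving requests once countries and carriers are in memory
  app.listen(process.env.PORT || 3000);
}).catch(function (err) {
  log.crit({ message: 'Could not load constants', reason: err });
  process.exit(-42);
});
With this, every request handler can assume the data is already loaded, so utils.js no longer needs the constants.ready() guard.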
What can I do to figure out what's wrong?
You could have analyzed your logs and observed that "Loaded X countries" and "Loaded Y carriers" were sometimes written after "Unknown carrier", helping you realize that the success of utils.getCarrierByHandle() was a race condition.