In an online blockchain developer course I am taking, one prerequisite is JavaScript object-oriented and async programming, which I have no experience with. However, since I have some general programming experience, I figured I would just learn as I go.
A practice activity (not a graded one; I cannot get help on those) on blockchain hashing asks us to create an async hashing function that uses promises to hash data with SHA256. There are two main files: app.js and block.js.
app.js (this was given to us, it is the main file we run):
/**
* Importing the Block class
*/
//
const BlockClass = require('./block.js');
/**
* Creating a block object
*/
const block = new BlockClass.Block("Test Block");
// Generating the block hash
block.generateHash().then((result) => {
    console.log(`Block Hash: ${result.hash}`);
    console.log(`Block: ${JSON.stringify(result)}`);
}).catch((error) => { console.log(error) });
/**
* Step 3: Run the application in node.js
*
*/
// From the terminal: cd into Project folder
// From the terminal: Run node app.js to run the code
block.js (the arrows indicate the part I added myself; everything else was given):
/**
* Import crypto-js/SHA256 library
*/
const SHA256 = require('crypto-js/sha256');
/**
* Class with a constructor for block
*/
class Block {
    constructor(data){
        this.id = 0;
        this.nonce = 144444;
        this.body = data;
        this.hash = "";
    }
    /**
     * Step 1. Implement `generateHash()`,
     * a method that returns the `self` block with the hash.
     *
     * Create a Promise that resolves with `self` after you create
     * the hash of the object and assign it to the hash property `self.hash = ...`
     */
    //
    generateHash() {
        // Use this to create a temporary reference of the class object
        let self = this;
>       //Implement your code here
>       self.hash = SHA256(JSON.stringify(self.body));
>       const promise = new Promise(function(myResolve,myReject){
>           if(self.hash = SHA256(JSON.stringify(self.body))){
>               myResolve(self);
>           }else{
>               myReject(Error("It Broke"));
>           }
>       });
>
>       promise.then(
>           function(result){this.hash = result.hash;},
>           function(error){console.log(error);}
>       );
>   }
}
// Exporting the class Block to be reused in other files
module.exports.Block = Block;
Using online resources, I sort of understand promises, but not really, and definitely not how to apply them here. I was hoping I could get some help on this.
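For reference, here is one way `generateHash()` could be written so that it assigns the hash and returns a Promise that resolves with the block itself, which is what app.js expects when it chains `.then()`. This is a minimal sketch of the pattern, not the course's official solution:

const SHA256 = require('crypto-js/sha256');

class Block {
    constructor(data) {
        this.id = 0;
        this.nonce = 144444;
        this.body = data;
        this.hash = "";
    }

    generateHash() {
        let self = this;
        // Return the Promise so the caller can chain .then()/.catch(),
        // exactly like app.js does with block.generateHash().then(...)
        return new Promise(function (resolve, reject) {
            try {
                // SHA256() returns a WordArray; toString() gives the hex digest.
                // Hash the body here (or JSON.stringify(self), depending on what
                // the exercise expects).
                self.hash = SHA256(JSON.stringify(self.body)).toString();
                resolve(self);
            } catch (error) {
                reject(error);
            }
        });
    }
}

module.exports.Block = Block;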
Related
I am adding a new script for YouTrack that creates a new issue. After the new issue is created, I need to open it for the user so they can edit it, and I don't know how that is possible. How can it be done? Maybe I should get access to some common methods, but I don't know how to do that either.
My script
/**
* This is a template for an action rule. This rule defines a custom command
* and the changes that are applied by the command.
*
* For details, read the Quick Start Guide:
* https://www.jetbrains.com/help/youtrack/cloud/2022.2/Quick-Start-Guide-Workflows-JS.html
*/
const entities = require('@jetbrains/youtrack-scripting-api/entities');
exports.rule = entities.Issue.action({
// TODO: give the rule a human-readable title
title: 'Create_sub_task',
// TODO: define the custom command
command: 'create_sub_task',
guard: (ctx) => {
// TODO: define the condition that must be met to enable the custom command
return true;
},
action: (ctx) => {
var issue = ctx.issue;
var createIssue = function(name) {
var newIssue = new entities.Issue(ctx.currentUser, issue.project,
name + ' for ' + issue.summary);
//newIssue.fields.Subsystem = subsystem;
newIssue.fields.Type = "Detail";
newIssue.links['subtask of'].add(issue);
issue.links['parent for'].add(newIssue);
newIssue.fields.Author = entities.User.current;
// common.document.location.href = 'https://firstbiterp.youtrack.cloud/issue/FBITERP-2/IMBA';
return newIssue;
};
createIssue('NewTask');
},
requirements: {
// TODO: add requirements
}
});
This is currently not possible. Workflows run on the YouTrack server, not in the client's browser, and there is no support for passing such a command back to the client.
I believe the closest thing you can do is show a notification with a link to the newly created issue. Look at the 'clone as draft' command, which does exactly that.
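For illustration, a minimal sketch of that approach. It assumes the `workflow.message` helper from the scripting API and the issue's `url` property, and it has not been tested against this exact YouTrack version (the new issue's final URL may only be resolvable once the transaction commits):

const entities = require('@jetbrains/youtrack-scripting-api/entities');
const workflow = require('@jetbrains/youtrack-scripting-api/workflow');

exports.rule = entities.Issue.action({
  title: 'Create_sub_task',
  command: 'create_sub_task',
  guard: (ctx) => true,
  action: (ctx) => {
    const issue = ctx.issue;
    const newIssue = new entities.Issue(ctx.currentUser, issue.project,
      'NewTask for ' + issue.summary);
    newIssue.links['subtask of'].add(issue);
    // Show a notification to the user who ran the command, containing a link
    // to the new issue; server-side workflow code cannot redirect the browser.
    workflow.message('Created <a href="' + newIssue.url + '">' + newIssue.summary + '</a>');
  },
  requirements: {}
});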
I've decided to split my environments, keeping them in .js files in an environment folder, and to keep all the sensitive information in a .env file (using the third-party module 'dotenv').
That's what I've come up with, but I understand it's not best practice and there are a lot of things that should have been implemented in a completely different way; I just lack experience and practice.
At first, I tried to use plain "for loops" as much as possible because, as far as I know, it's the fastest way to loop through an object, but in some cases it was much easier to use "map" or "filter".
It doesn't look nice to assign data by returning a Promise. Maybe there is a way to get the data without a Promise?
I would appreciate any suggestions on how the code can be improved, good practices to follow, and your experience.
I am also not sure whether I used logging and error handling correctly. That's a completely new thing for me at the moment, but I used "try catch" to catch errors and simply logged them to the console and wrote them to a file.
code:
import { readdirSync } from 'fs';
import path from "path";
import { logger } from '../src/utils/logging';
import { merge } from "lodash";
// FIXME: This function returns a Promise with the data.
// That's awkward and seems like bad practice - too much code for a simple task,
// and dealing with a Promise may end up decreasing performance
// (the simplest code is the fastest code).
export let env = getEnvironment().then(
res => { return res },
err => logger.error(err)
);
// TODO: Rewrite this function into a class Environment to keep it organized and follow the ES6 standard
async function getEnvironment() {
const mode = process.env.NODE_ENV || 'development';
const rootPath = process.cwd();
const folder = 'environment';
const loadEnvironments = () => {
// Getting the list of available environments in the "environment" folder,
// at the same time excluding index.js file
const list = readdirSync(path.join(rootPath, folder)).filter(file => !/(?=^(index.js))/i.test(file));
const parameters = {};
// Loading the files found in the folder,
// merging them with the help of a "lodash" library
// just to get one common Object with all possible parameters from all found environments
const loaded = list.map(fileName => {
let name = fileName.split('.')[0];
let loadedFile = require(path.join(rootPath, folder, fileName));
const file = loadedFile[name];
merge(parameters, { ...file });
return loadedFile;
});
// Picking the correct mode out of the already loaded ones
const current = { ...loaded.filter(file => file[mode]).map(file => file[mode])[0] };
// Returning an object with all parameters
return {
parameters,
current
}
};
const environments = loadEnvironments();
const environment = {} = looping(environments.parameters, environments.current);
function looping(obj, values) {
const collection = {};
for (const key in obj) {
if (obj.hasOwnProperty(key)) {
if (typeof obj[key] !== 'object') {
try {
if (values.hasOwnProperty(key)) {
// By a recursive function run through all parameters,
// transforming the keys to uppercased,
// assigning value to 'obj' (file containing all the parameters)
// from the current mode
collection[key.toUpperCase()] = values[key];
} else {
// if there is no such a key in the current mode,
// 'null' is assigned
collection[key.toUpperCase()] = null;
}
} catch (e) {
logger.error(` Missing parameter "${key.toUpperCase()}" in ${mode} mode!!!`);
}
} else {
// Recursing through the object and the nested objects
collection[key.toUpperCase()] = looping(obj[key], values[key]);
}
}
}
return collection;
}
// When parameters are ready,
// the current mode is assigned
environment["MODE"] = mode;
return environment;
}
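On the FIXME above: since getEnvironment() never actually awaits anything, the Promise is avoidable. A simplified synchronous sketch, keeping the same folder layout, logger, and file-naming assumptions (each file exports an object keyed by its environment name) but leaving out the key-uppercasing step for brevity:

import { readdirSync } from 'fs';
import path from 'path';
import { logger } from '../src/utils/logging';
import { merge } from 'lodash';

// Synchronous variant: no Promise involved, so consumers can simply
// `import { env } from './environment'` and use it right away.
// (require() inside an ESM-style file mirrors the original code.)
function getEnvironment() {
    const mode = process.env.NODE_ENV || 'development';
    const folder = path.join(process.cwd(), 'environment');
    const parameters = {};
    let current = {};

    try {
        readdirSync(folder)
            .filter(file => file !== 'index.js')
            .forEach(fileName => {
                const loaded = require(path.join(folder, fileName));
                // e.g. development.js exports { development: { ... } }
                merge(parameters, loaded[fileName.split('.')[0]]);
                if (loaded[mode]) {
                    current = loaded[mode];
                }
            });
    } catch (err) {
        logger.error(err);
    }

    return { parameters, current, MODE: mode };
}

export const env = getEnvironment();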
I have a network of multiple “nodes” running Meteor v1.5.4.2 (due to dependencies). Each of these nodes is supposed to be able to communicate with the others to fetch statistics etc. This is done using Meteor's ddp-client server-side on the nodes that should get information from the others.
This seemingly worked well, but when we started provoking it with a lot of changes in the network (meaning a lot of connections come and go), the memory gradually builds up until it freezes and falls over. I have limited experience resolving memory leaks, but by looking at heap snapshots, I found that there's a buildup of objects called “Connection” (pasted below). Under strings I also find a lot of strings containing the certs and CRLs used in the DDP connection, leading me to believe there's an issue with my code involving the connection handling. I've tried to list the highlights below, removing a lot of the minor logic involved.
I am at a little bit of a loss as to further approach this, so any suggestions, thoughts or ideas would be most welcome.
Thanks in advance.
Here's a compressed rundown of how it's connected:
if(Meteor.isServer) {
connectionHandler = new DDPConnectionHandler();
Meteor.setInterval( () => connectionHandler.checkNodeConnections(), 5000);
}
export const DDPConnectionHandler = function() {
this.connections = [];
this.checkNodeConnections = () => {
// Logic to add or remove the node connections in this.connections
// Looping per node to handle
const node = {...} // Details of the node to add/remove
// Add new conncetion
this.connections.push( new DDPConnection(node) );
// Remove connection
const index = currentConnections.indexOf(node.id);
this.connections[index].disconnect();
this.connections.splice(index, 1);
};
}
export const DDPConnection = function(node) {
let self = this;
// setting up variables to use, pw, user, url ... etc.
this.connection = DDP.connect(url, { /* certs etc. for SSL */ });
this.connection.call("login", {/* login details */}, (error, result) => {
if( !error ) {
// Wrap in timeout to space out the stats calls
Meteor.setTimeout( () => { self.initNodeStats(); }, randomNumber );
} else { /* No luck */ }
});
this.disconnect = () => {
this.connection.disconnect(); // also tried with .close()
};
this.subscribe = (collection) => {
// function to fetch other data
};
// Initialize and start receiving the default basic node stats from the current external node
this.initNodeStats = () => { this.getStats(); };
this.getStats = () => {
self.connection.call('getStats', {}, (error, result) => {
if( error ) { /* No luck */
} else if ( result ) { /* Pass data to handlers */ }
});
}
}
Connection
_stream::ClientStream
__proto__::Object
_outstandingMethodBlocks::Array
__flushBufferedWrites::()
map::system / Map
_methodInvokers::Object
properties::(object properties)[]
_bufferedWritesFlushAt::system / Oddball
_bufferedWritesFlushHandle::system / Oddball
_lastSessionId::system / Oddball
_retryMigrate::system / Oddball
_userId::system / Oddball
_version::system / Oddball
_versionSuggestion::system / Oddball
onReconnect::system / Oddball
_supportedDDPVersions::Array
_userIdDeps::Tracker.Dependency
_bufferedWrites::Object
_documentsWrittenByStub::Object
_methodHandlers::Object
_methodsBlockingQuiescence::Object
_serverDocuments::Object
_stores::Object
_subsBeingRevived::Object
_subscriptions::Object
_updatesForUnknownStores::Object
_afterUpdateCallbacks::Array
_messagesBufferedUntilQuiescence::Array
_resetStores::system / Oddball
Update, after digging some more.
I seem to be getting a buildup of "methods" in the "_outstandingMethodBlocks" attribute on the Connection objects, which is defined on line 129 here:
https://github.com/meteor/meteor/blob/release-1.5.4.2/packages/ddp-client/livedata_connection.js#L129
Maybe there's some timeout setting I could use to stop them from being stored there?
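Not a verified fix, but two adjustments that might be worth trying on the DDPConnection sketch above, given that buildup. DDP buffers method calls made while the connection is down in `_outstandingMethodBlocks` until it reconnects, so one option is to skip the call when the connection isn't up, and to make sure pending timers are cleared when a node is removed (`statsTimeoutHandle` is a hypothetical field; the original code discards the setTimeout handle):

export const DDPConnection = function(node) {
    let self = this;
    // setting up variables to use, pw, user, url ... etc. (as in the original)
    this.connection = DDP.connect(url, { /* certs etc. for SSL */ });

    // Keep the timer handle so it can be cleared on disconnect.
    this.statsTimeoutHandle = null;
    this.connection.call("login", { /* login details */ }, (error, result) => {
        if (!error) {
            self.statsTimeoutHandle = Meteor.setTimeout(() => self.getStats(), randomNumber);
        }
    });

    this.getStats = () => {
        // Methods called while disconnected sit in _outstandingMethodBlocks
        // until reconnect, so don't queue stats calls on a dead connection.
        if (!self.connection.status().connected) { return; }
        self.connection.call('getStats', {}, (error, result) => {
            if (error) { /* No luck */ }
            else if (result) { /* Pass data to handlers */ }
        });
    };

    this.disconnect = () => {
        // Clear any pending stats timer so it cannot fire on a removed node,
        // and use close() so the client stops retrying in the background.
        if (self.statsTimeoutHandle) { Meteor.clearTimeout(self.statsTimeoutHandle); }
        self.connection.close();
    };
};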
I have multiple files that start with comments like:
/*
* @title Force email verification
* @overview Only allow access to users with verified emails.
* @gallery true
* @category access control
*
* This rule will only allow access users that have verified their emails.
*
* > Note: It might be a better UX to make this verification from your application.
*
* If you are using [Lock](https://auth0.com/docs/lock), the default behavior is to log in a user immediately after they have signed up.
* To prevent this from immediately displaying an error to the user, you can pass the following option to `lock.show()` or similar: `loginAfterSignup: false`.
*
* If you are using [auth0.js](https://auth0.com/docs/libraries/auth0js), the equivalent option is `auto_login: false`.
*
*/
//jshint -W025
function (user, context, callback) {
if (!user.email_verified) {
return callback(new UnauthorizedError('Please verify your email before logging in.'));
} else {
return callback(null, user, context);
}
}
All files contain two types of comments, i.e. /* */ and //. Now I am reading this file in my JavaScript code and want to remove the comments and get the actual code into a variable, e.g.:
function (user, context, callback) {
if (!user.email_verified) {
return callback(new UnauthorizedError('Please verify your email before logging in.'));
} else {
return callback(null, user, context);
}
}
I have tried using the strip-comments and parse-comments npm packages, but neither of them works. Here is the code:
const fs = require('fs');
const path = require('path');
const strip = require('strip-comments');
module.exports = function (ruleFileName, globals, stubs) {
globals = globals || {};
stubs = stubs || {};
const fileName = path.join(__dirname, '../src/rules', ruleFileName + '.js');
const data = fs.readFileSync(fileName, 'utf8');
const code = strip(data);
console.log(code);
return compile(code, globals, stubs);
}
and with parse-comments I tried:
const parsed = parseComments(data)[0];
const code = data.split('\n').slice(parsed.comment.end).join('\n').trim();
I think strip-comments is not working because it takes a string as an argument, but fs.readFileSync doesn't return a string. I have also tried data.toString(), but that also didn't work. So how can I strip the comments from the content? Is there any other solution?
Try using a regex to replace: /\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm
var Text = `/*
* @title Force email verification
* @overview Only allow access to users with verified emails.
* @gallery true
* @category access control
*
* This rule will only allow access users that have verified their emails.
*
* > Note: It might be a better UX to make this verification from your application.
*
* If you are using [Lock](https://auth0.com/docs/lock), the default behavior is to log in a user immediately after they have signed up.
* To prevent this from immediately displaying an error to the user, you can pass the following option to "lock.show()" or similar: "loginAfterSignup: false".
*
* If you are using [auth0.js](https://auth0.com/docs/libraries/auth0js), the equivalent option is "auto_login: false".
*
*/
//jshint -W025
function (user, context, callback) {
if (!user.email_verified) {
return callback(new UnauthorizedError('Please verify your email before logging in.'));
} else {
return callback(null, user, context);
}
}`
console.log(Text.replace(/\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm,''))
like this
https://codepen.io/anon/pen/eQKrWP
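For completeness, the same regex could be dropped into the original module in place of strip(). This is a sketch: compile() is assumed to be the same helper used in the question, and the '$1' replacement keeps the character captured before a // comment so it isn't swallowed:

const fs = require('fs');
const path = require('path');

// Matches /* ... */ blocks and // line comments; the ([^:]|^) guard keeps
// "://" inside URLs from being treated as a comment start.
const COMMENT_PATTERN = /\/\*[\s\S]*?\*\/|([^:]|^)\/\/.*$/gm;

module.exports = function (ruleFileName, globals, stubs) {
    globals = globals || {};
    stubs = stubs || {};
    const fileName = path.join(__dirname, '../src/rules', ruleFileName + '.js');
    const data = fs.readFileSync(fileName, 'utf8'); // 'utf8' makes this a string
    const code = data.replace(COMMENT_PATTERN, '$1').trim();
    return compile(code, globals, stubs);
};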
In my app.js I have the following:
angular.module('app').controller('userList',
['appSettings'
,function (/*@type {app.appSettings}*/appSettings) {
appSettings.<<== it shows a list here but nothing from autocomplete.js
In my autocomplete.js I have the following (generated by JavaScript printing out my services and their members):
var app={};
app.appSettings={};
app.appSettings.userFailMessages={};
app.appSettings.userFailMessages.invalidJson
="Invalid request, user sent is not valid json.";
NetBeans refuses to code-complete appSettings for me and doesn't seem to know it's defined in autocomplete.js. Maybe I'm getting my JSDoc wrong, but I tried a mix of @var, @type and @param without success.
Code completion does work when I type app.appSettings. and gives me a list from autocomplete.js, but I would like to know how I can tell NetBeans that the argument passed to the function is app.appSettings.
Maybe I should have autocomplete.js contain constructor functions instead of object literals, as @type suggests a certain type and not an instance.
This is NetBeans 7.3.1
I was close to the answer: to have NetBeans use a type, you have to define the type. Then, to indicate that the parameters passed to your Angular module (or any function) are of a certain type, I use the @param JSDoc tag.
The angular module:
angular.module('app').controller('userList'
, ['$scope','appRules','appSettings'
,/**
* @param {app.appRules} appRules
* @param {app.appSettings} appSettings
* */
function ($scope,appRules,appSettings,$timeout) {
//<== here both appRules and appSettings give suggestions
// from autocomplete
autocomplete.js (not included in my HTML file, but just there for code suggestions):
/*@typedef {Object} app*/
var app={};
app.appRules={};
app.appRules.userIsInRole=function (user,role){};
app.appRules.general={};
app.appRules.general.isEmpty=function (val){};
app.appRules.general.isEmail=function (val){};
app.appSettings={};
app.appSettings.userFailMessages={};
app.appSettings.userFailMessages.invalidJson
="Invalid request, user sent is not valid json.";
app.appSettings.userFailMessages.noPrivilege
="You do not have the privileges needed to change this user.";
I ran the following code in the console on a page that contains my app to generate autocomplete.js:
var inj;
function allServices(mod, r) {
if (!r) {
r = {};
inj = angular.element(document.querySelector('[data-ng-app]')).injector().get;
}
angular.forEach(angular.module(mod).requires, function(m) {
allServices(m, r)
});
angular.forEach(angular.module(mod)._invokeQueue, function(a) {
try {
r[a[2][0]] = inj(a[2][0]);
} catch (e) {
}
});
return r;
};
var output=[];
function addOutput(names,prop){
if(names.length===1){
output.push('var ');
}
output.push(names.join('.'));
if(typeof prop === 'object'){
output.push('={};\n');
for(thing in prop){
//TODO: no arrays or route paths
if(/[0-9\/\\]/.test(thing)){
continue;
}
names.push(thing);
addOutput(names,prop[thing]);
}
}else{
output.push('=');
output.push(
(typeof prop === 'function')?
prop.toString():
JSON.stringify(prop)
);
output.push(';\n');
}
names.pop();
}
function createOutput(){
allMyServices = allServices('app');
addOutput(['app'],allMyServices);
console.log(output.join(''));
}
createOutput();