Here is my output:
Here is my code. I am changing arrayItem.replace("\u001bE", ""), but my problem is that I also want to replace another string, "\x1B-1". How do I declare this in my code so I can replace multiple strings at a time?
const file = files[0];
let reader = new FileReader();
const fruits = [];
reader.onload = (e) => {
    const file = e.target.result;
    const lines = file.split(/\r\n|\n/);
    //console.log(lines.length);
    textarea.value = lines.join('\n');
    lines.forEach(function (arrayItem) {
        //console.log(arrayItem.replace("\u001bE", "<b>"));
        var arr1 = arrayItem.replace("\u001bE", "<b>");
        var arr2 = arr1.replace("\u001bF", "</b>");
        fruits.push(arr2);
    });
    console.log(fruits.length);
    if (parseInt(fruits.length) > 0) {
        console.log(fruits);
        txtreplace.innerHTML = fruits;
    }
};
reader.onerror = (e) => alert(e.target.error.name);
reader.readAsText(file);
});
}, false);
Not sure if this is what you're asking, but you can chain multiple calls to replace() like this:
var arr1 = arrayItem.replace("\u001bE", "<b>").replace("\x1B-1", "<something>");
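If there are several of these control sequences, another option is to drive a single replace from a lookup object (a sketch; the mapping below just reuses the sequences from the question, the "\x1B-1" replacement is a placeholder, and the g flag replaces every occurrence on the line):

// Map each escape sequence to its replacement.
const replacements = {
    "\u001bE": "<b>",
    "\u001bF": "</b>",
    "\x1B-1": "<something>"
};

// Build one regex that matches any of the keys, escaping regex metacharacters first.
const escape = s => s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const pattern = new RegExp(Object.keys(replacements).map(escape).join("|"), "g");

const cleaned = arrayItem.replace(pattern, match => replacements[match]);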
Here is my output image
Here is my code
let input = document.querySelector('input');
var textarea = document.getElementsByClassName('area')[0];
var txtreplace = document.getElementsByClassName('demo')[0];

window.addEventListener('load',
    function () {
        input.addEventListener('change', () => {
            let files = input.files;
            if (files.length == 0) return;
            const file = files[0];
            let reader = new FileReader();
            const fruits = [];
            reader.onload = (e) => {
                const file = e.target.result;
                const lines = file.split(/\r\n|\n/);
                //console.log(lines.length);
                textarea.value = lines.join('\n');
                lines.forEach(function (arrayItem) {
                    //console.log(arrayItem.replace("\u001bE", ""));
                    var arr1 = arrayItem.replace("\u001bE", "");
                    var arr2 = arr1.replace("\u001bF", "");
                    fruits.push(arr2);
                });
                console.log(fruits.length);
                if (parseInt(fruits.length) > 0) {
                    console.log(fruits);
                    txtreplace.innerHTML = fruits;
                }
            };
            reader.onerror = (e) => alert(e.target.error.name);
            reader.readAsText(file);
        });
    }, false);
function printDiv() {
    // getElementsByClassName returns a collection, so take the first element before reading innerHTML
    var divContents = document.getElementsByClassName("demo")[0].innerHTML;
    var a = window.open('', '', 'height=500, width=500');
    a.document.write('<html>');
    a.document.write('<body > <h1>Print PDF <br>');
    a.document.write(divContents);
    a.document.write('</body></html>');
    a.document.close();
    a.print();
}
</script> </body>
You can take a look at https://github.com/parallax/jsPDF.
It's a JS library that allows you to build PDFs using JavaScript.
Otherwise, there are some solutions to convert an HTML page to PDF (https://www.html2pdf.co.uk/), but I don't think that's what you are looking for.
Example of code using jsPDF:
<script src="https://cdnjs.cloudflare.com/ajax/libs/jspdf/1.3.2/jspdf.min.js"></script>
var doc = new jsPDF();
doc.text(20, 20, 'Hello world!');
doc.text(20, 30, 'This is client-side Javascript, pumping out a PDF.');
doc.save('Test.pdf');
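For the 'demo' div from the earlier code, a rough sketch (assuming the same jsPDF 1.3.2 build linked above, and that plain text output is enough) could pull the element's text and print it line by line:

var doc = new jsPDF();
var demo = document.getElementsByClassName('demo')[0];

// Split the element's visible text into lines and print each one a little lower on the page.
demo.innerText.split('\n').forEach(function (line, i) {
    doc.text(20, 20 + i * 10, line);
});

doc.save('output.pdf');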
Hope it helps ;)
I'm uploading and then reading a CSV file, but I'm facing an issue while splitting it. Basically, some column values in the CSV contain ',', so when I split the columns on ',' I don't get the full column value. Please suggest a proper way to handle this. Thanks.
const readCsv = (file) => {
    const reader = new FileReader();
    const filetext = reader.readAsBinaryString(file);
    reader.addEventListener('load', function (e) {
        const data = e.target.result;
        let parsedata = [];
        let newLinebrk = data.split('\n');
        for (let i = 0; i < newLinebrk.length; i++) {
            parsedata.push(newLinebrk[i].split(','));
        }
        console.log("parsedData: ", parsedata);
    });
};
CSV:
column 1    column2
test        lorem, ipsum, dummy/text
after splitting:
['test', 'lorem', 'ipsum', 'dummy/text']
So by doing that, I'm unable to get the proper column value when the string contains a comma.
In my case, I used Papa Parse, which fulfills all my requirements.
const readCsv = (file) => {
    const reader = new FileReader();
    reader.readAsBinaryString(file);
    reader.addEventListener('load', function (e) {
        const data = e.target.result;
        Papaparse.parse(data, {
            complete: function (results) {
                console.log("results: ", results.data);
            },
        });
    });
};
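As a side note (a sketch; check the Papa Parse docs for the exact options you need), Papa Parse can also take the File object directly, which removes the FileReader step entirely:

const readCsv = (file) => {
    Papaparse.parse(file, {
        header: true,          // treat the first row as column names
        skipEmptyLines: true,
        complete: function (results) {
            console.log("results: ", results.data);
        },
    });
};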
My apologies, as I am very new to JavaScript and inexperienced, so my code is very rudimentary, and I'll do the best I can at explaining myself.
I'm making a call to an API to get a blob, which I then convert to base64. I can then pass this base64 string to a separate script within the .map array - so this makes multiple calls to my script. But what I want to do is concatenate the base64 strings into one string and make a single call to my script.
I understand that this is asynchronous and I probably need some callbacks, but I'm unsure how to do this in my case. Essentially I would like to make the 'base64data_1' variable within the reader.onloadend function available outside of the .map array, but the reader.onloadend function is the last action performed.
function getPhoto() {
    const root = 'https://public-api.konectech.com/PROD/api/';
    var acc = document.getElementById("accID").value;
    var photoId1 = document.getElementById("photoID1").value;
    var photoId2 = document.getElementById("photoID2").value;
    var photoId3 = document.getElementById("photoID3").value;
    var root1 = (root + acc + "/attachment/id/overlay/");
    let uri = [root1 + photoId1, root1 + photoId2, root1 + photoId3];
    let filenames = [photoId1, photoId2, photoId3];
    var base64Array = [];
    let base64 = uri.map(function (url) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', url);
        xhr.setRequestHeader("accept", "image/jpg");
        xhr.setRequestHeader("content-type", "application/json");
        xhr.setRequestHeader("x-api-key", "xxxxxxxx");
        xhr.responseType = "blob";
        xhr.send();
        xhr.onload = function (data, filename, mime) {
            var filename = (url.slice(url.lastIndexOf('/') + 1));
            const blob = new Blob([this.response], { type: mime || 'application/octet-stream' });
            var reader = new FileReader();
            reader.readAsDataURL(blob);
            reader.onloadend = function () {
                var base64data = reader.result;
                const base64data_1 = (base64data.slice(base64data.lastIndexOf(',') + 1));
                //console.log([base64data_1]);
                base64Array = base64data_1;
                console.log([base64Array]);
                return [base64Array];
            };
        }
    });
    //console.log([base64Array]);
}
I was hoping that if I could pass the 'base64data_1' variable out as an array, I could then use Promise.all and .join to get the desired result, similar to below:
function getPhoto() {
    var array = ['test_1', 'test_2', 'test_3', 'test_4', 'test_5'];
    var items = [];
    let newArray = array.map(function (array_1) {
        items = array_1 + '_appended';
        console.log(items);
        return [items];
    })
    console.log(newArray);
    Promise.all(newArray)
        .then(function (arrayOfValues) {
            console.log(arrayOfValues.join(",")); // returns test_1_appended,test_2_appended,test_3_appended,test_4_appended,test_5_appended
        })
}
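A minimal sketch of that idea (not the original code: it wraps each request in a Promise that resolves with the base64 string, reusing the uri array and headers from above) would be:

function getBase64(url) {
    return new Promise(function (resolve, reject) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', url);
        xhr.setRequestHeader("accept", "image/jpg");
        xhr.setRequestHeader("x-api-key", "xxxxxxxx");
        xhr.responseType = "blob";
        xhr.onload = function () {
            var reader = new FileReader();
            reader.onloadend = function () {
                // Strip the "data:...;base64," prefix, as in the question.
                resolve(reader.result.slice(reader.result.lastIndexOf(',') + 1));
            };
            reader.onerror = reject;
            reader.readAsDataURL(this.response);
        };
        xhr.onerror = reject;
        xhr.send();
    });
}

Promise.all(uri.map(getBase64)).then(function (base64Array) {
    console.log(base64Array.join(","));
});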
In JavaScript the FileReader object doesn't seem to have support for reading just the first line of a file (up to the newline '\n'). I don't want to read in the whole file, to save memory.
Is there a way to do it?
My code (note that the readLine() function does not exist):
self.loadFirstLineFromFile = function (options, callback) {
    var hiddenElement = document.createElement('input');
    hiddenElement.id = 'hidden-tsv-file-loader';
    hiddenElement.type = 'file';
    hiddenElement.accept = options.extension;
    hiddenElement.style.display = 'none';
    hiddenElement.addEventListener('change', function (event) {
        var file = event.target.files[0];
        var reader = new FileReader(file);
        var firstLine;
        firstLine = reader.readLine();
        callback(firstLine);
    });
    document.body.appendChild(hiddenElement);
    hiddenElement.click();
};
There's nothing built in for that, but it's simple to implement:
var file = event.target.files[0];
var sliced = file.slice(0, 2048); // Pick a size that you're ok with
// NOTE: `await` keyword requires transpiling (Babel) for IE11,
// and to be inside an async function. An alternative is:
// sliced.text().then(function(text) { console.log(text); });
var text = await sliced.text();
console.log(text);
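From there, getting just the first line is a plain split (a small sketch, reusing the callback from the question's code):

var firstLine = text.split(/\r?\n/)[0]; // works as long as the first line fits in the slice
callback(firstLine);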
Here's an interface that reads the data from the Blob decoded as text and chunked by a delimiter:
async function* readLines(blob, encoding = 'utf-8', delimiter = /\r?\n/g) {
    const reader = blob.stream().getReader();
    const decoder = new TextDecoder(encoding);
    try {
        let text = '';
        while (true) {
            const { value, done } = await reader.read();
            if (done) break;
            text += decoder.decode(value, { stream: true });
            const lines = text.split(delimiter);
            text = lines.pop();
            yield* lines;
        }
        yield text;
    } finally {
        reader.cancel();
    }
}
We can use this to read a single line and discard the rest without reading the entire file:
hiddenElement.addEventListener('change', async function (event) {
    const file = event.target.files[0];
    for await (const line of readLines(file, 'utf-8', '\n')) {
        callback(line);
        return; // signals reader.cancel() to the async iterator
    }
});
Since I use JavaScript with Knockout, I refactored Patrick's solution into this:
self.loadStream = function (options, callback) {
    var hiddenElement = document.createElement('input');
    hiddenElement.id = 'hidden-tsv-file-loader';
    hiddenElement.type = 'file';
    hiddenElement.accept = options.extension;
    hiddenElement.style.display = 'none';
    hiddenElement.addEventListener('change', function (event) {
        var file = event.target.files[0];
        var reader = file.stream().getReader();
        var decoder = new TextDecoder('utf-8');
        var data;
        var readNextChunk = function () {
            data = reader.read();
            data.then(function (result) {
                if (!result.value) {
                    callback({ chunk: '', done: true, shouldStop: true }, file);
                } else {
                    var chunk = decoder.decode(result.value, { stream: true });
                    var args = {
                        chunk: chunk,
                        done: result.done,
                        shouldStop: true
                    };
                    callback(args, file);
                    if (!result.done && !args.shouldStop) {
                        readNextChunk();
                    }
                }
            });
        };
        readNextChunk();
        hiddenElement.remove();
    });
    document.body.appendChild(hiddenElement);
    hiddenElement.click();
};
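For the first-line use case, a call could look something like this (a sketch; options.extension and the callback signature follow the code above, and since args.shouldStop stays true, reading stops after the first chunk):

self.loadStream({ extension: '.tsv' }, function (args, file) {
    // Only the first chunk arrives, so take the text up to the first newline.
    var firstLine = args.chunk.split('\n')[0];
    console.log(file.name, firstLine);
});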
I was trying to convert a blob to base64 and found my way around, but while waiting for the result from the function displayBase64String, the map function in submitOffre returns an empty string, even though console.log prints some data.
I'd appreciate any solution.
Here is my code.
submitOffre = (saleData) => {
    debugger;
    var result = base64Service.displayBase64String(saleData);
    console.log("========", result);
    const rs = result.map(value => value.file); // Doesn't work.
    console.log(rs); // rs is empty
}

class Base64Service {
    blobToBase64 = (blob, callback) => {
        var reader = new FileReader();
        var data = '';
        reader.onload = function () {
            var dataUrl = reader.result;
            var base64 = dataUrl.split(',')[1];
            callback(base64);
        };
        reader.readAsDataURL(blob);
    }

    displayBase64String(formProps) {
        const result = [];
        const outbut = Object.entries(formProps.imageToUpload).map(([key, value]) => {
            this.blobToBase64(value, (data) => {
                result.push({ "file": `data:${value.type};base64,${data}` })
            })
        });
        return result;
    };
}

export default new Base64Service();
Something like this might help:
I've modified your code a bit, just to show you the basic pattern.
If you're doing more than one image at a time, you will need to use Promise.all to keep track of more than one promise at once.
submitOffre = async (saleData) => { // SEE THE async KEYWORD
    debugger;
    var result = await blobToBase64(saleData); // SEE THE await KEYWORD
    console.log("========", result);
    const rs = result.map(value => value.file); // Doesn't work.
    console.log(rs); // rs is empty
}
I'll treat it as if you were converting only one image.
blobToBase64 = (blob, callback) => new Promise((resolve, reject) => {
    var reader = new FileReader();
    reader.onload = function () {
        var dataUrl = reader.result;
        var base64 = dataUrl.split(',')[1];
        if (callback) callback(base64); // callback is optional now that the promise resolves with the value
        resolve(base64); // NOTE THE resolve() FUNCTION TO RETURN SOME VALUE TO THE await
    };
    reader.onerror = reject; // surface read errors to the awaiting caller
    reader.readAsDataURL(blob);
});
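For several images at once (a sketch built on the same promise-returning blobToBase64 and the imageToUpload shape from the question), Promise.all keeps the whole thing awaitable:

displayBase64String = (formProps) =>
    Promise.all(
        Object.values(formProps.imageToUpload).map(value =>
            blobToBase64(value).then(data => ({
                file: `data:${value.type};base64,${data}`
            }))
        )
    );

// Usage inside an async function:
// const result = await base64Service.displayBase64String(saleData);
// const rs = result.map(value => value.file);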