How to Display image from path in pdf header using jsPDF-autotable - javascript

I am generating a PDF from HTML using jsPDF and jsPDF-AutoTable. The header and footer are printed well, but the logo I add to the header fails to print. When I console.log(base64Img) and convert it back, it is the same logo.
This is the code that converts the image to a base64 string:
imgToBase64 (url, callback) {
if (!window.FileReader) {
callback(null);
return;
}
const xhr = new XMLHttpRequest();
xhr.responseType = 'blob';
xhr.onload = function () {
const reader = new FileReader();
reader.onloadend = function () {
callback((reader.result as string).replace('text/xml', 'image/jpeg'));
};
reader.readAsDataURL(xhr.response);
};
xhr.open('GET', url);
xhr.send();
}
And this is the code that generates the PDF:
generatePDF() {
let base64Img = null;
this.imgToBase64(this.imageURL, (base64) => {
base64Img = base64;
console.log('image in base 64', base64Img);
const doc = new jspdf('p', 'pt', 'a4');
const header = function(headerData: any) {
doc.setFontSize(20);
doc.setTextColor(0, 190, 208);
doc.setFontStyle('normal');
if (this.base64Img) {
doc.addImage(this.base64Img, 'JPEG', headerData.settings.margin.left, 15, 60, 10);
doc.setFontSize(20);
}
doc.text('Header Title', headerData.settings.margin.left, 60);
const currentdate = new Date();
const datetime = currentdate.getDate() + '/' + (currentdate.getMonth() + 1) + '/' + currentdate.getFullYear();
doc.text('Date: ' + datetime, headerData.settings.margin.left + 400, 60);
doc.setFontSize(5);
};
const totalPagesExp = '{total_pages_count_string}';
const footer = function(footerData) {
doc.setFontSize(10);
let str = 'Page ' + footerData.pageCount;
// Total page number plugin only available in jspdf v1.0+
if (typeof doc.putTotalPages === 'function') {
str = str + ' of ' + totalPagesExp;
console.log('test');
}
doc.text(str, footerData.settings.margin.left, doc.internal.pageSize.height - 10);
doc.text('https://wwww.example.com', footerData.settings.margin.left + 400, doc.internal.pageSize.height - 10);
};
const head = [['Name', 'Phone', 'Email', 'Gender', 'Location', 'Country']]
const options = {
beforePageContent: header,
afterPageContent: footer,
margin: {
top: 100
},
head: head,
columnStyles: {
0: {columnWidth: 100},
1: {columnWidth: 80},
2: {columnWidth: 80},
3: {columnWidth: 50},
}
};
const elem = document.getElementById('pdfContent');
const data = doc.autoTableHtmlToJson(elem);
doc.autoTable( data.columns, data.rows, options);
// Total page number plugin only available in jspdf v1.0+
if (typeof doc.putTotalPages === 'function') {
doc.putTotalPages(totalPagesExp);
}
doc.save('generated.pdf');
});
}

I think I'm too late, but this may help others.
You can use it like this:
var doc = new jsPDF('p', 'pt');
var header = function (data) {
doc.setFontSize(18);
doc.setTextColor(40);
doc.setFontStyle('normal');
doc.addImage(base64_source, 'JPEG', data.settings.margin.left, 20, 50, 50);
//doc.addImage(image base64_source, 'image format', logo_sizes.centered_x, _y, logo_sizes.w, logo_sizes.h);
//Image must be Base64 encoded
};
doc.autoTable(columns, rows, {margin: {top: 80}, beforePageContent: header});
doc.save("test.pdf");
Note: replace these with your own variables; this is only a demo.
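Applied to generatePDF in the question, the likely issue is that the header callback is a plain function, so this.base64Img does not point at the component; the closure variable base64Img captured by imgToBase64's callback is what actually holds the image. A minimal sketch of the header callback under that assumption:
const header = function (headerData) {
  doc.setFontSize(20);
  doc.setTextColor(0, 190, 208);
  doc.setFontStyle('normal');
  // use the closure variable, not this.base64Img
  if (base64Img) {
    doc.addImage(base64Img, 'JPEG', headerData.settings.margin.left, 15, 60, 10);
  }
  doc.text('Header Title', headerData.settings.margin.left, 60);
};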

Related

With javascript, how do I wait for all .toBlob functions to complete before proceeding?

I'm processing images using JavaScript. For each image, I'm creating 4 files using .toBlob. The problem is that .toBlob is asynchronous, so one .toBlob call can still be running while the others have completed. async/await does not seem to work. A counter for the number of images processed does not seem to work either, because the last image can be processed while a previous image has not finished; the last image then increments the counter to the number of images and triggers a save.
I can do a setTimeout on the last image but that's just guessing at the max time.
Here's the code:
let kpData;
let repositoryData;
let collection;
let skuString = `,`;
let numImages;
let numImagesProcessed;
$(document).on(`click`, `.saveSkuImages`, function() {
numImagesProcessed = 0;
kpData = new FormData();
repositoryData = new FormData();
skuString = `,`;
const skuContainer = $(this).closest(`.skuContainer`);
const sku = skuContainer.attr(`sku`);
numImages = skuContainer.find(`.skuImgContainer`).length;
let i = 0;
skuContainer.find(`.skuImgContainer`).each(function() {
i++;
sic = $(this);
const skuImg = sic.find(`.skuImg`);
const imgContainer = skuImg.find(`.imgContainer`);
const img = imgContainer.find(`img:first`);
cvs = $(`#${img.attr(`forcanvas`)}`);
imgNum = parseInt(skuImg.attr(`imgnum`));
const filename = `${sku}${imgNum > 1 ? `-alt${imgNum}` : ``}.jpg`;
img.attr(`filename`, filename);
if (cvs.length) {
createImages(cvs[0], imgNum, filename,i);
} else { //if an image already exists, we didn't create a canvas for it and we don't need to recreate it.
numImagesProcessed++;
}
if (sic.find(`.useForSeasonal`).is(`:checked`)) {
kpData.append(`parentImage`, filename);
}
});
});
sgiArr = [`L`, `LI`, `I`, `K`, `Y`];
function createImages(loadedData,imgNum,filename,i) {
const mime_type = `image/jpeg`;
var cvs = document.createElement(`canvas`);
//Create the detail version of the image
cvs.width = 800;
cvs.height = 800;
ctx = cvs.getContext(`2d`);
ctx.drawImage(loadedData, 0, 0, 800, 800);
if (imgNum === 1 && sgiArr.indexOf(filename.split(`-`)[2]) >= 0) {
// attach the size watermark to our primary image if it's a kid's product
let watermark = document.getElementById(`sgi${filename.split(`-`)[2]}`);
ctx.drawImage(watermark, 10, 720);
}
const newImageData = cvs.toDataURL(mime_type);
const result_image_obj = new Image();
result_image_obj.src = newImageData;
if (imgNum === 1 && sgiArr.indexOf(filename.split(`-`)[2]) >= 0) {
// display the image if we've attached the size watermark to it
$(`img[filename="${filename}"]`).attr(`src`, result_image_obj.src);
}
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`detail`, file, filename);
}, `image/jpeg`,0.96);
//create the general image
cvs.width = 370;
cvs.height = 370;
cvs.getContext(`2d`).drawImage(loadedData, 0, 0, 370, 370);
cvs.toDataURL(mime_type);
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`general`, file, filename);
}, `image/jpeg`,0.96);
//create the thumbnail
cvs.width = 240;
cvs.height = 240;
cvs.getContext(`2d`).drawImage(loadedData, 0, 0, 240, 240);
cvs.toDataURL(mime_type);
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`thumb`, file, filename);
}, `image/jpeg`,0.96);
//create the repository image for Amazon, Zulilly, Zappos and our wholesale customers. Zullily has the greatest minimum requirements so we'll use those for everyone
loadedData.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
repositoryData.append(`imgfiles`, file, filename);
numImagesProcessed++;
console.log(`repository data created: `, numImagesProcessed, imgNum);
if (numImagesProcessed === numImages) { // the process can get here even though the .toBlob callbacks from previous images may not have completed.
console.log(`finished. Trigger save`);
saveit();
}
}, `image/jpeg`, 0.92);
}
I've tried using an array of promises:
skuContainer.find(`.skuImgContainer`).each(function() {
promises.push(processSku($(this),sku));
});
$.when.apply($, promises).done(function() {
console.log(`promises are done:`,numFiles, numFilesCreated);
saveit();
});
That made it even worse. The done function kicked off before the second iteration had even completed.
I tried incrementing a counter in each of the .toBlob callbacks, but the counter didn't increment in time for an if (counter === numFilesToCreate) saveIt(); check.
I'm at a loss.
Thanks
UPDATE Here is the code I'm using for trying a Promise array:
$(document).on(`click`, `.saveSkuImages`, function() {
numImagesProcessed = 0;
numFilesCreated = 0;
kpData = new FormData();
repositoryData = new FormData();
skuString = `,`;
const skuContainer = $(this).closest(`.skuContainer`);
const sku = skuContainer.attr(`sku`);
numImages = skuContainer.find(`.skuImgContainer`).length;
numFiles = numImages * 4;
let i = 0;
def = [];
def = [];
skuContainer.find(`.skuImgContainer`).each(function() {
let skuFunc = new Promise((resolve, reject) => {
processSku($(this), sku);
resolve(``);
});
def.push(skuFunc);
});
ResolveAll();
});
async function ResolveAll() {
await Promise.all(def);
console.log(`promises are done:`, numFiles, numFilesCreated);
saveit();
}
function processSku(sic, sku) {
console.log(sku);
const skuImg = sic.find(`.skuImg`);
const imgContainer = skuImg.find(`.imgContainer`);
const img = imgContainer.find(`img:first`);
cvs = $(`#${img.attr(`forcanvas`)}`);
imgNum = parseInt(skuImg.attr(`imgnum`));
const filename = `${sku}${imgNum > 1 ? `-alt${imgNum}` : ``}.jpg`;
img.attr(`filename`, filename);
if (sic.find(`.useForSeasonal`).is(`:checked`)) {
kpData.append(`parentImage`, filename);
}
if (cvs.length) {
console.log(`creating images`);
createImages(cvs[0], imgNum, filename, i);
} else { //if an image already exists, we didn't create a canvas for it and we don't need to recreate it.
numImagesProcessed++;
numFilesCreated += 4;
}
UPDATE 2
The only reliable way to make this work is to check the fileCreatedCount against a fileExpected count -- in this case numFilesCreated vs numFiles -- in the .toBlob function:
function createImages(loadedData,imgNum,filename,i) {
const mime_type = `image/jpeg`;
var cvs = document.createElement(`canvas`);
//Create the detail version of the image
cvs.width = 800;
cvs.height = 800;
ctx = cvs.getContext(`2d`);
ctx.drawImage(loadedData, 0, 0, 800, 800);
if (imgNum === 1 && sgiArr.indexOf(filename.split(`-`)[2]) >= 0) {
// attach the size watermark to our primary image if it's a kid's product
let watermark = document.getElementById(`sgi${filename.split(`-`)[2]}`);
ctx.drawImage(watermark, 10, 720);
}
const newImageData = cvs.toDataURL(mime_type);
const result_image_obj = new Image();
result_image_obj.src = newImageData;
if (imgNum === 1 && sgiArr.indexOf(filename.split(`-`)[2]) >= 0) {
// display the image if we've attached the size watermark to it
$(`img[filename="${filename}"]`).attr(`src`, result_image_obj.src);
}
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`detail`, file, filename);
numFilesCreated++;
console.log(numFilesCreated);
if (numFilesCreated === numFiles) {
console.log(`got the right number of files`);
saveit();
}
}, `image/jpeg`, 0.96);
//create the general image
cvs.width = 370;
cvs.height = 370;
cvs.getContext(`2d`).drawImage(loadedData, 0, 0, 370, 370);
cvs.toDataURL(mime_type);
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`general`, file, filename);
numFilesCreated++;
console.log(numFilesCreated);
if (numFilesCreated === numFiles) {
console.log(`got the right number of files`);
saveit();
}
}, `image/jpeg`, 0.96);
//create the thumbnail
cvs.width = 240;
cvs.height = 240;
cvs.getContext(`2d`).drawImage(loadedData, 0, 0, 240, 240);
cvs.toDataURL(mime_type);
cvs.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
kpData.append(`thumb`, file, filename);
numFilesCreated++;
console.log(numFilesCreated);
if (numFilesCreated === numFiles) {
console.log(`got the right number of files`);
saveit();
}
}, `image/jpeg`, 0.96);
//create the repository image for Amazon, Zulilly, Zappos and our wholesale customers. Zullily has the greatest minimum requirements so we'll use those for everyone
loadedData.toBlob((blob) => {
let file = new File([blob], filename, { type: `image/jpeg` });
repositoryData.append(`imgfiles`, file, filename);
numImagesProcessed++;
numFilesCreated++;
console.log(numFilesCreated);
console.log(`repository data created: `, numImagesProcessed, imgNum);
if (numFilesCreated === numFiles) {
console.log(`got the right number of files`);
saveit();
}
}, `image/jpeg`, 0.92);
}
So, each of the four .toBlob callbacks in each iteration of createImages checks whether it produced the last file that needs to be created and, if so, kicks off the save.
The problem was that createImages reported itself finished before all the files were created, which made wrapping it in a promise meaningless. If .toBlob returned a promise or offered some way to call .toBlob().then(), it would work, as sketched below.
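One way to get that behaviour is to wrap canvas.toBlob in a small Promise helper, so each createImages call can return a promise that resolves only after its blobs exist. A minimal sketch using the same kpData, skuContainer and saveit names as above (the 370px, 240px and repository variants would follow the same pattern as the detail version):
function toBlobAsync(canvas, type = `image/jpeg`, quality = 0.96) {
  return new Promise((resolve) => canvas.toBlob(resolve, type, quality));
}

async function createImagesAsync(loadedData, imgNum, filename) {
  const cvs = document.createElement(`canvas`);
  cvs.width = 800;
  cvs.height = 800;
  cvs.getContext(`2d`).drawImage(loadedData, 0, 0, 800, 800);
  const detail = await toBlobAsync(cvs);
  kpData.append(`detail`, new File([detail], filename, { type: `image/jpeg` }), filename);
  // ...repeat for the general, thumb and repository versions...
}

const jobs = [];
skuContainer.find(`.skuImgContainer`).each(function () {
  // compute cvs, imgNum and filename exactly as in the question, then:
  jobs.push(createImagesAsync(cvs[0], imgNum, filename));
});
// the save fires only after every blob from every image has been created
Promise.all(jobs).then(() => saveit());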

Attempting to merge Record.js and driveApi

I have this record.js script to toggle a recording that is currently working as expected.
function Record_Current(config) {
config = config || {};
var self = this;
var audioInput;
var audioNode;
var bufferSize = config.bufferSize || 4096;
var recordedData = [];
var recording = false;
var recordingLength = 0;
var startDate;
var audioCtx;
this.toggleRecording = function() {
if (recording) {
self.stop();
} else {
self.start();
}
};
this.start = function() {
// reset any previous data
recordedData = [];
recordingLength = 0;
// webkit audio context shim
audioCtx = new(window.AudioContext || window.webkitAudioContext)();
if (audioCtx.createJavaScriptNode) {
audioNode = audioCtx.createJavaScriptNode(bufferSize, 1, 1);
} else if (audioCtx.createScriptProcessor) {
audioNode = audioCtx.createScriptProcessor(bufferSize, 1, 1);
} else {
throw 'WebAudio not supported!';
}
audioNode.connect(audioCtx.destination);
navigator.mediaDevices.getUserMedia({ audio: true })
.then(onMicrophoneCaptured)
.catch(onMicrophoneError);
};
this.stop = function() {
stopRecording(function(blob) {
self.blob = blob;
config.onRecordingStop && config.onRecordingStop(blob);
});
};
this.upload = function(url, params, callback) {
var formData = new FormData();
formData.append("audio", self.blob, config.filename || 'recording.wav');
for (var i in params)
formData.append(i, params[i]);
var request = new XMLHttpRequest();
request.upload.addEventListener("progress", function(e) {
callback('progress', e, request);
});
request.upload.addEventListener("load", function(e) {
callback('load', e, request);
});
request.onreadystatechange = function(e) {
var status = 'loading';
if (request.readyState === 4) {
status = request.status === 200 ? 'done' : 'error';
}
callback(status, e, request);
};
request.open("POST", url);
request.send(formData);
};
function stopRecording(callback) {
// stop recording
recording = false;
// to make sure onaudioprocess stops firing
window.localStream.getTracks().forEach((track) => { track.stop(); });
audioInput.disconnect();
audioNode.disconnect();
exportWav({
sampleRate: sampleRate,
recordingLength: recordingLength,
data: recordedData
}, function(buffer, view) {
self.blob = new Blob([view], { type: 'audio/wav' });
callback && callback(self.blob);
});
}
function onMicrophoneCaptured(microphone) {
if (config.visualizer)
visualize(microphone);
// save the stream so we can disconnect it when we're done
window.localStream = microphone;
audioInput = audioCtx.createMediaStreamSource(microphone);
audioInput.connect(audioNode);
audioNode.onaudioprocess = onAudioProcess;
recording = true;
self.startDate = new Date();
config.onRecordingStart && config.onRecordingStart();
sampleRate = audioCtx.sampleRate;
}
function onMicrophoneError(e) {
console.log(e);
alert('Unable to access the microphone.');
}
function onAudioProcess(e) {
if (!recording) {
return;
}
recordedData.push(new Float32Array(e.inputBuffer.getChannelData(0)));
recordingLength += bufferSize;
self.recordingLength = recordingLength;
self.duration = new Date().getTime() - self.startDate.getTime();
config.onRecording && config.onRecording(self.duration);
}
function visualize(stream) {
var canvas = config.visualizer.element;
if (!canvas)
return;
var canvasCtx = canvas.getContext("2d");
var source = audioCtx.createMediaStreamSource(stream);
var analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048;
var bufferLength = analyser.frequencyBinCount;
var dataArray = new Uint8Array(bufferLength);
source.connect(analyser);
function draw() {
// get the canvas dimensions
var width = canvas.width,
height = canvas.height;
// ask the browser to schedule a redraw before the next repaint
requestAnimationFrame(draw);
// clear the canvas
canvasCtx.fillStyle = config.visualizer.backcolor || '#fff';
canvasCtx.fillRect(0, 0, width, height);
if (!recording)
return;
canvasCtx.lineWidth = config.visualizer.linewidth || 2;
canvasCtx.strokeStyle = config.visualizer.forecolor || '#f00';
canvasCtx.beginPath();
var sliceWidth = width * 1.0 / bufferLength;
var x = 0;
analyser.getByteTimeDomainData(dataArray);
for (var i = 0; i < bufferLength; i++) {
var v = dataArray[i] / 128.0;
var y = v * height / 2;
i === 0 ? canvasCtx.moveTo(x, y) : canvasCtx.lineTo(x, y);
x += sliceWidth;
}
canvasCtx.lineTo(canvas.width, canvas.height / 2);
canvasCtx.stroke();
}
draw();
}
function exportWav(config, callback) {
function inlineWebWorker(config, cb) {
var data = config.data.slice(0);
var sampleRate = config.sampleRate;
data = joinBuffers(data, config.recordingLength);
console.log(data);
function joinBuffers(channelBuffer, count) {
var result = new Float64Array(count);
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++) {
var buffer = channelBuffer[i];
result.set(buffer, offset);
offset += buffer.length;
}
return result;
}
function writeUTFBytes(view, offset, string) {
var lng = string.length;
for (var i = 0; i < lng; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
var dataLength = data.length;
// create wav file
var buffer = new ArrayBuffer(44 + dataLength * 2);
var view = new DataView(buffer);
writeUTFBytes(view, 0, 'RIFF'); // RIFF chunk descriptor/identifier
view.setUint32(4, 44 + dataLength * 2, true); // RIFF chunk length
writeUTFBytes(view, 8, 'WAVE'); // RIFF type
writeUTFBytes(view, 12, 'fmt '); // format chunk identifier, FMT sub-chunk
view.setUint32(16, 16, true); // format chunk length
view.setUint16(20, 1, true); // sample format (raw)
view.setUint16(22, 1, true); // mono (1 channel)
view.setUint32(24, sampleRate, true); // sample rate
view.setUint32(28, sampleRate * 2, true); // byte rate (sample rate * block align)
view.setUint16(32, 2, true); // block align (channel count * bytes per sample)
view.setUint16(34, 16, true); // bits per sample
writeUTFBytes(view, 36, 'data'); // data sub-chunk identifier
view.setUint32(40, dataLength * 2, true); // data chunk length
// write the PCM samples
var index = 44;
for (var i = 0; i < dataLength; i++) {
view.setInt16(index, data[i] * 0x7FFF, true);
index += 2;
}
if (cb) {
return cb({
buffer: buffer,
view: view
});
}
postMessage({
buffer: buffer,
view: view
});
}
var webWorker = processInWebWorker(inlineWebWorker);
webWorker.onmessage = function(event) {
callback(event.data.buffer, event.data.view);
// release memory
URL.revokeObjectURL(webWorker.workerURL);
};
webWorker.postMessage(config);
}
function processInWebWorker(_function) {
var workerURL = URL.createObjectURL(new Blob([_function.toString(),
';this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(workerURL);
worker.workerURL = workerURL;
console.log(worker);
return worker;
}
function renderRecording(workerURL, list) {
const worker_url = URL.createObjectURL(workerURL);
const li = document.createElement('li');
const audio = document.createElement('audio');
const anchor = document.createElement('a');
anchor.setAttribute('href', workerURL);
const now = new Date();
anchor.setAttribute(
'download',
`recording-${now.getFullYear()}-${(now.getMonth() + 1).toString().padStart(2, '0')}-${now.getDay().toString().padStart(2, '0')}--${now.getHours().toString().padStart(2, '0')}-${now.getMinutes().toString().padStart(2, '0')}-${now.getSeconds().toString().padStart(2, '0')}.webm`
);
anchor.innerText = 'Download';
audio.setAttribute('src', worker_url);
audio.setAttribute('controls', 'controls');
li.appendChild(audio);
li.appendChild(anchor);
list.appendChild(li);
}
}
and this Google Drive Script
var stream = require('stream');
module.exports.uploadFile = function(req) {
var file;
console.log("driveApi upload reached")
function blobToFile(req) {
file = req.body.blob
file.lastModifiedDate = new Date();
file.name = req.body.word;
return file;
}
var bufStream = new stream.PassThrough();
bufStream.end(file);
console.log(typeof 42);
var folderId = "Folder"; // Enter Folder Name
var fileMetadata = {
"name": req.body.word,
parents: [folderId]
}
var media = {
mimeType: "audio/mp3",
body: bufStream
}
drive.files.create({
auth: jwToken,
resource: fileMetadata,
media: media,
fields: "id"
}, function(err, file) {
if (err) {
console.error(err);
} else {
console.log("File Id: ", file.id);
}
console.log("driveApi upload accomplished")
});
}
I have tried a few different approaches to combining the two so that the .wav file is saved automatically through the Drive API, but it does not work.
I believe I am either not merging the two scripts the right way or I am missing something. Do I need to use v3 of the Google Drive API?
If anyone could provide some guidance on how to merge the two properly, that would be greatly appreciated. Thank you!
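For what it's worth, one possible wiring is to let Record_Current's upload() POST the WAV blob to a server endpoint that streams it to Drive. This is only a sketch: it assumes an Express server with multer handling the multipart body, a recorder instance of Record_Current on the client, and the drive and jwToken objects already configured as in the Drive script above.
// client side: recorder is an instance of Record_Current
recorder.upload('/api/recordings', { word: 'example' }, function (status) {
  if (status === 'done') console.log('uploaded to the server');
});

// server side (Express and multer are assumptions, not part of the original code)
var express = require('express');
var multer = require('multer');
var stream = require('stream');
var upload = multer({ storage: multer.memoryStorage() });
var app = express();

app.post('/api/recordings', upload.single('audio'), function (req, res) {
  var bufStream = new stream.PassThrough();
  bufStream.end(req.file.buffer); // the WAV bytes appended as "audio" by upload()
  drive.files.create({
    auth: jwToken,
    resource: { name: req.body.word, parents: ['FOLDER_ID'] }, // FOLDER_ID is a placeholder
    media: { mimeType: 'audio/wav', body: bufStream },
    fields: 'id'
  }, function (err, file) {
    if (err) return res.status(500).send(err);
    res.json({ id: file.id });
  });
});
The point of the sketch is that the blob never needs to touch the file system: multer's memory storage hands over a Buffer that a PassThrough stream can feed straight into drive.files.create, much like the bufStream in the original Drive script.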

Using three in ECMA Script 6 STL Viewer class

I am trying to convert a three.js STL viewer demonstration into an ES6 class compiled with webpack 2, but I want it to open the file from a URL rather than from an actual file input. I've managed to load the three.js components and most of the class is working, but I think I'm having scope problems with a few of the components. I've run into a problem I don't seem to be able to get past when using FileReader and then updating the scene and object: TypeError: Cannot read property 'remove' of undefined
threeDimensionalModels.js
// jshint esversion:6
import {
Scene, PerspectiveCamera, WebGLRenderer, Geometry, Mesh,
AmbientLight, DirectionalLight, MeshPhongMaterial, Vector3, Face3,
} from 'three';
var thisClass = null;
class threeDimensionalModels {
constructor(height,width,selector){
this.w = height;
this.h = width;
this.selector = selector;
this.renderer = new WebGLRenderer();
this.view = document.getElementById(this.selector);
this.camera = new PerspectiveCamera(45, this.w / this.h, 1, 1000);
this.scene = new Scene();
this.light1 = new DirectionalLight(0xffffff);
this.light2 = new DirectionalLight(0xffffff);
this.mat = new MeshPhongMaterial({ color: 0x339900, specular: 0x030303 });
this.obj = new Mesh(new Geometry(), this.mat);
this.renderer.setSize(this.w, this.h);
this.view.appendChild(this.renderer.domElement);
this.camera.position.set(0, 0, 50);
this.scene.add(new AmbientLight(0x666666));
this.light1.position.set(0, 100, 100);
this.scene.add(this.light1);
this.light2.position.set(0, -100, -100);
this.scene.add(this.light2);
this.scene.add(this.obj);
}
static binaryVector3(view, offset) {
var v = new Vector3();
v.x = view.getFloat32(offset + 0, true);
v.y = view.getFloat32(offset + 4, true);
v.z = view.getFloat32(offset + 8, true);
return v;
}
static loadBinaryStl(buffer) {
// binary STL
var view = new DataView(buffer);
var size = view.getUint32(80, true);
var geom = new Geometry();
var offset = 84;
for (var i = 0; i < size; i++) {
var normal = threeDimensionalModels.binaryVector3(view, offset);
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 12));
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 24));
geom.vertices.push(threeDimensionalModels.binaryVector3(view, offset + 36));
geom.faces.push(
new Face3(i * 3, i * 3 + 1, i * 3 + 2, normal));
offset += 4 * 3 * 4 + 2;
}
return geom;
}
static m2vec3(match) {
var v = new Vector3();
v.x = parseFloat(match[1]);
v.y = parseFloat(match[2]);
v.z = parseFloat(match[3]);
return v;
}
static toLines(array) {
var lines = [];
var h = 0;
for (var i = 0; i < array.length; i++) {
if (array[i] === 10) {
var line = String.fromCharCode.apply(
null, array.subarray(h, i));
lines.push(line);
h = i + 1;
}
}
lines.push(String.fromCharCode.apply(null, array.subarray(h)));
return lines;
}
static loadTextStl(buffer) {
var lines = threeDimensionalModels.toLines(new Uint8Array(buffer));
var index = 0;
var scan = function (regexp) {
while (lines[index].match(/^\s*$/)) index++;
var r = lines[index].match(regexp);
return r;
};
var scanOk = function (regexp) {
var r = scan(regexp);
if (!r) throw new Error(
"not text stl: " + regexp.toString() +
"=> (line " + (index - 1) + ")" +
"[" + lines[index-1] + "]");
index++;
return r;
};
var facetReg = /^\s*facet\s+normal\s+([^\s]+)\s+([^\s]+)\s+([^\s]+)/;
var vertexReg = /^\s*vertex\s+([^s]+)\s+([^\s]+)\s+([^\s]+)/;
var geom = new Geometry();
scanOk(/^\s*solid\s(.*)/);
while (!scan(/^\s*endsolid/)) {
var normal = scanOk(facetReg);
scanOk(/^\s*outer\s+loop/);
var v1 = scanOk(vertexReg);
var v2 = scanOk(vertexReg);
var v3 = scanOk(vertexReg);
scanOk(/\s*endloop/);
scanOk(/\s*endfacet/);
var base = geom.vertices.length;
geom.vertices.push(threeDimensionalModels.m2vec3(v1));
geom.vertices.push(threeDimensionalModels.m2vec3(v2));
geom.vertices.push(threeDimensionalModels.m2vec3(v3));
geom.faces.push(
new Face3(base, base + 1, base + 2, threeDimensionalModels.m2vec3(normal)));
}
return geom;
}
static loadStlModel(buffer) {
try {
return threeDimensionalModels.loadTextStl(buffer);
} catch (ex) {
return threeDimensionalModels.loadBinaryStl(buffer);
}
}
openStl(url) {
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "blob";//force the HTTP response, response-type header to be blob
xhr.onload = function() {
blob = xhr.response;
thisClass = this;
var reader = new FileReader();
reader.addEventListener("load", function (ev) {
var buffer = ev.target.result;
var geom = threeDimensionalModels.loadStlModel(buffer);
threeDimensionalModels.scene.remove(thisClass.obj);
threeDimensionalModels.obj = new Mesh(geom, threeDimensionalModels.mat);
threeDimensionalModels.scene.add(threeDimensionalModels.obj);
}, false);
reader.readAsArrayBuffer(blob);
};
xhr.send();
}
}
export default threeDimensionalModels;
HTML
<div id="threedm-view"></div>
<script type="text/javascript">
window.addEventListener("load", function () {
"use strict";
var threedm = new threeDimensionalModels(800,800,'threedm-view');
var loop = function loop() {
requestAnimationFrame(loop);
threedm.obj.rotation.z += 0.05;
threedm.renderer.clear();
threedm.renderer.render(threedm.scene, threedm.camera);
};
loop();
threedm.openStl("/app/uploads/2017/07/Stator.stl");
});
</script>
After a bit more playing around and looking at the problem area, I realised I was assigning this to the global variable in a scope where the this I wanted no longer existed. I would like to find a better way of passing this into child functions inside ES6 class methods without using a global variable; see the sketch after the updated code.
Update
class threeDimensionalModels {
// .... //
openStl(url) {
var blob = null;
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "blob";
thisClass = this; // Moved from inside xhr.onload
xhr.onload = function() {
blob = xhr.response;
var reader = new FileReader();
reader.addEventListener("load", function (ev) {
var buffer = ev.target.result;
var geom = threeDimensionalModels.loadStlModel(buffer);
thisClass.scene.remove(thisClass.obj);
thisClass.obj = new Mesh(geom, thisClass.mat);
thisClass.scene.add(thisClass.obj);
}, false);
reader.readAsArrayBuffer(blob);
};
xhr.send();
}
// ... //
}
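A cleaner alternative to the module-level thisClass variable is to use arrow functions for the XHR and FileReader callbacks, since they capture the enclosing this lexically. A sketch of openStl written that way (same class, no globals):
openStl(url) {
  const xhr = new XMLHttpRequest();
  xhr.open("GET", url);
  xhr.responseType = "blob";
  xhr.onload = () => { // arrow function: `this` is still the class instance
    const reader = new FileReader();
    reader.addEventListener("load", (ev) => {
      const geom = threeDimensionalModels.loadStlModel(ev.target.result);
      this.scene.remove(this.obj);
      this.obj = new Mesh(geom, this.mat);
      this.scene.add(this.obj);
    }, false);
    reader.readAsArrayBuffer(xhr.response);
  };
  xhr.send();
}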

Issue while trying to compress PDF files into a Zip file using jsZip

I've made use of the CDN for JSZip and, after referring to the official documentation, tried to generate PDF files and compress them into .zip format.
Code:
var zip = new JSZip();
zip.file("Hello.pdf", "Hello World\n");
zip.file("Alphabet.pdf", "abcdef\n");
zip.generateAsync({type:"blob"})
.then(function(content) {
saveAs(content, "example.zip");
});
But the problem I'm facing is that although I'm able to generate the .zip file, I'm not able to read the PDF inside it; it keeps saying the format is corrupted. (The same thing happens for the .xls/.xlsx format; I don't face this issue for .doc and .txt files.)
(Screenshot: the error message shown when trying to open the PDF file.)
What am I doing wrong? What do I need to do additionally? This has got me in a fix! Any help would be appreciated.
EDIT:
@Fefux - I tried something along these lines, i.e. generating the PDF content first and then compressing it to .zip, but it's not working either!
function create_zip() {
var dynamicSITHtml = '<div class="container"><div class="row margin-top">';
dynamicSITHtml = dynamicSITHtml + '<table><thead><tr><th>Target Date/Time</th><th>Referred To Role</th><th>Description</th><th>Priority</th><th>Status</th></tr></thead><tbody>';
dynamicSITHtml = dynamicSITHtml + '</tbody></table></div></div>';
$scope.dymanicSITHtml = dynamicSITHtml;
var pdf1 = new jsPDF('p', 'pt', 'letter');
var ElementHandlers = {
'#editor': function (element, renderer) {
return true;
}
};
pdf1.fromHTML($scope.dymanicSITHtml, 10, 10, {
'width': 1000,
'elementHandlers': ElementHandlers
});
//pdf1.save($scope.operation.ReferenceNumber + '_task_summary_report.pdf');
var zip = new JSZip();
zip.file("Hello.pdf", pdf1.save($scope.operation.ReferenceNumber + '_task_summary_report.pdf'));
zip.generateAsync({ type: "blob" })
.then(function (content) {
saveAs(content, "example.zip");
});
}
Please help!!
Here's an updated copy of the code.
I've tried to make use of the js-xlsx library - https://github.com/SheetJS/js-xlsx - to generate an xls file and then zip it.
Please refer to the code below:
function Create_Zip() {
function datenum(v, date1904) {
if (date1904) v += 1462;
var epoch = Date.parse(v);
return (epoch - new Date(Date.UTC(1899, 11, 30))) / (24 * 60 * 60 * 1000);
}
function sheet_from_array_of_arrays(data, opts) {
var ws = {};
var range = { s: { c: 10000000, r: 10000000 }, e: { c: 0, r: 0 } };
for (var R = 0; R != data.length; ++R) {
for (var C = 0; C != data[R].length; ++C) {
if (range.s.r > R) range.s.r = R;
if (range.s.c > C) range.s.c = C;
if (range.e.r < R) range.e.r = R;
if (range.e.c < C) range.e.c = C;
var cell = { v: data[R][C] };
if (cell.v === null) continue;
var cell_ref = XLSX.utils.encode_cell({ c: C, r: R });
if (typeof cell.v === 'number') cell.t = 'n';
else if (typeof cell.v === 'boolean') cell.t = 'b';
else if (cell.v instanceof Date) {
cell.t = 'n'; cell.z = XLSX.SSF._table[14];
cell.v = datenum(cell.v);
}
else cell.t = 's';
ws[cell_ref] = cell;
}
}
if (range.s.c < 10000000) ws['!ref'] = XLSX.utils.encode_range(range);
return ws;
}
var data = [[1, 2, 3], [true, false, null, "sheetjs"], ["foo", "bar", new Date("2014-02-19T14:30Z"), "0.3"], ["baz", null, "qux"]];
var ws_name = "SheetJS";
function Workbook() {
if (!(this instanceof Workbook)) return new Workbook();
this.SheetNames = [];
this.Sheets = {};
}
var wb = new Workbook(), ws = sheet_from_array_of_arrays(data);
/* add worksheet to workbook */
wb.SheetNames.push(ws_name);
wb.Sheets[ws_name] = ws;
var wbout = XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });
function s2ab(s) {
var buf = new ArrayBuffer(s.length);
var view = new Uint8Array(buf);
for (var i = 0; i != s.length; ++i) view[i] = s.charCodeAt(i) & 0xFF;
return buf;
}
var jsonse = JSON.stringify([s2ab(wbout)]);
var testblob = new Blob([jsonse], { type: "application/json" });
console.log(testblob);
var zip = new JSZip();
zip.file("trial.xls", testblob);
var downloadFile = zip.generateAsync({ type: "blob" });
saveAs(downloadFile, 'test.zip');
}
But the problem here is that I keep getting this error in the console: 'The data of 'trial.xls' is in an unsupported format!' :(
Is there any way I can make this work?
You get that error because you aren't zipping a PDF file. You zip a file named Hello.pdf whose content is "Hello World\n", but that is not valid PDF content (the same goes for Alphabet.pdf).
You need to generate valid PDF content first and then zip it.
EDIT: working jsFiddle: https://jsfiddle.net/55gdt8ra/
$(function() {
var doc = new jsPDF();
doc.setFontSize(40);
doc.text(35, 25, "Octonyan loves jsPDF");
var zip = new JSZip();
zip.file("Hello.pdf", doc.output());
zip.generateAsync({ type: "blob" })
.then(function (content) {
saveAs(content, "example.zip");
});
})
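For the js-xlsx attempt in the question's edit, the same principle most likely applies: JSZip needs the actual workbook bytes, not a JSON-stringified copy of them, and generateAsync resolves asynchronously. A sketch along those lines, reusing the wb and s2ab helper from the question:
var wbout = XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });
var zip = new JSZip();
zip.file('trial.xlsx', s2ab(wbout)); // pass the ArrayBuffer directly, not a JSON blob
zip.generateAsync({ type: 'blob' }).then(function (content) {
  saveAs(content, 'test.zip'); // save only once the zip blob is ready
});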

Progress bar while uploading large files with XMLHttpRequest

I am trying to upload some large files to the server using XMLHttpRequest and file.slice.
I've managed to do this with the help of the documentation and various other links.
Since uploading a large file is a lengthy job, I would like to provide the user with a progress bar.
After some more reading I came across an example that, theoretically, does exactly what I need.
By taking the sample code and adapting it to my needs I ended up with this:
var upload =
{
blobs: [],
pageName: '',
bytesPerChunk: 20 * 1024 * 1024,
currentChunk: 0,
loaded: 0,
total: 0,
file: null,
fileName: "",
uploadChunk: function (blob, fileName, fileType) {
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
if (xhr.responseText) {
// alert(xhr.responseText);
}
}
};
xhr.addEventListener("load", function (evt) {
$("#dvProgressPrcent").html("100%");
$get('dvProgress').style.width = '100%';
}, false);
xhr.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
$("#dvProgressPrcent").html(progress + "%");
$get('dvProgress').style.width = progress + '%';
}
}, false);
xhr.upload.addEventListener("progress", function (evt) {
if (evt.lengthComputable) {
var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
$("#dvProgressPrcent").html(progress + "%");
$get('dvProgress').style.width = progress + '%';
}
}, false);
xhr.open('POST', upload.pageName, false);
xhr.setRequestHeader("Content-Type", "multipart/form-data");
xhr.setRequestHeader("X-File-Name", fileName);
xhr.setRequestHeader("X-File-Type", fileType);
xhr.send(blob);
},
upload: function (file) {
var start = 0;
var end = 0;
var size = file.size;
var date = new Date();
upload.fileName = date.format("dd.MM.yyyy_HH.mm.ss") + "_" + file.name;
upload.loaded = 0;
upload.total = file.size;
while (start < size) {
end = start + upload.bytesPerChunk;
if (end > size) {
end = size;
}
var blob = file.slice(start, end);
upload.uploadChunk(blob, upload.fileName, file.type);
start = end;
upload.loaded += start;
}
return upload.fileName;
}
};
The call looks like this (without the validations):
upload.upload(document.getElementById("#upload").files[0]);
My problem is that the progress event doesn't trigger.
I've tried xhr.addEventListener and xhr.upload.addEventListener (each at a time and both at once) for the progress event, but it never fires. The onreadystatechange and load events trigger just fine.
I would greatly appreciate help with what I am doing wrong.
Update
After many attempts I've managed to simulate progress, but I've run into another problem: Chrome's UI is not updating during the upload.
The code looks like this now:
var upload =
{
pageName: '',
bytesPerChunk: 20 * 1024 * 1024,
loaded: 0,
total: 0,
file: null,
fileName: "",
uploadFile: function () {
var size = upload.file.size;
if (upload.loaded > size) return;
var end = upload.loaded + upload.bytesPerChunk;
if (end > size) { end = size; }
var blob = upload.file.slice(upload.loaded, end);
var xhr = new XMLHttpRequest();
xhr.open('POST', upload.pageName, false);
xhr.setRequestHeader("Content-Type", "multipart/form-data");
xhr.setRequestHeader("X-File-Name", upload.fileName);
xhr.setRequestHeader("X-File-Type", upload.file.type);
xhr.send(blob);
upload.loaded += upload.bytesPerChunk;
setTimeout(upload.updateProgress, 100);
setTimeout(upload.uploadFile, 100);
},
upload: function (file) {
upload.file = file;
var date = new Date();
upload.fileName = date.format("dd.MM.yyyy_HH.mm.ss") + "_" + file.name;
upload.loaded = 0;
upload.total = file.size;
setTimeout(upload.uploadFile, 100);
return upload.fileName;
},
updateProgress: function () {
var progress = Math.ceil(((upload.loaded) / upload.total) * 100);
if (progress > 100) progress = 100;
$("#dvProgressPrcent").html(progress + "%");
$get('dvProgress').style.width = progress + '%';
}
};
Update 2
I've managed to fix it and simulate a progress bar that works in Chrome too.
I've updated the previous code sample with the one that works.
You can make the bar refresh more often by reducing the size of the chunk uploaded at a time.
Thank you for your help.
As stated in https://stackoverflow.com/a/3694435/460368, you could do:
if(xhr.upload)
xhr.upload.onprogress=upload.updateProgress;
and
updateProgress: function updateProgress(evt)
{
if (evt.lengthComputable) {
var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
$("#dvProgressPrcent").html(progress + "%");
$get('dvProgress').style.width = progress + '%';
}
}
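Note that this only helps if the request is opened asynchronously; the snippets in the question pass false as the third argument to open, which blocks the UI thread, and progress events are generally not delivered for synchronous requests. A sketch of one chunk upload using the handler above, assuming the same upload object:
var xhr = new XMLHttpRequest();
if (xhr.upload)
  xhr.upload.onprogress = upload.updateProgress;
xhr.onload = function () {
  // kick off the next chunk here instead of looping synchronously
};
xhr.open('POST', upload.pageName, true); // async, so progress can fire and the UI can repaint
xhr.setRequestHeader('X-File-Name', upload.fileName);
xhr.setRequestHeader('X-File-Type', upload.file.type);
xhr.send(blob);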
Here is my solution:
function addImages(id) {
var files = $("#files").prop("files");
var file = files[loopGallery];
var cList = files.length;
var fd = new FormData();
fd.append("file", file);
fd.append("galerie", id);
var xhr = new XMLHttpRequest();
xhr.open("POST", "moduls/galerie/uploadimages.php", true);
xhr.upload.onprogress = function(e) {
var percentComplete = Math.ceil((e.loaded / e.total) * 100);
$("#progress").css("display","");
$("#progressText").text((loopGallery+1)+" z "+cList);
$("#progressBar").css("width",percentComplete+"%");
};
xhr.onload = function() {
if(this.status == 200) {
$("#progressObsah").load("moduls/galerie/showimages.php?ids="+id);
if((loopGallery+1) == cList) {
loopGallery = 0;
} else {
$("#progressBar").css("width", "0%");
loopGallery++;
addImages(id);
}
}
}
if(cList > 0) {
xhr.send(fd);
}
}
