Generate Blurhash code in Next JS app for dynamic images - javascript

I'm using Next.js and Dropzone to upload images and send them to a server. I learned that Next.js can generate the blurred placeholder itself for statically imported images when placeholder='blur' is used, but for dynamic images fetched from a server you have to supply the blur placeholder (blurDataURL) yourself.
So is there a way I could generate the BlurHash myself when I upload the images, and send it to the server along with them? I don't want to use an API to generate these hash codes, just pure JavaScript.
Here is how I handle the images accepted by Dropzone:
const dispatch = useDispatch();
const images = useSelector(selectImages);
const [files, setFiles] = useState(images == [] ? [] : images);
const { getRootProps, getInputProps } = useDropzone({
  onDrop: (acceptedFiles) => {
    acceptedFiles.map((file, index) => {
      const reader = new FileReader();
      reader.onload = async function (e) {
        const options = {
          maxSizeMB: 5,
          maxWidthOrHeight: 1920,
          useWebWorker: true,
        };
        const compressedFile = await imageCompression(file, options);
        const tot = parseInt(acceptedFiles.length) + parseInt(files.length);
        if (tot > 9) {
          alert("select maximum of 9 images");
        } else if (parseInt(acceptedFiles.length) > 9) {
          alert("maximum images to be selected is 9");
        } else if (parseInt(files.length) < 9) {
          setFiles((prevState) => [
            ...prevState,
            {
              id: index,
              src: URL.createObjectURL(compressedFile),
              name: file.name,
            },
          ]);
          files.map((filename) => {
            acceptedFiles.forEach((newFile) => {
              if (newFile.name == filename.name) {
                alert("a duplicate image is detected");
                setFiles(
                  files,
                  files.filter((val) => val !== newFile)
                );
              }
            });
          });
        } else {
          alert("something went wrong");
        }
      };
      reader.readAsDataURL(file);
      return file;
    });
  },
})
And here is the output when the images are uploaded (screenshot not included).
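As for generating the hash itself without calling an external API: the blurhash npm package exposes an encode function that runs in the browser on raw RGBA pixel data, so one option is to draw the (compressed) image onto a small canvas and encode those pixels. The sketch below is illustrative and not part of the code above; generateBlurhash, the 32x32 downscale, and the 4x4 component counts are my own choices. Note also that next/image's placeholder='blur' for remote images expects a blurDataURL (a tiny base64-encoded image), so the raw BlurHash string may still need to be decoded into a data URL before next/image can use it.

import { encode } from "blurhash"; // npm i blurhash

// Load an object URL / data URL into an HTMLImageElement.
const loadImage = (src) =>
  new Promise((resolve, reject) => {
    const img = new Image();
    img.onload = () => resolve(img);
    img.onerror = reject;
    img.src = src;
  });

// Draw the image on a small canvas, grab the RGBA pixels, and encode them.
export async function generateBlurhash(imageSrc) {
  const img = await loadImage(imageSrc);
  const canvas = document.createElement("canvas");
  canvas.width = 32; // BlurHash only needs a tiny thumbnail
  canvas.height = 32;
  const ctx = canvas.getContext("2d");
  ctx.drawImage(img, 0, 0, 32, 32);
  const { data, width, height } = ctx.getImageData(0, 0, 32, 32);
  return encode(data, width, height, 4, 4); // 4x4 components is a common default
}

It could then be called right after compression, e.g. const blurhash = await generateBlurhash(URL.createObjectURL(compressedFile)), and sent to the server alongside the file.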

Related

How to use localStorage with multiple async/await calls?

I'm making multiple calls to an endpoint to get an image as a blob, and I need to save it in localStorage so that if it's already there, the call doesn't have to be made again.
The way I'm doing it, it doesn't wait for all of the keys before spreading them into the array passed to setItem.
What is the correct way to do this?
assetNameFiltered.forEach((assetName) => {
  const ASSET_IMAGE = assetImageCached?.filter(({ id }) => id === assetName)[0];
  if (ASSET_IMAGE) {
    //
  } else {
    useOperationServiceHook.getAssetImageByName(assetName).then(({ data }) => {
      if (data.size > 0) {
        const READER = new window.FileReader();
        READER.readAsDataURL(data);
        READER.onloadend = () => createAssetImageCache({ id: assetName, image: READER.result });
      } else {
        createAssetImageCache({ id: assetName, image: '' });
      }
    });
  }
});
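A minimal sketch of one way to do it, reusing the names from the question (assetNameFiltered, assetImageCached, useOperationServiceHook, createAssetImageCache) under the assumption they behave as shown above: wrap each FileReader in a Promise and wait for all of them with Promise.all, so every entry is resolved before anything is written to the cache or spread into setItem.

// Promisify the blob -> data URL conversion so it can be awaited.
const readBlobAsDataUrl = (blob) =>
  new Promise((resolve, reject) => {
    const reader = new window.FileReader();
    reader.onloadend = () => resolve(reader.result);
    reader.onerror = reject;
    reader.readAsDataURL(blob);
  });

// Resolve every asset (cached or freshly fetched) before touching localStorage.
const cacheAllAssetImages = async () => {
  const entries = await Promise.all(
    assetNameFiltered.map(async (assetName) => {
      const cached = assetImageCached?.find(({ id }) => id === assetName);
      if (cached) return cached;
      const { data } = await useOperationServiceHook.getAssetImageByName(assetName);
      const image = data.size > 0 ? await readBlobAsDataUrl(data) : '';
      return { id: assetName, image };
    })
  );
  entries.forEach(createAssetImageCache); // or spread `entries` into a single setItem call
};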

How do I download/get an image from the Google Drive API and render it on my web app?

My task is to let a user sign in to their Google Drive, select an image, and render it on my web application. I am able to download the file onto the local machine, and I am also able to view and render the (low-quality) thumbnail on my app. However, when I try to render the full-sized image I get a CORS "no Access-Control-Allow-Origin" error.
Some background: thumbnail links that work and render correctly have "lh3" in their domain name, while the full-sized images have "drive.google" in their domain name. My colleague suggested an Express endpoint to solve this (but I have no idea about Express and Node.js), while someone else suggested getting the file from the Google Drive API as a blob, downloading it onto the server, and then serving it from the server to the front end of the web app.
class GoogleDrive {
  constructor () {
    this.thumbID = new Map();
  }
  startImport (choice, renderThumb) {
    gapi.load('client:auth2', this.initClient.bind(this, choice, renderThumb));
  }
  async initClient (choice, renderThumb) {
    const CLIENT_ID = '###';
    const API_KEY = '###';
    const DISCOVERY_DOCS = ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"];
    const SCOPES = 'https://www.googleapis.com/auth/drive';
    await gapi.client.init({
      apiKey: API_KEY,
      clientId: CLIENT_ID,
      discoveryDocs: DISCOVERY_DOCS,
      scope: SCOPES
    })
    // Listen for sign-in state changes.
    gapi.auth2.getAuthInstance().isSignedIn.listen(this.updateSigninStatus.bind(this, choice, renderThumb));
    // Handle the initial sign-in state.
    this.updateSigninStatus(gapi.auth2.getAuthInstance().isSignedIn.get(), choice, renderThumb);
  }
  updateSigninStatus (isSignedIn, choice, renderThumb) {
    if (isSignedIn) {
      this.getFiles(choice, renderThumb);
    } else {
      gapi.auth2.getAuthInstance().signIn();
    }
  }
  handleSignOut () {
    gapi.auth2.getAuthInstance().signOut();
  }
  getFiles (choice, renderThumb) {
    let q = "";
    if (choice === 0) {
      q = "mimeType contains 'image/png'";
    } else {
      q = "mimeType = 'application/pdf'";
    }
    gapi.client.drive.files.list({
      'pageSize': 10,
      'q': q,
      'fields': "files(id, name)"
    }).then(async (response) => {
      let files = response.result.files;
      if (files && files.length > 0) {
        for (let i = 0; i < files.length; i++) {
          let file = files[i];
          let result = await gapi.client.drive.files.get({ fileId: file.id, fields: '*' });
          file.thumb = result.result.thumbnailLink;
          file.webView = result.result.webViewLink;
          this.thumbID.set(file.thumb, result.result);
          renderThumb(file.thumb, file.webView, result.result.name);
        }
      } else {
        console.log('No files found.');
      }
    });
  }
}
module.exports = new GoogleDrive();
_onDrop (e) {
  eatEvent(e);
  let droppedFiles;
  if (e.dataTransfer.files.length) {
    droppedFiles = e.dataTransfer.files;
  } else {
    droppedFiles = [e.dataTransfer.items[0].getAsString((url) => {
      this.createBlob(url, e);
    })];
    return;
  }
  // (the snippet as posted cuts off here; presumably droppedFiles is processed next)
}
async createBlob (url, e) {
  let result = GoogleDrive.thumbID.get(url);
  let file = await gapi.client.drive.files.get({
    fileId: result.id,
    alt: 'media'
  })
  this._processDroppedFiles(file, e.pageX, e.pageY);
}
_processDroppedFiles (droppedFiles, x = undefined, y = undefined) {
  if (DocumentData.isReadOnly) return;
  let files = droppedFiles;
  if (files.length === 0) {
    // show dialog: unsupported format
    System.emit(
      System.Types.SHOW_DIALOG,
      new ErrorDialog(
        ErrorDialog.ID.UPLOAD,
        f(Strings.UPLOAD_ERROR),
        f(Strings.UPLOAD_ONLY_IMAGES)
      )
    );
  } else {
    System.emit(System.Types.START_LOADING);
    // only first file
    let firstFile = files; //[0];
    // a single image file
    this._uploadAndLoadImage(firstFile, firstFile.name)
      .then(obj => this._loadImage(obj, x, y))
      .then(() => System.emit(System.Types.STOP_LOADING))
      .catch(e => {
        console.warn('upload failed');
        console.error(e);
        System.emit(System.Types.STOP_LOADING);
      });
  }
}
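For the CORS error on the full-size drive.google.com links, one commonly used workaround (a sketch under my own assumptions, not the poster's code) is to download the file bytes through the Drive v3 REST endpoint with the signed-in user's OAuth token and render them via an object URL, so the browser never loads the cross-origin link directly.

// Fetch the full-size file as a blob via the Drive v3 files.get endpoint
// (alt=media) and point an <img> element at a local object URL.
async function renderDriveImage(fileId, imgElement) {
  const token = gapi.auth2.getAuthInstance()
    .currentUser.get()
    .getAuthResponse().access_token;
  const response = await fetch(
    `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media`,
    { headers: { Authorization: `Bearer ${token}` } }
  );
  if (!response.ok) throw new Error(`Drive download failed: ${response.status}`);
  const blob = await response.blob();
  imgElement.src = URL.createObjectURL(blob);
}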

Firebase cloud function storage trigger: first thumbnail URLs are fine, then the next ones are all the same thumbnail URLs as the first

I am trying to upload an image to Firebase and then produce two thumbnails. I am able to do this with no problems. My current roadblock is that when I write the URLs to the Realtime Database, I always get the same URLs as the initial upload.
For example:
1st upload: I get my uploaded image with the two proper thumbnails for that image.
2nd upload: I get my uploaded image with the two previous thumbnails (from the first image).
3rd upload: I get my uploaded image with the first image's thumbnails...
...and this continues to reproduce the URLs from the first upload.
In my storage the correct thumbnails are being generated, but the URLs always point to the first upload.
I don't know if this is a problem with getSignedUrl() or not; I'm really not sure what's going on here.
Here is my cloud function:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket); // The Storage object.
    // console.log(object);
    console.log(object.name);
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);
    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');
    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }
    // 1. Ensure the thumbnail dir exists
    await fs.ensureDir(workingDir);
    // 2. Download the source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });
    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 256];
    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb#${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);
      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);
      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName),
        metadata: {
          contentType: 'image/jpeg'
        }
      }).then((data) => {
        const file = data[0]
        // console.log(data)
        file.getSignedUrl({
          action: 'read',
          expires: '03-17-2100'
        }).then((response) => {
          const url = response[0];
          if (size === 64) {
            // console.log('generated 64');
            return admin.database().ref('profileThumbs').child(fileName).set({ thumb: url });
          } else {
            // console.log('generated 128');
            return admin.database().ref('categories').child(fileName).child('thumb').set(url);
          }
        })
        .catch(function (error) {
          console.error(error);
          return;
        });
      })
    });
    // 4. Run the upload operations
    await Promise.all(uploadPromises);
    // 5. Cleanup: remove the tmp/thumbs dir from the filesystem
    return fs.remove(workingDir);
  })
I cleaned up my code and solved my problem. Here is how I generated the URLs and wrote them to the proper database paths, by reading the user's UID and post ID from the file path:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [64, 256];
    const bucketDir = dirname(filePath);
    console.log(userUid);
    if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }
    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);
    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      sizes.map(size => {
        const newFileName = `thumb#${size}_${fileName}.png`
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`
        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp, () => {
            return bucket.upload(newFileTemp, {
              destination: join(bucketDir, newFilePath),
              metadata: {
                contentType: 'image/jpeg'
              }
            }).then((data) => {
              const file = data[0]
              console.log(data)
              file.getSignedUrl({
                action: 'read',
                expires: '03-17-2100'
              }, function (err, url) {
                console.log(url);
                if (err) {
                  console.error(err);
                  return;
                }
                if (size === 64) {
                  return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
                } else {
                  return admin.database().ref('categories').child(fileName).child('thumb').set(url);
                }
              })
            })
          })
      })
    }).catch(error => {
      console.log(error);
    });
  })
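As a side note, the same result can be reached without the nested callbacks by awaiting each step and returning the database write, so that Promise.all only resolves once every URL has been stored. This is only a sketch that mixes names from the two snippets above (workingDir, tmpFilePath, bucketDir from the first, userUid from the second); it is not the code the poster actually deployed.

const uploadPromises = sizes.map(async (size) => {
  const thumbName = `thumb#${size}_${fileName}`;
  const thumbPath = join(workingDir, thumbName);
  // Resize, upload, sign, and write the URL, with each step awaited in order.
  await sharp(tmpFilePath).resize(size, size).toFile(thumbPath);
  const [file] = await bucket.upload(thumbPath, {
    destination: join(bucketDir, thumbName),
    metadata: { contentType: 'image/jpeg' },
  });
  const [url] = await file.getSignedUrl({ action: 'read', expires: '03-17-2100' });
  return size === 64
    ? admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url })
    : admin.database().ref('categories').child(fileName).child('thumb').set(url);
});
await Promise.all(uploadPromises);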

Issues while uploading an image to Firebase Storage with the antd Upload action

I'm using the antd picture-wall/card example to upload images to my Firebase Storage with this reference code, and the only thing I'm changing is the action property on the <Upload> component.
For the action property I'm using a function that uploads the images to Firebase Storage instead of a link; both are accepted, as seen in the docs.
My action function looks like this:
export async function uploadImage(file) {
  const storage = firebase.storage()
  const metadata = {
    contentType: 'image/jpeg'
  }
  const storageRef = await storage.ref()
  const imageName = generateHashName() // a unique name for the image
  const imgFile = storageRef.child(`Vince Wear/${imageName}.png`)
  return imgFile.put(file, metadata)
}
Here's the issue: the image uploads to Firebase successfully, but I keep getting antd response-handling errors, and I'm not sure what the action function should return, even though the docs say it should return a promise.
Error message:
XML Parsing Error: syntax error
Location: http://localhost:3000/[object%20Object]
Line Number 1, Column 1:
The errors also appear as a red border on the uploaded image thumbnail.
What should my action function return to get rid of these errors? I can parse my Firebase response and return the necessary details to the antd Upload action.
Using
"antd": "^3.9.2",
"firebase": "^5.8.5",
"react": "^16.7.0",
You can use the customRequest prop to fix this issue. Have a look:
class CustomUpload extends Component {
  state = { loading: false, imageUrl: '' };

  handleChange = (info) => {
    if (info.file.status === 'uploading') {
      this.setState({ loading: true });
      return;
    }
    if (info.file.status === 'done') {
      getBase64(info.file.originFileObj, imageUrl => this.setState({
        imageUrl,
        loading: false
      }));
    }
  };

  beforeUpload = (file) => {
    const isImage = file.type.indexOf('image/') === 0;
    if (!isImage) {
      AntMessage.error('You can only upload image file!');
    }
    // You can remove this validation if you want
    const isLt5M = file.size / 1024 / 1024 < 5;
    if (!isLt5M) {
      AntMessage.error('Image must smaller than 5MB!');
    }
    return isImage && isLt5M;
  };
  // customRequest handler; marked async so the awaits below are valid
  customUpload = async ({ onError, onSuccess, file }) => {
    const storage = firebase.storage();
    const metadata = {
      contentType: 'image/jpeg'
    }
    const storageRef = await storage.ref();
    const imageName = generateHashName(); // a unique name for the image
    const imgFile = storageRef.child(`Vince Wear/${imageName}.png`);
    try {
      const image = await imgFile.put(file, metadata);
      onSuccess(null, image);
    } catch (e) {
      onError(e);
    }
  };
  render () {
    const { loading, imageUrl } = this.state;
    const uploadButton = (
      <div>
        <Icon type={loading ? 'loading' : 'plus'} />
        <div className="ant-upload-text">Upload</div>
      </div>
    );
    return (
      <div>
        <Upload
          name="avatar"
          listType="picture-card"
          className="avatar-uploader"
          beforeUpload={this.beforeUpload}
          onChange={this.handleChange}
          customRequest={this.customUpload}
        >
          {imageUrl ? <img src={imageUrl} alt="avatar" /> : uploadButton}
        </Upload>
      </div>
    );
  }
}
Just leaving this here in case anyone wants to track the upload progress of the file as well:
const customUpload = async ({ onError, onSuccess, file, onProgress }) => {
  let fileId = uuidv4()
  const fileRef = stg.ref('demo').child(fileId)
  try {
    const image = fileRef.put(file, { customMetadata: { uploadedBy: myName, fileName: file.name } })
    image.on(
      'state_changed',
      (snap) => onProgress({ percent: (snap.bytesTransferred / snap.totalBytes) * 100 }),
      (err) => onError(err),
      () => onSuccess(null, image.metadata_)
    )
  } catch (e) {
    onError(e)
  }
}

javascript FileReader - how to parse a long file in chunks?

Initially, I loaded the files like this:
export function сonvertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const reader = new FileReader();
      const targetFileInfo = {
        contentType: e.target.files[eachFile].type,
        filename: e.target.files[eachFile].name,
      };
      reader.readAsArrayBuffer(e.target.files[eachFile]);
      reader.onload = () => {
        resolve({ ...targetFileInfo, body: Array.from(new Uint8Array(reader.result)) });
      };
      reader.onerror = error => reject(error);
    });
  return Promise.all(files.map(asyncReadFile));
}
Here the constant files tells me how many files there are, and I apply the function to each of them.
Then I receive the file(s) in the component:
handleFileUpload = (e) => {
  сonvertFilesToByteArray(e)
    .then((result) => {
      runInAction(() => {
        this.files = [
          ...this.files,
          ...result,
        ];
      });
    })
    .catch(err => runInAction(() => {
      this.errors = [...this.errors, err[0].message];
    }));
}
The results are stored in this.files, so in the end this.files looks like [{ contentType: 'plain/text', filename: 'blabla', body: [123, 456, 23, ...] }], where [123, 456, 23, ...] is my ArrayBuffer converted to an array.
But with this approach, even though I use Promise.all, loading files heavier than about 2 MB freezes the page; it becomes impossible to interact with it in any way (although I can still scroll). The only fix I could think of was to split each file into chunks.
OK, so I tried to rewrite the code to read in chunks:
export function сonvertFilesToByteArray(e) {
  const MAX_FILE_SIZE = 1024 * 1024 * 50; // 50MB
  const files = Object.keys(e.target.files);
  const asyncReadFile = eachFile =>
    new Promise((resolve, reject) => {
      if (e.target.files[eachFile].size > MAX_FILE_SIZE) {
        return reject([{ message: `File ${e.target.files[eachFile].name} too large` }]);
      }
      const file = e.target.files[eachFile];
      let offset = 0;
      console.log(offset, 'offset', file.size, 'size');
      const defaultChunkSize = 64 * 1024; // bytes
      const fileReader = new FileReader();
      const blob = file.slice(offset, offset + defaultChunkSize);
      const isEndOfFile = () => offset >= file.size;
      const testEndOfFile = () => {
        if (isEndOfFile()) {
          console.log('Done reading file');
        }
      };
      fileReader.readAsArrayBuffer(blob);
      fileReader.onloadend = (event) => {
        const target = (event.target);
        if (target.error == null) {
          const result = target.result;
          offset += result.length;
          testEndOfFile();
          console.log(result, 'result');
          resolve(result);
        } else {
          reject(target.error);
        }
      };
    });
  return Promise.all(files.map(asyncReadFile));
}
Here I take the file and slice it. But the problem is that if the file is bigger than one chunk, I have to read the remaining chunks and stitch them back together, and I can't figure out how to do that in my case.
Please help me :) What do I need to do to read the file in chunks and end up with the whole thing as an ArrayBuffer?
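A minimal sketch of one way to assemble the chunks (readFileInChunks is my own helper, not from the question): read the slices sequentially, copy each one into a pre-allocated Uint8Array at the current offset, and resolve only once the offset reaches the file size. If the page still freezes, the expensive part is usually converting a ~50 MB buffer into a plain array with Array.from, and moving that conversion into a Web Worker is the usual next step.

export function readFileInChunks(file, chunkSize = 64 * 1024) {
  return new Promise((resolve, reject) => {
    const assembled = new Uint8Array(file.size); // whole file, filled chunk by chunk
    let offset = 0;
    const reader = new FileReader();

    const readNextChunk = () => {
      const blob = file.slice(offset, offset + chunkSize);
      reader.readAsArrayBuffer(blob);
    };

    reader.onload = (event) => {
      const chunk = new Uint8Array(event.target.result);
      assembled.set(chunk, offset); // copy this chunk into place
      offset += chunk.length;
      if (offset >= file.size) {
        resolve(assembled.buffer); // the full file as a single ArrayBuffer
      } else {
        readNextChunk(); // yields back to the event loop between chunks
      }
    };
    reader.onerror = () => reject(reader.error);

    readNextChunk();
  });
}

Usage would be something like const buffer = await readFileInChunks(e.target.files[0]); followed by whatever conversion the upload needs.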
