ASP.NET Core FilePond load file from server - javascript

I have a project that uses FilePond to upload files, and I need it to load a file from the server.
I already followed the docs but it doesn't work. FilePond shows the error "Error during load 400" and it doesn't even send the request to load the file from the server.
This is my JavaScript:
let pond = FilePond.create(value, {
    files: [
        {
            // the server file reference
            source: 'e958818e-92de-4953-960a-d8157467b766',
            // set type to local to indicate an already uploaded file
            options: {
                type: 'local'
            }
        }
    ]
});
FilePond.setOptions({
    labelFileProcessingError: (error) => {
        return error.body;
    },
    server: {
        headers: {
            '#tokenSet.HeaderName': '#tokenSet.RequestToken'
        },
        url: window.location.origin,
        process: (fieldName, file, metadata, load, error, progress, abort) => {
            // We ignore the metadata property and only send the file
            fieldName = "File";
            const formData = new FormData();
            formData.append(fieldName, file, file.name);
            const request = new XMLHttpRequest();
            request.open('POST', '/UploadFileTemp/Process');
            request.setRequestHeader('#tokenSet.HeaderName', '#tokenSet.RequestToken');
            request.upload.onprogress = (e) => {
                progress(e.lengthComputable, e.loaded, e.total);
            };
            request.onload = function () {
                if (request.status >= 200 && request.status < 300) {
                    load(request.responseText);
                }
                else {
                    let errorMessageFromServer = request.responseText;
                    error('oh no');
                }
            };
            request.send(formData);
        },
        revert: "/UploadFileTemp/revert/",
        load: "/UploadFileTemp/load"
    }
})
This is my controller:
public async Task<IActionResult> Load(string p_fileId)
{
    //Code to get the files

    //Return the file
    Response.Headers.Add("Content-Disposition", cd.ToString());
    Response.Headers.Add("X-Content-Type-Options", "nosniff");
    return PhysicalFile(filePath, "text/plain");
}
NB
I already tested my controller via Postman and it works. I also checked the Content-Disposition header.

I'd advise setting all the options first and then setting the files property.
You're setting the files and only then telling FilePond where to find them, so it's probably already trying to load them before it has an endpoint.
Restructuring the code to look like this should do the trick.
let pond = FilePond.create(value, {
    server: {
        headers: {
            '#tokenSet.HeaderName': '#tokenSet.RequestToken',
        },
        url: window.location.origin,
        process: (fieldName, file, metadata, load, error, progress, abort) => {
            // your processing method
        },
        revert: '/UploadFileTemp/revert',
        load: '/UploadFileTemp/load',
    },
    files: [
        {
            // the server file reference
            source: 'e958818e-92de-4953-960a-d8157467b766',
            // set type to local to indicate an already uploaded file
            options: {
                type: 'local',
            },
        },
    ],
});
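If the load request still doesn't match what the controller expects, server.load can also be given as a custom function instead of a URL string. A minimal sketch, assuming FilePond's documented custom-endpoint signature and that the Load action binds its p_fileId parameter from the query string:
FilePond.setOptions({
    server: {
        // headers, url, process and revert as in the snippet above
        load: (source, load, error, progress, abort, headers) => {
            // source is the server file reference, e.g. 'e958818e-92de-4953-960a-d8157467b766'
            const controller = new AbortController();

            fetch('/UploadFileTemp/load?p_fileId=' + encodeURIComponent(source), { signal: controller.signal })
                .then((res) => {
                    if (!res.ok) throw new Error('Load failed with status ' + res.status);
                    return res.blob();
                })
                // hand the file back to FilePond as a Blob
                .then((blob) => load(blob))
                .catch((err) => error(err.message));

            // expose an abort method so FilePond can cancel the request
            return {
                abort: () => {
                    controller.abort();
                    abort();
                }
            };
        }
    }
});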

Related

Uppy JS is showing success, but the file is not actually uploaded to the directory

I am using Laravel 9 with Uppy. My issue is that even though the upload reports success, the file isn't actually stored in the destination.
I have a global file upload action that runs across the entire application:
class UploadImageAction implements UploadImageContract
{
    public function handle(Request $request, $imageProperty, $image, $imageDir)
    {
        if ($request->hasFile($imageProperty)) {
            // Handle uploading lf_image
            if (!is_null($image) && Storage::exists($image)) {
                // Throw exceptions here
                Storage::delete($image);
            }
            // Throw exceptions here
            return $request->file($imageProperty)->store($imageDir);
        }
    }
}
It was working fine before adding Uppy to the project, so I don't know why the file(s) aren't being uploaded.
The files are stored together with the form data in the controller like so:
public function storeReport(Request $request)
{
    $report = new Report();
    $report->fill($request->validated());
    $report = $this->handleAttachments($request, $report); // Upload Image Action class here
    $report->save();
    if (!$report) {
        throw new ReportException('Something went wrong with the request.' . $report);
    }
    return $report;
}
The files should show up in public/app/report/attachments.
This is the Uppy code:
uppy.use(Uppy.Form, {
    target: '#reportsForm',
    resultName: 'uppyResult',
    getMetaFromForm: true,
    submitOnSuccess: false,
    triggerUploadOnSubmit: false,
});
uppy.use(Uppy.XHRUpload, {
    endpoint: storeReport,
    limit: 10,
    formData: true,
    fieldName: 'attachment',
    allowedMetaFields: null,
    headers: {
        'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')
    }
});

Load a file in a Chrome extension (Manifest V3)

I have tried different ways to load a config.json file in a Chrome extension, but none of them seems to work.
I tried using XHR: the response is empty. E.g.:
var xhr = new XMLHttpRequest();
xhr.open("GET", chrome.runtime.getURL("config.json"));
xhr.onreadystatechange = function() {
    if (this.readyState == 4) {
        console.log("request finished");
        console.log(xhr.responseText); // Returns empty
    }
};
xhr.send();
I tried using fetch, which also didn't work, even after granting web_accessible_resources access to that file in the manifest file.
const url = chrome.runtime.getURL('config.json');
fetch(url)
    .then((response) => response.json()) // assuming the file contains JSON
    .then((json) => doSomething(json));
I also tried this solution:
chrome.runtime.getPackageDirectoryEntry(function(root) {
    root.getFile("config.json", {}, function(fileEntry) {
        fileEntry.file(function(file) {
            var reader = new FileReader();
            reader.onloadend = function(e) {
                var myFile = JSON.parse(this.result);
                /* do here whatever with your JS object(s) */
            };
            reader.readAsText(file);
        });
    });
});
Here I get getPackageDirectoryEntry is undefined.
The only solution that works, obviously, is to include the JSON as an object in the JavaScript content script file, but that goes against code modularity.
Anyone with a solution?
Edit:
This is the part of my manifest regarding web_accessible_resources:
"web_accessible_resources": [
{
"resources": [ "config.js"],
"matches": [ "https://mywebsite/*" ]
}
]

AWS S3 CreatePresignedPost not generating some of the required fields

I'm trying to generate a presigned POST to give the browser privileges to upload / delete a specific file in a bucket, but it seems createPresignedPost is not generating some of the required fields. getSignedUrl works.
const signedUrl = await new Promise<PresignedPost>((resolve, reject) => {
    this.s3.createPresignedPost({
        Bucket: this.env.config.s3.buckets.images,
        Fields: { key },
        Conditions: [
            ["content-length-range", 0, 10 * 1024 * 1024]
        ],
        Expires: 3600,
    }, (err, preSigned) => { if (err) { reject(err) } else { resolve(preSigned) } });
});

// This works, but doesn't allow the object to be deleted, and does not allow setting a maximum file size
//
// const rawUrl = new URL(await this.s3.getSignedUrlPromise('putObject', {
//     Bucket: this.env.config.s3.buckets.images,
//     Key: key,
//     Expires: 3600,
// }));
//
// const signedUrl = {
//     url: rawUrl.origin + rawUrl.pathname,
//     fields: Object.fromEntries(Array.from(rawUrl.searchParams.entries()))
// };
The createPresignedPost generates:
{
    url: 'https://s3.eu-west-3.amazonaws.com/xxx-images',
    fields: {
        key: 'incoming/ae83pfxu7kf4dfdv4hbvorsxq31hadtjcp97ehwt30ds5',
        bucket: 'xxx-images',
        'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
        'X-Amz-Credential': 'xxx',
        'X-Amz-Date': '20200509T145014Z',
        Policy: 'eyJleHBpcmF0aW9uIjoiMjAyMC0wNS0wOVQxNTo1MDoxNFoiLCJjb25kaXRpb25zIjpbWyJjb250ZW50LWxlbmd0aC1yYW5nZSIsMCwxMDQ4NTc2MF0seyJrZXkiOiJpbmNvbWluZy9hZTgzcGZ4dTdrZjRkZmR2NGhidm9yc3hxMzFoYWR0amNwOTdlaHd0MzBkczUifSx7ImJ1Y2tldCI6InByZWZsaWdodGVtYWlsLWltYWdlcyJ9LHsiWC1BbXotQWxnb3JpdGhtIjoiQVdTNC1ITUFDLVNIQTI1NiJ9LHsiWC1BbXotQ3JlZGVudGlhbCI6IkFLSUE1RE5UN0lOWjJKTU5TQVhILzIwMjAwNTA5L2V1LXdlc3QtMy9zMy9hd3M0X3JlcXVlc3QifSx7IlgtQW16LURhdGUiOiIyMDIwMDUwOVQxNDUwMTRaIn1dfQ==',
        'X-Amz-Signature': 'xxx'
    }
}
Trying to PUT a file with those parameters gives:
<?xml version="1.0" encoding="UTF-8"?>
<Error>
    <Code>AuthorizationQueryParametersError</Code>
    <Message>Query-string authentication version 4 requires the X-Amz-Algorithm, X-Amz-Credential, X-Amz-Signature, X-Amz-Date, X-Amz-SignedHeaders, and X-Amz-Expires parameters.</Message>
    <RequestId>xxx</RequestId>
    <HostId>xxx</HostId>
</Error>
The older API call also generates the missing 'X-Amz-SignedHeaders' and 'X-Amz-Expires' parameters.
Can anyone tell me what I'm doing wrong?
You should use POST instead of PUT since you are using createPresignedPost to generate the URL.
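For reference, a minimal sketch of the browser-side upload with the presigned POST, assuming signedUrl is the { url, fields } object returned above and file is a File taken from an input element; every presigned field goes into the form data before the file itself:
const formData = new FormData();

// copy every presigned field (key, bucket, Policy, X-Amz-* ...) into the form
Object.entries(signedUrl.fields).forEach(([name, value]) => {
    formData.append(name, value);
});

// the file itself must come after the policy fields
formData.append('file', file);

// note: POST to signedUrl.url, not PUT
fetch(signedUrl.url, { method: 'POST', body: formData })
    .then((response) => {
        if (!response.ok) {
            throw new Error('Upload failed with status ' + response.status);
        }
        console.log('Upload succeeded');
    })
    .catch((err) => console.error(err));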

ExtJs - Save selected file from filefield

I want to upload a file using the ExtJS 6 modern toolkit. Therefore I display a MessageBox with a file chooser. How can I retrieve the selected file into a JavaScript object after clicking the OK button, so that I can upload it (e.g. via HTTP POST)?
this.createUploadMsgBox("File Upload", function (clickedButton) {
if (clickedButton == 'ok') {
console.log("file: " + file);
}
createUploadMsgBox: function (title, callback) {
Ext.Msg.show({
title: title,
width: 300,
buttons: Ext.MessageBox.OKCANCEL,
fn: callback,
items: [
{
xtype: 'filefield',
label: "File:",
name: 'file'
}
]
});
}
You can run my example here:
https://fiddle.sencha.com/#view/editor&fiddle/1kro
You have two possible solutions.
One is to use a form and send the file via form.submit() (call form.isValid() before the submit). You can retrieve the file on the server with a MultipartFile.
The other way is to use the JS File API. In your createUploadMsgBox function:
this.createUploadMsgBox("File Upload", function (clickedButton) {
if (clickedButton == 'ok') {
//console.log("file: " + file);
var filefield = Ext.ComponentQuery.query('filefield')[0];
var file = filefield.el.down('input[type=file]').dom.files[0];
var reader = new FileReader();
reader.onload = (function(theFile) {
return function(e) {
console.log(e.target.result);
};
})(file);
reader.readAsBinaryString(file);
}
});
In the file object you have the basic info about the file, and you will then see the content of the file in the console.
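If you then want to send that file via HTTP POST, a minimal sketch using FormData and fetch, with /upload standing in as a hypothetical server endpoint:
// file is the File object obtained from the filefield above
var formData = new FormData();
formData.append('file', file, file.name);

fetch('/upload', {            // hypothetical endpoint, adjust to your backend
    method: 'POST',
    body: formData            // the browser sets the multipart/form-data boundary itself
})
    .then(function (response) {
        if (!response.ok) {
            throw new Error('Upload failed with status ' + response.status);
        }
        console.log('File uploaded');
    })
    .catch(function (err) {
        console.error(err);
    });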
Hope this helps!

How to upload files to Amazon S3 with Meteor?

I'm trying to upload files to my Amazon S3 bucket. S3 and Amazon are set up.
This is the error message from Amazon:
Conflicting query string parameters: acl, policy
The policy and signature are encoded with the crypto module for Node.js:
var crypto = Npm.require("crypto");
I'm trying to build the POST request with Meteor's HTTP.post method. This could be wrong as well.
var BucketName = "mybucket";
var AWSAccessKeyId = "MY_ACCES_KEY";
var AWSSecretKey = "MY_SECRET_KEY";

//create policy
var POLICY_JSON = {
    "expiration": "2009-01-01T00:00:00Z",
    "conditions": [
        {"bucket": BucketName},
        ["starts-with", "$key", "uploads/"],
        {"acl": 'public-read'},
        ["starts-with", "$Content-Type", ""],
        ["content-length-range", 0, 1048576],
    ]
};
var policyBase64 = encodePolicy(POLICY_JSON);

//create signature
var SIGNATURE = encodeSignature(policyBase64, AWSSecretKey);
console.log('signature: ', SIGNATURE);
This is the POST request I'm using with Meteor:
//Send data----------
var options = {
    "params": {
        "key": file.name,
        'AWSAccessKeyId': AWSAccessKeyId,
        'acl': 'public-read',
        'policy': policyBase64,
        'signature': SIGNATURE,
        'Content-Type': file.type,
        'file': file,
        "enctype": "multipart/form-data",
    }
};
HTTP.call('POST', 'https://' + BucketName + '.s3.amazonaws.com/', options, function(error, result) {
    if (error) {
        console.log("and HTTP ERROR:", error);
    } else {
        console.log("result:", result);
    }
});
and here I'm encoding the policy and the signature:
encodePolicy = function(jsonPolicy) {
    // stringify the policy, store it in a NodeJS Buffer object
    var buffer = new Buffer(JSON.stringify(jsonPolicy));
    // convert it to base64
    var policy = buffer.toString("base64");
    // replace "/" and "+" so that it is URL-safe.
    return policy.replace(/\//g, "_").replace(/\+/g, "-");
};

encodeSignature = function(policy, secret) {
    var hmac = crypto.createHmac("sha256", secret);
    hmac.update(policy);
    return hmac.digest("hex");
};
I can't figure out what's going on. The problem might already be in the POST request, or in the encoding, because I don't know these methods too well. If someone could point me in the right direction on how to encode the policy, or how to send the POST request to Amazon S3 properly, it would help a lot.
(I don't want to use filepicker.io, because I don't want to force the client to sign up there as well.)
Thanks in advance!!!
For direct uploads to S3 you can use the Slingshot package:
meteor add edgee:slingshot
On the server side, declare your directive:
Slingshot.createDirective("myFileUploads", Slingshot.S3Storage, {
    bucket: "mybucket",
    allowedFileTypes: ["image/png", "image/jpeg", "image/gif"],
    acl: "public-read",
    authorize: function () {
        //You can add user restrictions here
        return true;
    },
    key: function (file) {
        return file.name;
    }
});
This directive will generate the policy and signature automatically.
And then just upload the file like this:
var uploader = new Slingshot.Upload("myFileUploads");

uploader.send(document.getElementById('input').files[0], function (error, url) {
    Meteor.users.update(Meteor.userId(), {$push: {"profile.files": url}});
});
Why don't you use the aws-sdk package? It packs all the needed methods for you. For example, here's the simple call for adding a file to a bucket:
s3.putObject({
    Bucket: ...,
    ACL: ...,
    Key: ...,
    Metadata: ...,
    ContentType: ...,
    Body: ...,
}, function(err, data) {
    ...
});
Check out the S3 Meteor package. The readme has a very comprehensive walkthrough of how to get started.
The first thing is to add the package for S3 file upload.
For installation, add the AWS SDK smart package:
$ meteor add peerlibrary:aws-sdk
1. Create a directive upload.js and paste this code:
angular.module('techno')
    .directive("fileupload", [function () {
        return {
            scope: {
                fileupload: "="
            },
            link: function(scope, element, attributes) {
                $('.button-collapse').sideNav();
                element.bind("change", function (event) {
                    scope.$apply(function () {
                        scope.fileupload = event.target.files[0];
                    });
                });
            }
        };
    }]);
2. Get your access keys and paste them into your fileUpload.js file:
AWS.config.update({
    accessKeyId: 'AKIAJ2TLJBEUO6IJLKMN',
    secretAccessKey: 'lqGE9o4WkovRi0hCFPToG0B6w9Okg/hUfpVr6K6g'
});
AWS.config.region = 'us-east-1';
let bucket = new AWS.S3();
3. Now put this upload code in your fileUpload.js directive:
vm.upload = (Obj) => {
    vm.loadingButton = true;
    let name = Obj.name;
    let params = {
        Bucket: 'technodheeraj',
        Key: name,
        ContentType: 'application/pdf',
        Body: Obj,
        ServerSideEncryption: 'AES256'
    };
    bucket.putObject(params, (err, data) => {
        if (err) {
            console.log('---err------->', err);
        }
        else {
            vm.fileObject = {
                userId: Meteor.userId(),
                eventId: id,
                fileName: name,
                fileSize: fileObj.size,
            };
            vm.call("saveFile", vm.fileObject, (error, result) => {
                if (!error) {
                    console.log('File saved successfully');
                }
            });
        }
    });
};
4. Now paste this code into the "saveFile" method:
saveFile: function(file) {
    if (file) {
        return Files.insert(file);
    }
}
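Presumably this method lives inside a Meteor.methods block on the server, with a Files Mongo collection defined in shared code; a minimal sketch of that wiring, where the collection name 'files' is an assumption:
// collections/files.js (shared between client and server) -- assumed location and name
Files = new Mongo.Collection('files');

// server/methods.js
Meteor.methods({
    saveFile: function (file) {
        if (file) {
            return Files.insert(file);
        }
    }
});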
5. In your HTML, paste this code:
<input type="file" name="file" fileupload="file">
<button type="button" class="btn btn-info " ng-click="vm.upload(file)"> Upload File</button>
