Data from the MinIO data stream is coming multiple times - javascript

The data (in Uint8Array format) from the MinIO client sometimes comes only once, but other times it comes 3-4 times.
s3Client.getObject(
  bucketname,
  `folder/file.jpg`,
  function (err, dataStream) {
    if (err) {
      onError();
    }
    dataStream.on("data", function (data) {
      console.log(data, "data");
      const urlData = new Blob([data]);
    });
  }
);
The console gives me this response:
Uint8Array(65536) [255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 67, 0, 8, 6, 6, 7, 6, 5, 8, 7, 7, 7, 9, 9, 8, 10, 12, 20, 13, 12, 11, 11, 12, 25, 18, 19, 15, 20, 29, 26, 31, 30, 29, 26, 28, 28, 32, 36, 46, 39, 32, 34, 44, 35, 28, 28, 40, 55, 41, 44, 48, 49, 52, 52, 52, 31, 39, 57, 61, 56, 50, 60, 46, 51, 52, 50, 255, 219, 0, 67, 1, 9, 9, 9, 12, 11, 12, …] "data"
Uint8Array(36498) [90, 44, 146, 187, 26, 132, 98, 175, 32, 178, 248, 209, 107, 36, 152, 189, 209, 166, 133, 63, 189, 12, 194, 67, 249, 16, 191, 206, 189, 7, 71, 214, 180, 253, 122, 201, 110, 244, 235, 149, 154, 18, 112, 113, 193, 83, 232, 65, 228, 26, 230, 181, 31, 133, 222, 27, 188, 129, 197, 181, 180, 150, 115, 30, 146, 67, 35, 28, 31, 247, 88, 145, 143, 202, 160, 248, 125, 225, 13, 71, 194, 218, 134, 171, 246, 199, 70, 130, 80, 139, 11, 198, 220, 73, 130, 220, 145, 212, 17, 158, …] "data"
And after refreshing the page for the same image, the console shows:
Uint8Array(102034) [255, 216, 255, 224, 0, 16, 74, 70, 73, 70, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0, 255, 219, 0, 67, 0, 8, 6, 6, 7, 6, 5, 8, 7, 7, 7, 9, 9, 8, 10, 12, 20, 13, 12, 11, 11, 12, 25, 18, 19, 15, 20, 29, 26, 31, 30, 29, 26, 28, 28, 32, 36, 46, 39, 32, 34, 44, 35, 28, 28, 40, 55, 41, 44, 48, 49, 52, 52, 52, 31, 39, 57, 61, 56, 50, 60, 46, 51, 52, 50, 255, 219, 0, 67, 1, 9, 9, 9, 12, 11, 12, …] "data"
I am already calling it in useEffect. It hits the MinIO client a single time, but the function (err, dataStream) callback is not giving consistent data.

Because the stream delivers the file in chunks, and each "data" event carries a different piece of the same file.
You can tell the entire data has arrived once dataStream.on("end", ...) fires:
let size = 0
minioClient.getObject('mybucket', 'photo.jpg', function (err, dataStream) {
  if (err) {
    return console.log(err)
  }
  dataStream.on('data', function (chunk) {
    size += chunk.length
  })
  dataStream.on('end', function () {
    console.log('End. Total size = ' + size)
    console.log(dataStream)
    console.log(dataStream.socket._tlsOptions.session)
  })
  dataStream.on('error', function (err) {
    console.log(err)
  })
})
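If the goal, as in the question above, is to build a Blob of the image, a minimal sketch (reusing the question's s3Client, bucketname, and onError names) is to collect every chunk and assemble them only after the stream ends:
s3Client.getObject(bucketname, `folder/file.jpg`, function (err, dataStream) {
  if (err) {
    return onError()
  }
  const chunks = []
  dataStream.on('data', function (chunk) {
    chunks.push(chunk) // each 'data' event is one piece of the same file
  })
  dataStream.on('end', function () {
    const urlData = new Blob(chunks, { type: 'image/jpeg' }) // the complete image
    console.log('received', chunks.length, 'chunks, blob size =', urlData.size)
  })
  dataStream.on('error', function (err) {
    console.log(err)
  })
})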

Related

Decompressing bytes in C#

I have a simple program which decompresses a byte array in C#.
Here is the code of the Decompress function:
private byte[] Decompress(byte[] compressed)
{
    using var from = new MemoryStream(compressed);
    using var to = new MemoryStream();
    using var gZipStream = new GZipStream(from, CompressionMode.Decompress);
    gZipStream.CopyTo(to);
    return to.ToArray();
}
and I have two byte arrays
var first = new byte[] { 31, 139, 8, 0, 0, 0, 0, 0, 0, 10, 45, 202, 65, 10, 128, 48, 12, 5, 209, 171, 132, 28, 195, 219, 68, 251, 91, 11, 38, 41, 105, 92, 137, 119, 23, 197, 221, 240, 152, 139, 39, 80, 120, 185, 88, 13, 234, 214, 55, 94, 88, 197, 154, 83, 115, 73, 154, 103, 84, 217, 64, 56, 48, 118, 177, 164, 130, 57, 164, 7, 5, 20, 186, 226, 141, 22, 72, 90, 97, 168, 61, 41, 251, 167, 7, 124, 72, 20, 250, 167, 233, 145, 124, 223, 15, 199, 8, 21, 161, 110, 0, 0, 0 };
var second = new byte[] { 31, 139, 8, 0, 0, 0, 0, 0, 0, 10, 1, 80, 0, 175, 255, 34, 127, 33, 66, 194, 252, 100, 6, 77, 155, 56, 146, 208, 225, 255, 219, 203, 33, 122, 153, 14, 246, 130, 28, 163, 183, 127, 145, 96, 71, 208, 206, 123, 9, 122, 133, 204, 255, 86, 191, 73, 197, 91, 247, 121, 156, 180, 101, 176, 125, 158, 123, 114, 192, 252, 70, 193, 148, 180, 183, 200, 146, 79, 121, 135, 152, 90, 247, 107, 168, 157, 128, 84, 121, 4, 122, 92, 145, 45, 223, 177, 179, 189, 149, 80, 0, 0, 0 }
my next step is:
var decompressed = Decompress(first);
var result = Encoding.UTF8.GetString(decompressed);
Output: {"seed":{"mnemonic":"mango goat surface elephant despair remember regret benefit timber leopard member sort"}}
var decompressed = Decompress(second);
var result = Encoding.UTF8.GetString(decompressed);
Output: "!B??d♠M?8??????!z???∟???`G??{ z???V?I?[?y??e?}?{r??F?????Oy??Z?k???Ty♦z\?-?
The structure of these arrays looks the same: length (103), the first 10 elements, and the last 3 elements. But the first array decompresses fine with the Decompress function, while the second one comes out as garbage. I don't know why this is happening. Can someone explain where my mistake is?
I also tried to decompress in Python using gzip, but I get the same result. Here is the code:
import gzip
first = [31, 139, 8, 0, 0, 0, 0, 0, 0, 10, 45, 202, 65, 10, 128, 48, 12, 5, 209, 171, 132, 28, 195, 219, 68, 251, 91, 11, 38, 41, 105, 92, 137, 119, 23, 197, 221, 240, 152, 139, 39, 80, 120, 185, 88, 13, 234, 214, 55, 94, 88, 197, 154, 83, 115, 73, 154, 103, 84, 217, 64, 56, 48, 118, 177, 164, 130, 57, 164, 7, 5, 20, 186, 226, 141, 22, 72, 90, 97, 168, 61, 41, 251, 167, 7, 124, 72, 20, 250, 167, 233, 145, 124, 223, 15, 199, 8, 21, 161, 110, 0, 0, 0 ];
second = [31, 139, 8, 0, 0, 0, 0, 0, 0, 10, 1, 80, 0, 175, 255, 34, 127, 33, 66, 194, 252, 100, 6, 77, 155, 56, 146, 208, 225, 255, 219, 203, 33, 122, 153, 14, 246, 130, 28, 163, 183, 127, 145, 96, 71, 208, 206, 123, 9, 122, 133, 204, 255, 86, 191, 73, 197, 91, 247, 121, 156, 180, 101, 176, 125, 158, 123, 114, 192, 252, 70, 193, 148, 180, 183, 200, 146, 79, 121, 135, 152, 90, 247, 107, 168, 157, 128, 84, 121, 4, 122, 92, 145, 45, 223, 177, 179, 189, 149, 80, 0, 0, 0 ];
bytes_of_values = bytes(first)
decompressed_block = gzip.decompress(bytes_of_values)
print(decompressed_block.decode('utf8', 'ignore'))
#Output: {"seed":{"mnemonic":"mango goat surface elephant despair remember regret benefit timber leopard member sort"}}
bytes_of_values = bytes(second)
decompressed_block = gzip.decompress(bytes_of_values)
print(decompressed_block.decode('utf8', 'ignore'))
#Output: "!Bd♠M8!z∟`G{ zVI[ye}{rFȒOyZkTy♦z\-
UPDATE #1
The FIRST ARRAY was expanded with this JS function:
const crypto = require('crypto')

module.exports = function (size) {
  return {
    expand (data) {
      // allocate `size` random bytes (or data.length + 4 if the data is larger),
      // write the payload length as a 4-byte big-endian prefix, then copy the payload in at offset 4
      const buffer = crypto.randomBytes(data.length < size - 4 ? size : data.length + 4)
      buffer.writeUInt32BE(data.length, 0)
      data.copy(buffer, 4, 0)
      return buffer
    },
    shrink (buffer) {
      // read the length prefix and slice the payload back out, discarding the random padding
      const dataLen = buffer.readUInt32BE(0)
      return buffer.slice(4, dataLen + 4)
    }
  }
}
Here size = 2 ** 15 (2^15 = 32768) and data = {"seed":{"mnemonic":"mango goat surface elephant despair remember regret benefit timber leopard member sort"}}. The output is a Buffer of size 32768. After that I take this Buffer, convert it to a string like "[1, 2, 3, 4, 5, etc.]" (remember that the size is 32768), and pass it to C#. In C# I use this code:
var shrink = BinaryPrimitives.ReadUInt32BigEndian(data); //data == array with size 32768 (string from JS)
//shrink == 103 (integer)
var segment = new ArraySegment<byte>(data, 4, (int)shrink).ToArray();
// segment == FIRST ARRAY
After following these steps I get the FIRST ARRAY, and it decompresses fine. The structure of the first array is identical to the SECOND ARRAY, and as I understand it the SECOND ARRAY was produced by the same algorithm. But it does not unpack.
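For reference, a round trip through the expand/shrink module above would look like this (a sketch only; './expand-shrink' is a hypothetical filename for the module shown in the update). Only the 4-byte big-endian length prefix plus the payload carry information; the rest of the 32768-byte buffer is random padding, which is why it differs on every run:
const expandShrink = require('./expand-shrink')(2 ** 15) // hypothetical path to the module above
const payload = Buffer.from('{"seed":{"mnemonic":"..."}}')
const padded = expandShrink.expand(payload)   // 32768 bytes: [length, 4 bytes BE][payload][random padding]
const recovered = expandShrink.shrink(padded) // slices the payload back out using the length prefix
console.log(recovered.equals(payload))        // true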

reduce the size of a javascript array while keeping members that are evenly distributed in the original

Hard to put into words, but I am looking for a function that takes an array and reduces it down to a given size. The use case is specifying ticks for a graph in d3.
const availableDatetimes: Date[] = [...]
const numberOfXTicks = chartWidth / 80
const tickValues = reduceArrSize(availableDatetimes, numberOfXTicks)
The result should have evenly distributed dates.
I called it shrinkArray(), because reducing an array is the name of a different well-defined operation. Here it is:
const shrinkArray = (array, size) => {
  const step = array.length / size
  return array.filter((v, i) => Math.floor(i % step) == 0)
}
Let's shrink an array of size 100 to size 33:
const originalArray = (new Array(100).fill(0)).map((v, i) => i)
const shrunkenArray = shrinkArray(originalArray, 33)
console.log(shrunkenArray);
console.log(shrunkenArray.length == 33);
So:
[
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71,
72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83,
84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95,
96, 97, 98, 99
]
is shrunk to:
[
0, 4, 7, 10, 13, 16, 19, 22, 25,
28, 31, 34, 37, 40, 43, 46, 49, 53,
56, 59, 62, 65, 68, 71, 74, 77, 80,
83, 86, 89, 92, 95, 98
]
that is of size 33.
In this example the array is integers from 0 to 99, but obviously the function works for any data type.
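Note that because of the Math.floor(i % step) test, the spacing in the output above varies between 3 and 4 and the last element (99) is not kept. A variant (an illustration, not part of the original answer) that always returns exactly size elements, spread evenly and keeping both endpoints:
const shrinkArrayEven = (array, size) => {
  if (size >= array.length) return array.slice()
  if (size === 1) return [array[0]]
  // pick `size` indices spaced evenly between 0 and array.length - 1
  return Array.from({ length: size }, (_, i) =>
    array[Math.round(i * (array.length - 1) / (size - 1))]
  )
}
// shrinkArrayEven(originalArray, 33) returns exactly 33 entries, including both 0 and 99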

Face match with TensorFlow.js between image stored in DB and real-time webcam image

I am not familiar with TensorFlow.js. I am creating a module in React and Node.js with TensorFlow.js.
I am able to capture the image first and convert the image with this code:
const imageDataURL = this.canvasRef.current.toDataURL('image/png');
const b = Buffer.from(imageDataURL, 'base64');
getting this result:
Uint8Array(297020) [117, 171, 90, 138, 102, 160, 123, 250, 103, 129, 182, 172, 123, 174, 34, 84, 19, 145, 195, 66, 134, 130, 128, 0, 0, 3, 82, 82, 17, 20, 128, 0, 0, 150, 0, 0, 0, 125, 2, 1, 128, 0, 0, 56, 84, 20, 245, 0, 0, 8, 0, 18, 81, 16, 85, 30, 23, 187, 47, 90, 100, 201, 50, 92, 73, 90, 59, 132, 94, 89, 199, 119, 67, 71, 136, 185, 32, 185, 43, 50, 34, 28, 25, 143, 243, 63, 190, 198, 68, 144, 40, 211, 192, 136, 57, 242, 242, 43, 59, 20, …]
And with this code:
const tensor = tf.browser.fromPixels(this.videoRef.current);
result is:
Tensor {kept: false, isDisposedInternal: false, shape: Array(3), dtype: "int32", size: 921600, …}
dataId: {id: 4811}
dtype: "int32"
id: 3541
isDisposedInternal: false
kept: false
rankType: "3"
scopeId: 6508
shape: (3) [480, 640, 3]
size: 921600
strides: (2) [1920, 3]
isDisposed: (...)
rank: (...)
__proto__: Object
I need to convert the first image to a Tensor, and after that I need to match the face against the real-time webcam image: does the person match, true or false?
I am very confused about how to write the logic.
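A minimal sketch of the first half of this, converting the stored data URL back into a tensor, assuming tf is the imported @tensorflow/tfjs module and imageDataURL is the string produced by toDataURL() above; the actual face match would additionally need a face-embedding model and a distance threshold, which is not shown here:
async function dataUrlToTensor(imageDataURL) {
  const img = new Image()
  img.src = imageDataURL             // the 'data:image/png;base64,...' string from toDataURL()
  await img.decode()                 // wait for the browser to decode the image
  return tf.browser.fromPixels(img)  // int32 tensor of shape [height, width, 3]
}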

running async JS functions on BigQuery with #standardSQL

Now that BigQuery supports async on #standardSQL, how can I convert this #legacySQL function to run on #standardSQL?
#legacySQL
SELECT SUM(s)
FROM
js((
SELECT FLOOR(RAND()*100000) group, NEST(requests) as x
FROM (
SELECT requests, content_size
FROM [fh-bigquery:wikipedia.pagecounts_201205]
)
GROUP BY group)
, group, x
, "[{name:'s', type: 'float'}]",
"function (row, emit) {
const memory = new WebAssembly.Memory({ initial: 256, maximum: 256 });
const env = {
'abortStackOverflow': _ => { throw new Error('overflow'); },
'table': new WebAssembly.Table({ initial: 0, maximum: 0, element: 'anyfunc' }),
'tableBase': 0,
'memory': memory,
'memoryBase': 1024,
'STACKTOP': 0,
'STACK_MAX': memory.buffer.byteLength,
};
const imports = { env };
const bytes = new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0, 1, 139, 128, 128, 128, 0, 2, 96, 1, 127, 0, 96, 2, 127, 127, 1, 127, 2, 254, 128, 128, 128, 0, 7, 3, 101, 110, 118, 8, 83, 84, 65, 67, 75, 84, 79, 80, 3, 127, 0, 3, 101, 110, 118, 9, 83, 84, 65, 67, 75, 95, 77, 65, 88, 3, 127, 0, 3, 101, 110, 118, 18, 97, 98, 111, 114, 116, 83, 116, 97, 99, 107, 79, 118, 101, 114, 102, 108, 111, 119, 0, 0, 3, 101, 110, 118, 6, 109, 101, 109, 111, 114, 121, 2, 1, 128, 2, 128, 2, 3, 101, 110, 118, 5, 116, 97, 98, 108, 101, 1, 112, 1, 0, 0, 3, 101, 110, 118, 10, 109, 101, 109, 111, 114, 121, 66, 97, 115, 101, 3, 127, 0, 3, 101, 110, 118, 9, 116, 97, 98, 108, 101, 66, 97, 115, 101, 3, 127, 0, 3, 130, 128, 128, 128, 0, 1, 1, 6, 147, 128, 128, 128, 0, 3, 127, 1, 35, 0, 11, 127, 1, 35, 1, 11, 125, 1, 67, 0, 0, 0, 0, 11, 7, 136, 128, 128, 128, 0, 1, 4, 95, 115, 117, 109, 0, 1, 9, 129, 128, 128, 128, 0, 0, 10, 196, 128, 128, 128, 0, 1, 190, 128, 128, 128, 0, 1, 7, 127, 2, 64, 35, 4, 33, 8, 35, 4, 65, 16, 106, 36, 4, 35, 4, 35, 5, 78, 4, 64, 65, 16, 16, 0, 11, 32, 0, 33, 2, 32, 1, 33, 3, 32, 2, 33, 4, 32, 3, 33, 5, 32, 4, 32, 5, 106, 33, 6, 32, 8, 36, 4, 32, 6, 15, 0, 11, 0, 11]);
WebAssembly.instantiate(bytes, imports).then(wa => {
const exports = wa.instance.exports;
const sum = exports._sum;
for (var i = 0, len = row.x.length; i < len; i++) {
emit({s: sum(row.x[i], row.x[i])});
}
});
}"
)
(from https://medium.com/#hoffa/bigquery-beyond-sql-and-js-running-c-and-rust-code-at-scale-33021763ee1f)
Now you can create an async function x() and then return x().
Instead of being able to emit() multiple times, you'll have to nest the results into an array.
Working example:
CREATE TEMP FUNCTION `magic_function`(x ARRAY<INT64>) RETURNS ARRAY<INT64> LANGUAGE js AS '''
const memory = new WebAssembly.Memory({ initial: 256, maximum: 256 });
const env = {
'abortStackOverflow': _ => { throw new Error('overflow'); },
'table': new WebAssembly.Table({ initial: 0, maximum: 0, element: 'anyfunc' }),
'tableBase': 0,
'memory': memory,
'memoryBase': 1024,
'STACKTOP': 0,
'STACK_MAX': memory.buffer.byteLength,
};
const imports = { env };
const bytes = new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0, 1, 139, 128, 128, 128, 0, 2, 96, 1, 127, 0, 96, 2, 127, 127, 1, 127, 2, 254, 128, 128, 128, 0, 7, 3, 101, 110, 118, 8, 83, 84, 65, 67, 75, 84, 79, 80, 3, 127, 0, 3, 101, 110, 118, 9, 83, 84, 65, 67, 75, 95, 77, 65, 88, 3, 127, 0, 3, 101, 110, 118, 18, 97, 98, 111, 114, 116, 83, 116, 97, 99, 107, 79, 118, 101, 114, 102, 108, 111, 119, 0, 0, 3, 101, 110, 118, 6, 109, 101, 109, 111, 114, 121, 2, 1, 128, 2, 128, 2, 3, 101, 110, 118, 5, 116, 97, 98, 108, 101, 1, 112, 1, 0, 0, 3, 101, 110, 118, 10, 109, 101, 109, 111, 114, 121, 66, 97, 115, 101, 3, 127, 0, 3, 101, 110, 118, 9, 116, 97, 98, 108, 101, 66, 97, 115, 101, 3, 127, 0, 3, 130, 128, 128, 128, 0, 1, 1, 6, 147, 128, 128, 128, 0, 3, 127, 1, 35, 0, 11, 127, 1, 35, 1, 11, 125, 1, 67, 0, 0, 0, 0, 11, 7, 136, 128, 128, 128, 0, 1, 4, 95, 115, 117, 109, 0, 1, 9, 129, 128, 128, 128, 0, 0, 10, 196, 128, 128, 128, 0, 1, 190, 128, 128, 128, 0, 1, 7, 127, 2, 64, 35, 4, 33, 8, 35, 4, 65, 16, 106, 36, 4, 35, 4, 35, 5, 78, 4, 64, 65, 16, 16, 0, 11, 32, 0, 33, 2, 32, 1, 33, 3, 32, 2, 33, 4, 32, 3, 33, 5, 32, 4, 32, 5, 106, 33, 6, 32, 8, 36, 4, 32, 6, 15, 0, 11, 0, 11]);
async function main() {
const wa = await WebAssembly.instantiate(bytes, imports);
const exports = wa.instance.exports;
const magic_sum = exports._sum;
return x.map((val) => {
return magic_sum(val, val);
});
}
return main();
''';
SELECT SUM(s) sum_s
FROM (
SELECT FLOOR(RAND()*100000) grp, magic_function(ARRAY_AGG(views)) s
FROM `fh-bigquery.wikipedia_v3.pageviews_2019`
WHERE DATE(datehour) = '2019-01-01' AND wiki='pt'
GROUP BY grp
), UNNEST(s) s
(https://issuetracker.google.com/issues/138430827#comment9)
(h/t https://twitter.com/mylesborins)

Array concat of the Uint8Array, output of the Chip And Pin Device

I'm getting output data from a device as a series of arrays. The array comes 5 times, each time carrying 64 items, while swiping a card through the device. I want to concat these arrays. Below is the final array I got. I know array.concat(array2) should work, but for some reason it is not. Can anybody tell me what I am doing wrong here, or offer any suggestion?
(5) [Uint8Array(64), Uint8Array(64), Uint8Array(64), Uint8Array(64), Uint8Array(64)]
0:
Uint8Array(64) [2, 86, 105, 86, 79, 116, 101, 99, 104, 50, 0, 2, 0, 0, 238, 200, 223, 238, 35, 130, 0, 213, 2, 207, 0, 128, 91, 55, 37, 0, 3, 131, 1, 9, 37, 42, 53, 49, 57, 54, 42, 42, 42, 42, 42, 42, 42, 42, 48, 53, 50, 50, 94, 75, 65, 78, 73, 77, 79, 90, 72, 73, 32, 32]
1:
Uint8Array(64) [3, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 47, 94, 51, 53, 48, 53, 42, 42, 42, 63, 42, 59, 53, 49, 57, 54, 42, 42, 42, 42, 42, 42, 42, 42, 48, 53, 50, 50, 61, 51, 53, 48, 53, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 63, 42, 214]
2:
Uint8Array(64) [3, 110, 241, 211, 17, 223, 130, 10, 242, 35, 65, 176, 170, 72, 224, 121, 99, 248, 102, 252, 131, 180, 174, 84, 98, 54, 209, 168, 39, 193, 225, 129, 138, 114, 155, 135, 73, 118, 101, 68, 229, 32, 42, 91, 254, 210, 26, 189, 197, 26, 64, 21, 112, 196, 228, 181, 216, 218, 56, 94, 46, 77, 43, 130]
3:
Uint8Array(64) [3, 72, 80, 22, 34, 37, 108, 147, 123, 170, 72, 79, 190, 73, 86, 3, 108, 152, 228, 68, 253, 69, 18, 101, 201, 56, 114, 142, 156, 174, 55, 197, 194, 98, 153, 73, 2, 0, 0, 1, 64, 2, 136, 186, 104, 3, 159, 57, 1, 144, 255, 238, 1, 5, 223, 238, 48, 1, 12, 223, 238, 38, 1, 200]
4:
Uint8Array(64) [4, 120, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
length: 5
Merge Array with Push:
const array1 = [2, 7, 4];
const array2 = [3, 5, 9];
array1.push(...array2);
console.log(array1)
Using concat and the spread operator:
const array1 = [1,2];
const array2 = [3,4];
// Method 1: Concat
const combined1 = [].concat(array1, array2);
// Method 2: Spread
const combined2 = [...array1, ...array2];
console.log(combined1);
console.log(combined2);
You can also use Buffer.concat (in Node.js), which returns a Buffer object. If you need the result to be of type Uint8Array, you can wrap the result of whichever method you chose like this:
new Uint8Array(concatedArrays)
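For typed arrays specifically, note that Uint8Array has no concat method of its own, which is likely why array.concat(array2) failed in the first place. A sketch that allocates one Uint8Array of the combined length and copies each chunk into it with set() (assuming chunks is the 5-element array of Uint8Array(64) shown above):
function concatUint8Arrays(chunks) {
  const total = chunks.reduce((sum, c) => sum + c.length, 0)
  const result = new Uint8Array(total)
  let offset = 0
  for (const c of chunks) {
    result.set(c, offset) // copy this chunk at the current offset
    offset += c.length
  }
  return result           // Uint8Array(320) for five 64-byte chunks
}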
