Performing PCA per image over imageCollection in Google Earth Engine - javascript

I need to perform a PCA per image over an image collection. Then, I want to keep only principal component axis 1 and add it as a band to every image within my image collection. Ultimately, I want to export a .csv file with GPS sampling locations as row headers and image IDs as column headers, with mean principal component axis 1 as the values. The idea behind this is that I want a proxy (spectral heterogeneity) to use in further statistical analysis in R.
Here is the code I have thus far:
//Create a test image to extract information to be used during the PCA
var testImage =ee.Image('LANDSAT/LC08/C01/T1_SR/LC08_168080_20130407')
.select(['B2', 'B3', 'B4', 'B5', 'B6', 'B7'],
['Blue', 'Green', 'Red', 'NIR', 'SWIR1', 'SWIR2']);
// Define variables for PCA
var region = Extent;
var scale = testImage.projection().nominalScale();
var bandNames = testImage.bandNames();
Map.centerObject(region);
// Function for performing PCA
function doPCA(image){
// This code is from https://code.earthengine.google.com/7249153a8a0f5c79eaf562ed45a7adad
var meanDict = image.reduceRegion({
reducer: ee.Reducer.mean(),
geometry: region,
scale: scale,
maxPixels: 1e9
});
var means = ee.Image.constant(meanDict.values(bandNames));
var centered = image.subtract(means);
// This helper function returns a list of new band names.
var getNewBandNames = function(prefix) {
var seq = ee.List.sequence(1, bandNames.length());
return seq.map(function(b) {
return ee.String(prefix).cat(ee.Number(b).int());
});
};
// [START principal_components]
var getPrincipalComponents = function(centered, scale, region) {
var arrays = centered.toArray();
var covar = arrays.reduceRegion({
reducer: ee.Reducer.centeredCovariance(),
geometry: region,
scale: scale,
maxPixels: 1e9
});
var covarArray = ee.Array(covar.get('array'));
var eigens = covarArray.eigen();
var eigenValues = eigens.slice(1, 0, 1);
var eigenVectors = eigens.slice(1, 1);
var arrayImage = arrays.toArray(1);
var principalComponents = ee.Image(eigenVectors).matrixMultiply(arrayImage);
var sdImage = ee.Image(eigenValues.sqrt())
.arrayProject([0]).arrayFlatten([getNewBandNames('sd')]);
return principalComponents
.arrayProject([0])
.arrayFlatten([getNewBandNames('pc')])
.divide(sdImage);
};
var pcImage = getPrincipalComponents(centered, scale, region);
return (pcImage);
}
// map PCA function over collection
var PCA = LandsatCol.map(function(image){return doPCA(image)});
print('pca', PCA);
Extent is my ROI, whereas LandsatCol is a preprocessed image collection. The code above produces an error when trying to map the PCA over the image collection (second-to-last line of code). The error reads: "Array: Parameter 'values' is required".
Any suggestions on how to deal with this? And how do I add principal component axis 1 as a band to every image in the image collection?

I figured it out. The error "Array: Parameter 'values' is required" had to do with sparse matrices, which were a product of filtering, clipping and specifying the regions within which to perform the PCA. Earth Engine cannot work with sparse matrices.
Here is the working code. LandsatCol is my preprocessed image collection.
// Display AOI
var point = ee.Geometry.Point([30.2261, -29.458])
Map.centerObject(point,10);
// Preparing imagery for PCA
var Preped = LandsatCol.map(function(image){
var orig = image;
var region = image.geometry();
var scale = 30;
var bandNames = ['Blue', 'Green', 'Red', 'NIR', 'SWIR1', 'SWIR2'];
var meanDict = image.reduceRegion({
reducer: ee.Reducer.mean(),
geometry: region,
scale: scale,
maxPixels: 1e9
});
var means = ee.Image.constant(meanDict.values(bandNames));
var centered = image.subtract(means);
var getNewBandNames = function(prefix) {
var seq = ee.List.sequence(1, 6);
return seq.map(function(b) {
return ee.String(prefix).cat(ee.Number(b).int());
});
};
// PCA function
var getPrincipalComponents = function(centered, scale, region) {
var arrays = centered.toArray();
var covar = arrays.reduceRegion({
reducer: ee.Reducer.centeredCovariance(),
geometry: region,
scale: scale,
maxPixels: 1e9
});
var covarArray = ee.Array(covar.get('array'));
var eigens = covarArray.eigen();
var eigenValues = eigens.slice(1, 0, 1);
var eigenVectors = eigens.slice(1, 1);
var arrayImage = arrays.toArray(1);
var principalComponents = ee.Image(eigenVectors).matrixMultiply(arrayImage);
var sdImage = ee.Image(eigenValues.sqrt())
.arrayProject([0]).arrayFlatten([getNewBandNames('sd')]);
return principalComponents.arrayProject([0])
.arrayFlatten([getNewBandNames('pc')])
.divide(sdImage);
};
var pcImage = getPrincipalComponents(centered, scale, region);
return ee.Image(image.addBands(pcImage));
});
print("PCA imagery: ",Preped);

Related

THREE JS - Rendering on canvas inside of loop not updating canvas

I've got the below code, which takes in a number of scans. I know this to always be 2880. The canvas should split an entire 360° into 2880 sectors. The loop in the code below will always run from 0 to 2880, and in each iteration a bunch of (maybe several hundred) 2px coloured points are rendered in that sector, emanating from the centre of the canvas outward. The loop moves fast; before I upgraded the THREE package, it could render in c. 15 seconds.
The picture draws correctly, but what confuses me is that the call to THREE's render method happens inside the loop, yet nothing is drawn until the last iteration of the loop is complete and then all 2880 sectors appear at once, which isn't the effect I'm going for.
Can anyone advise what I might be missing? It's a 2-D non-interactable image.
Stuff I've tried:-
setTimeout(null, 1000) after the .render() method to make it wait before executing the next iteration of the loop
Considered making it a recursive function with the next iteration of the loop inside of the above setTimeout
Reversing the THREE upgrade as an absolute last resort.
Stuff I've considered:-
Is the loop running too fast for the frame rate, or not giving the screen enough time to update?
Limitation of THREEJs?
const drawReflectivityMap = (scans, reflectivityData, azimuthData, scene, renderer, totalScans, currentScan, cameraPosition, camera, reflectivityColours) => {
currentCamera = camera;
currentRenderer = renderer;
for (let i = 0; i < scans; i++) {
console.log('Drawing Reflectivity ' + i);
var reflectivity = reflectivityData[i];
var azimuth = utils.radians(azimuthData[i]);
var sinMultiplier = Math.sin(azimuth);
var cosMultiplier = Math.cos(azimuth);
var initialRange = mikeUtilities.addRange(mikeUtilities.multiplyRange(mikeUtilities.createRange(0, reflectivity.GateCount, 1), reflectivity.GateSize), reflectivity.FirstGate);
var x = utils.multiplyRange(initialRange, sinMultiplier);
var y = utils.multiplyRange(initialRange, cosMultiplier);
var dataSet = {
x: x,
y: y,
reflectivity: reflectivity
};
var reflectivityColourScale = d3.scaleQuantize().domain([-32.0, 94.5]).range(reflectivityColours);
var pointsMaterial = new THREE.PointsMaterial({
size: 2,
vertexColors: true,
sizeAttenuation: false
});
// x co-ordinate points for each point in this arc
var x = dataSet.x;
// y co-ordinate points for each point in this arc
var y = dataSet.y;
// Reflectivity (rainfall) intensity values for each point in this arc
var reflectivity = dataSet.reflectivity;
var geometry = new THREE.BufferGeometry();
var pointsGraph = [];
var coloursGraph = [];
x.forEach(function (index, i) {
if (reflectivity.MomentDataValues[i] > -33) {
geometry = new THREE.BufferGeometry();
var dataPointColour = new THREE.Color(reflectivityColourScale(reflectivity.MomentDataValues[i]));
pointsGraph.push(x[i], y[i], 0);
coloursGraph.push(dataPointColour.r, dataPointColour.g, dataPointColour.b);
}
});
var pointsGraphArray = new Float32Array(pointsGraph);
var coloursGraphArray = new Float32Array(coloursGraph);
geometry.setAttribute('position', new THREE.BufferAttribute(pointsGraphArray, 3));
geometry.setAttribute('color', new THREE.BufferAttribute(coloursGraphArray, 3));
var pointsMap = new THREE.Points(geometry, pointsMaterial);
scene.add(pointsMap);
renderScene(scene, cameraPosition, renderer);
}
}
function renderScene(scene, cameraPosition,renderer) {
currentCamera.position.z = cameraPosition;
currentRenderer.render(scene, currentCamera);
requestAnimationFrame(renderScene);
}
for (let i = 0; i < scans; i++) {
  setTimeout(() => {
    // Your code goes here
  }, i * 100)
}
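For the effect described above - sectors appearing one by one - the browser needs a chance to paint between iterations; a single synchronous for-loop only presents the final frame. Below is a minimal sketch of spreading the work over animation frames, assuming a hypothetical drawSector(i) helper that contains the body of the original loop (building the points for sector i and adding them to the scene):
let sector = 0;
function drawNextSector() {
  if (sector >= scans) return;            // all sectors drawn
  drawSector(sector);                      // hypothetical: body of the original loop for this sector
  renderer.render(scene, camera);          // paint what has been added so far
  sector++;
  requestAnimationFrame(drawNextSector);   // yield to the browser, then continue
}
requestAnimationFrame(drawNextSector);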

GEE Animation, print thumbnail black

The following code is intended to create a thumbnail animation from Landsat 7 images:
// Feature Collection
var aoi = geometry
print(aoi);
Map.addLayer(aoi);
var centroid = aoi.centroid(1)
print(centroid);
var coors = centroid.coordinates().getInfo()
var x = coors[0]
var y = coors[1];
Map.setCenter(x, y, 10);
// Elaborating the dates
// Getting Temperatures for Every Month
var period = ['-01-01', '-12-01'];
var years = [['1999', '2000'],
['2000', '2001'],
['2001', '2002'],
['2002', '2003'],
['2003', '2004'],
['2004', '2005'],
['2005', '2006'],
['2006', '2007'],
['2007', '2008'],
['2008', '2009'],
['2009', '2010'],
['2010', '2011'],
['2011', '2012'],
['2012', '2013'],
['2013', '2014'],
];
var add_period = function(year){
var start_date = period[0];
var end_date = period[1];
return [year[0] + start_date, year[1] + end_date];
};
var concatenate_year_with_periods = function(years, period){
return years.map(add_period);
};
var Dates = concatenate_year_with_periods(years, period);
/**********************************************************************
Landsat 7
***********************************************************************/
var visualization = {
bands: ['B4', 'B3', 'B2'],
min: 0.0,
max: 0.3,
};
var visualization_ = {
bands: ['B4_median', 'B3_median', 'B2_median'],
min: 0.0,
max: 0.3,
gamma: [0.95, 1.1, 1]
};
// Applies scaling factors.
var cloudMaskL7 = function(image) {
var qa = image.select('BQA');
var cloud = qa.bitwiseAnd(1 << 4)
.and(qa.bitwiseAnd(1 << 6))
.or(qa.bitwiseAnd(1 << 8));
var mask2 = image.mask().reduce(ee.Reducer.min());
return image
//.select(['B3', 'B4'], ['Red', 'NIR'])
.updateMask(cloud.not()).updateMask(mask2)
.set('system:time_start', image.get('system:time_start'));
};
var dataset = ee.ImageCollection('LANDSAT/LE07/C01/T1_TOA')
.filterDate('1999-01-01', '2020-12-31')
.filterBounds(aoi)
//.map(applyScaleFactors)
.map(cloudMaskL7)
.map(function(image){return image.clip(aoi)});
// Creating composites using median pixel value
var median_yearly_landsat_7 = function(start, end){
var dataset_ = dataset.filter(ee.Filter.date(start, end));
var median_yearly = dataset_.reduce(ee.Reducer.median());
return median_yearly;
};
var composite_name_list_l7 = ee.List([]);
var apply_monthly_composite = function(date_list){
var start = date_list[0];
var end = date_list[1];
var output_name = start + "TO" + end + "_LANSAT_7";
var composite = median_yearly_landsat_7(start, end);
composite_name_list_l7 = composite_name_list_l7.add([composite, output_name]);
Map.addLayer(composite, visualization_, output_name, false);
Export.image.toDrive({
image: composite,
description: output_name,
fileFormat: 'GeoTIFF',
crs : 'EPSG:4326',
folder : 'LANDSAT_LST_LAS_LOMAS',
region: aoi
});
return 0;
};
Dates.map(apply_monthly_composite);
/******************************************************************
// Animation gif
// Create RGB visualization images for use as animation frames.
/******************************************************************/
var text = require('users/gena/packages:text');
var annotated_collection_list = ee.List([])
var annotations = [
{position: 'left', offset: '0.25%', margin: '0.25%', property: 'label', scale: 1.5} // large scale because the image is of the whole world; use a smaller scale otherwise
];
var create_annotated_collection = function(image_and_id) {
var img = image_and_id[0];
var image_id = image_and_id[1];
console.log(img);
console.log(image_id);
var img_out = img.visualize(visualization_)
.clip(aoi)//.paint(municipalities, 'FF0000', 2)
.set({'label': image_id});
Map.addLayer(img_out);
var annotated = text.annotateImage(img_out, {}, Bayern, annotations);
annotated_collection.add(annotated);
return 0;
};
var municipalities_geom = geometry;
var n = composite_name_list_l7.size().getInfo();
print(n);
for (var i = 0; i < n; i++) {
var img_info = ee.List(composite_name_list_l7.get(i));
print(img_info);
var img = ee.Image(img_info.get(0));
var img_id = ee.String(img_info.get(1));
var year = ee.String(ee.List(img_id.split("-").get(0)));
var month = ee.String(ee.List(img_id.split("-").get(1)));
var img_id_ = year.getInfo() + "_" + month.getInfo();
var img_out = img.visualize(visualization_)
.set({'label': img_id_});
var annotated = text.annotateImage(img_out, {}, municipalities_geom, annotations);
Map.addLayer(annotated);
var annotated_collection_list = annotated_collection_list.add(annotated)
}
var annotated_col = ee.ImageCollection(annotated_collection_list)
// Define GIF visualization parameters.
var gifParams = {
'region': geometry,
'dimensions': 254,
'crs': 'EPSG:32632',
'framesPerSecond': 1
};
// Print the GIF URL to the console.
print(annotated_col.getVideoThumbURL(gifParams));
// Render the GIF animation in the console.
print(ui.Thumbnail(annotated_col, gifParams));
However, this thumbnail appears black, even though the images I add to the map in the project are displayed with the color visualization parameters that I need.
The geometry parameter is a Polygon I drew using the drawing tool. The coordinates are below:
Coordinates: List (1 element)
0: List (5 elements)
0: [-82.35277628512759,8.432445555054713]
1: [-82.314667459444,8.432445555054713]
2: [-82.314667459444,8.460632259476993]
3: [-82.35277628512759,8.460632259476993]
4: [-82.35277628512759,8.432445555054713]
Could someone tell me why the thumbnail appears black?
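One hedged way to narrow this down (a sketch, not part of the original post) is to render a single frame of annotated_col on its own; if that thumbnail is also black, the problem lies in the frame images themselves (e.g. the visualization ranges after the _median reduction) rather than in the GIF parameters:
// Sanity-check sketch: preview only the first annotated frame
var firstFrame = ee.Image(annotated_col.first());
print(ui.Thumbnail(firstFrame, {region: geometry, dimensions: 254}));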

How to get MapMarker inside a polygon with clustering active

We're looking for a way to get our data points that are inside a polygon using the JavaScript HERE Maps API.
We're adding 4 data points to a ClusterLayer / ClusterProvider and a polygon to the map. 3 of the 4 points are within the drawn polygon (data of the points: a, b, d). The point with data = c is not within the polygon (see the jsfiddle).
We tried to use map.getObjectsWithin, but this function only returns the polygon. We assume that this is caused by the different layers.
What's the best way to get the datapoints which are in the bounds of the polygon?
We try to avoid additional dependencies to solve this issue.
quick demo:
http://jsfiddle.net/4dno0gu2/59/
We found this question, but there wasn't any example, no activity for a long time and no solution.
HereMap getObjectsWithin does not show objects in LocalObjectProvider
The method getObjectsWithin doesn't work for an ObjectProvider; although this is implemented in the JS API, the functionality is simply not exposed as a public method, apologies for that.
Short description of workaround:
obtain the bounding box of the polygon
request all object types you are interested in for this bounding box from the provider
(requestOverlays, requestSpatials, requestMarkers etc.) - in your case requestMarkers
filter out all objects which don't intersect with the polygon
Code:
/**
* Adds a polygon to the map
*
* @param {H.Map} map A HERE Map instance within the application
*/
function addPolygonToMap(map) {
var geoStrip = new H.geo.LineString(
[48.8, 13.5, 100, 48.4, 13.0, 100, 48.4, 13.5, 100]
);
var polygon = new H.map.Polygon(geoStrip, {
style: {
strokeColor: '#829',
lineWidth: 8
},
data: 'polygon'
});
//map.addObject(polygon);
return polygon;
}
function logObjectsInPolygon(map, polygon){
var geoPolygon = polygon.getGeometry();
map.getObjectsWithin(geoPolygon, (o) => {
console.log('found mapObjects: '+ o.length);
o.forEach(x=>{
if(typeof x.getData === 'function'){
console.log(x.getData());
}
});
});
}
function isPointInPolygon(testPoint, polygPoints) {
let result = false;
let j = polygPoints.length - 1;
for(let i = 0, len = j + 1; i < len; i++){
let p = polygPoints[i];
let lP = polygPoints[j];
if(p.y < testPoint.y && lP.y >= testPoint.y || lP.y < testPoint.y && p.y >= testPoint.y){
if((p.x + (testPoint.y - p.y) / (lP.y - p.y) * (lP.x - p.x)) < testPoint.x){
result = !result;
}
}
j = i;
}
return result;
}
/**
* Boilerplate map initialization code starts below:
*/
//Step 1: initialize communication with the platform
var platform = new H.service.Platform({
apikey: 'H6XyiCT0w1t9GgTjqhRXxDMrVj9h78ya3NuxlwM7XUs',
useCIT: true,
useHTTPS: true
});
var defaultLayers = platform.createDefaultLayers();
//Step 2: initialize a map - this map is centered over Europe
var map = new H.Map(document.getElementById('map'),
defaultLayers.raster.normal.map,{
center: {lat:48.5, lng:13.45},
zoom: 10
});
//Step 3: make the map interactive
// MapEvents enables the event system
// Behavior implements default interactions for pan/zoom (also on mobile touch environments)
var behavior = new H.mapevents.Behavior(new H.mapevents.MapEvents(map));
// Create the default UI components
var ui = H.ui.UI.createDefault(map, defaultLayers);
//this marker should go to clusters if there is more data points
var dataPoints = [];
dataPoints.push(new H.clustering.DataPoint(48.5, 13.45,{}, 'a'));
dataPoints.push(new H.clustering.DataPoint(48.5001, 13.45,{}, 'b'));
dataPoints.push(new H.clustering.DataPoint(48.5002, 13.51,{}, 'c')); // not in Polygon
dataPoints.push(new H.clustering.DataPoint(48.53, 13.45,{}, 'd'));
var clusteredDataProvider = new H.clustering.Provider(dataPoints);
var layer = new H.map.layer.ObjectLayer(clusteredDataProvider);
map.addLayer(layer);
// createPolygon to select clustered (noise) points
var polygon = addPolygonToMap(map);
let extPoly = polygon.getGeometry().getExterior();
let seqPointsPoly = [];
extPoly.eachLatLngAlt((lat, lng, alt, idy) => {
seqPointsPoly.push( {y: lat, x: lng});
});
console.log("seqPointsPoly:", seqPointsPoly);
map.addEventListener("tap", (e) => {
let pBbox = polygon.getBoundingBox();
let arrPnts = clusteredDataProvider.requestMarkers(pBbox);
for(let i=0,len=arrPnts.length; i<len; i++){
let m = arrPnts[i];
let p = {y: m.getGeometry().lat, x: m.getGeometry().lng};
let clustData = m.getData();
if(!clustData.getData){
console.log("cluster: is in polygon:", isPointInPolygon(p, seqPointsPoly));
} else if(clustData.getData){
console.log("nois: is in polygon:", clustData.getData(), m.getGeometry(), isPointInPolygon(p, seqPointsPoly));
}else{
console.log("unknown type");
}
}
console.log("clusteredDataProvider:", pBbox, clusteredDataProvider.requestMarkers(pBbox));
// Our expected logging is: points a, b, d
//logObjectsInPolygon(map, polygon);
});
Worked example (tap on map to start processing): http://jsfiddle.net/m1ey7p2h/1/

PixiOverlay on Leaflet, can't add another image because it already had an entry

I'm using Pixi with PixiOverlay on Leaflet. I have the following jsfiddle for a dummy simulation. The objective: once you click Add Image 2, it adds a picture of a hamster at a random location on the map.
It (almost) works.
the problem:
Error message: "BaseTexture added to the cache with an id [hamster] that already had an entry"
I couldn't figure out where to put the loader and how to integrate it properly in terms of code organization (do I need to use it only once? what if I have other layers to add?). So I assume my challenge is here:
this.loader.load((loader, resources) => {...}
Minor: how to reduce the size of the hamster :-)
my JS code (also on jsfiddle)
class Simulation
{
constructor()
{
// center of the map
var center = [1.8650, 51.2094];
// Create the map
this.map = L.map('map').setView(center, 2);
// Set up the OSM layer
L.tileLayer(
'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxZoom: 18
}).addTo(this.map);
this.imagesLayer = new L.layerGroup();
this.imagesLayer.addTo(this.map);
}
_getRandomCoord()
{
var randLat = Math.floor(Math.random() * 90);
randLat *= Math.round(Math.random()) ? 1 : -1;
var randLon = Math.floor(Math.random() * 180);
randLon *= Math.round(Math.random()) ? 1 : -1;
return [randLat,randLon]
}
addImage2()
{
this.loader = new PIXI.Loader()
this.loader.add('hamster', 'https://cdn-icons-png.flaticon.com/512/196/196817.png')
this.loader.load((loader, resources) => {
let markerTexture = resources.hamster.texture
let markerLatLng = this._getRandomCoord()
let marker = new PIXI.Sprite(markerTexture)
marker.anchor.set(0.5, 1)
let pixiContainer = new PIXI.Container()
pixiContainer.addChild(marker)
let firstDraw = true
let prevZoom
let pixiOverlay = L.pixiOverlay(utils => {
let zoom = utils.getMap().getZoom()
let container = utils.getContainer()
let renderer = utils.getRenderer()
let project = utils.latLngToLayerPoint
let scale = utils.getScale()
if (firstDraw) {
let markerCoords = project(markerLatLng)
marker.x = markerCoords.x
marker.y = markerCoords.y
}
if (firstDraw || prevZoom !== zoom) {
marker.scale.set(1 / scale)
}
firstDraw = true
prevZoom = zoom
renderer.render(container)
}, pixiContainer)
this.imagesLayer.addLayer(pixiOverlay);
})
}
addTriangle()
{
console.log("Trinalge")
var polygonLatLngs = [
[51.509, -0.08],
[51.503, -0.06],
[51.51, -15.047],
[21.509, -0.08]
];
var projectedPolygon;
var triangle = new PIXI.Graphics();
var pixiContainer = new PIXI.Container();
pixiContainer.addChild(triangle);
var firstDraw = true;
var prevZoom;
var pixiOverlay = L.pixiOverlay(function(utils) {
var zoom = utils.getMap().getZoom();
var container = utils.getContainer();
var renderer = utils.getRenderer();
var project = utils.latLngToLayerPoint;
var scale = utils.getScale();
if (firstDraw) {
projectedPolygon = polygonLatLngs.map(function(coords) {return project(coords);});
}
if (firstDraw || prevZoom !== zoom) {
triangle.clear();
triangle.lineStyle(3 / scale, 0x3388ff, 1);
triangle.beginFill(0x3388ff, 0.2);
projectedPolygon.forEach(function(coords, index) {
if (index == 0) triangle.moveTo(coords.x, coords.y);
else triangle.lineTo(coords.x, coords.y);
});
triangle.endFill();
}
firstDraw = false;
prevZoom = zoom;
renderer.render(container);
}.bind(this), pixiContainer);
this.imagesLayer.addLayer(pixiOverlay)
}
removeLayer()
{
this.imagesLayer.clearLayers();
}
}
var simulation = new Simulation();
TLDR: Updated jsfiddle:
https://jsfiddle.net/gbsdfm97/
more info below:
First problem: loading resources (textures)
There was an error in the console because you loaded the hamster image on each click:
addImage2()
{
this.loader = new PIXI.Loader()
this.loader.add('hamster', 'https://cdn-icons-png.flaticon.com/512/196/196817.png')
this.loader.load((loader, resources) => {
...
A better approach is to load the image (resource) once at the beginning and then just reuse what is already in memory:
constructor()
{
...
this.markerTexture = null;
this._loadPixiResources();
}
...
_loadPixiResources()
{
this.loader = new PIXI.Loader()
this.loader.add('hamster', 'https://cdn-icons-png.flaticon.com/512/196/196817.png')
this.loader.load((loader, resources) => {
this.markerTexture = resources.hamster.texture;
})
}
...
addImage2()
{
...
let marker = new PIXI.Sprite(this.markerTexture);
Second problem: size of hamsters :)
Scale was set like this:
marker.scale.set(1 / scale)
which was too big, so it was changed to:
// affects size of hamsters:
this.scaleFactor = 0.05;
...
marker.scale.set(this.scaleFactor / scale);
The scale of the hamsters (not the triangles!) is now updated when the zoom changes - i.e. when the user uses the mouse scroll wheel etc.
Third problem: too many layers in pixiOverlay
Previously, each click on the Add Image 2 or Add Triangle button created a new pixiContainer and a new pixiOverlay, which was added as a new layer: this.imagesLayer.addLayer(pixiOverlay);
The new version is a bit simplified: there is only one pixiContainer and one pixiOverlay, created at the beginning:
constructor()
{
...
// Create one Pixi container for pixiOverlay in which we will keep hamsters and triangles:
this.pixiContainer = new PIXI.Container();
let prevZoom;
// Create one pixiOverlay:
this.pixiOverlay = L.pixiOverlay((utils, data) => {
...
}, this.pixiContainer)
this.imagesLayer.addLayer(this.pixiOverlay);
}
this.pixiOverlay is added as one layer
then in the rest of the program we reuse this.pixiOverlay
we also reuse this.pixiContainer because it is returned from utils - see:
let container = utils.getContainer() // <-- this is our "this.pixiContainer"
...
container.addChild(marker)
renderer.render(container)
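With that in place, addImage2 might look roughly like the sketch below (assuming the this.markerTexture, this.pixiContainer and this.pixiOverlay fields introduced above; the updated jsfiddle linked at the top is the authoritative version):
addImage2()
{
  // reuse the texture loaded once in the constructor
  let marker = new PIXI.Sprite(this.markerTexture);
  marker.anchor.set(0.5, 1);
  // remember the lat/lng so the overlay's draw callback can project it
  marker.latLng = this._getRandomCoord();
  // add to the shared container and ask the shared overlay to redraw
  this.pixiContainer.addChild(marker);
  this.pixiOverlay.redraw();
}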
Bonus: Triangles
Now you can add many triangles - one per click.
Note: the triangles do not change scale - this is a difference compared to the hamsters.

Use multiple materials for merged geometries in Three.js

I want to create a Pine using 2 meshes, 1 for the trunk and another one for the bush; this is what I've done:
var pine_geometry = new THREE.Geometry();
var pine_texture_1 = THREE.ImageUtils.loadTexture('./res/textures/4.jpg');
var pine_geometry_1 = new THREE.CylinderGeometry(25, 25, 50, 6);
var pine_material_1 = new THREE.MeshBasicMaterial({
map : pine_texture_1
});
var pine_1 = new THREE.Mesh(pine_geometry_1);
pine_1.position.x = x;
pine_1.position.y = y + 25;
pine_1.position.z = z;
pine_1.updateMatrix();
pine_geometry.merge(pine_1.geometry, pine_1.matrix);
var pine_texture_2 = THREE.ImageUtils.loadTexture('./res/textures/5.jpg');
var pine_geometry_2 = new THREE.CylinderGeometry(0, 70, 250, 8);
var pine_material_2 = new THREE.MeshBasicMaterial({
map : pine_texture_2
});
var pine_2 = new THREE.Mesh(pine_geometry_2);
pine_2.position.x = x;
pine_2.position.y = y + 175;
pine_2.position.z = z;
pine_2.updateMatrix();
pine_geometry.merge(pine_2.geometry, pine_2.matrix);
var pine = new THREE.Mesh(pine_geometry, new THREE.MeshFaceMaterial([pine_material_1, pine_material_2]));
pine.geometry.computeFaceNormals();
pine.geometry.computeVertexNormals();
Game.scene.add(pine);
The Pine is correctly positioned as I want; however, the whole merged shape only uses 1 material instead of the 2 (the whole shape is covered by the 1st), and I want each mesh to keep its respective material when merging both.
What am I doing wrong? Any ideas?
After a lot of research I discovered that I was missing an extra parameter of the 'merge' method of the Geometry object: the last parameter is the index of the material that the mesh must use from the materials array, e.g. 0 -> first material in the 'materials' array... and so on.
So, my final piece of code looks like:
pine_geometry.merge(pine_1.geometry, pine_1.matrix, 0);
var pine_texture_2 = THREE.ImageUtils.loadTexture('./res/textures/5.jpg');
var pine_geometry_2 = new THREE.CylinderGeometry(0, 70, 250, 8);
var pine_material_2 = new THREE.MeshBasicMaterial({
map : pine_texture_2
});
var pine_2 = new THREE.Mesh(pine_geometry_2);
pine_2.position.x = x;
pine_2.position.y = y + 175;
pine_2.position.z = z;
pine_2.updateMatrix();
pine_geometry.merge(pine_2.geometry, pine_2.matrix, 1);
(Note the last numbers I add to each merge).
However, I want to clarify that this practice only works when the geometries being merged are of the same type. In this case we're merging two CylinderGeometry instances; if we wanted to merge, for example, a Cylinder with a Box AND use a MeshFaceMaterial, it wouldn't be recognized properly and the console would throw 'Cannot read property map/attributes of undefined'. We can still merge both geometries, just not while providing multiple materials (that's a mistake I made).
Hope this helps.
Here's a general function to merge meshes with materials. You can also specify whether you want it to return a buffer geometry.
function _mergeMeshes(meshes, toBufferGeometry) {
var finalGeometry,
materials = [],
mergedGeometry = new THREE.Geometry(),
mergeMaterial,
mergedMesh;
meshes.forEach(function(mesh, index) {
mesh.updateMatrix();
mesh.geometry.faces.forEach(function(face) {face.materialIndex = 0;});
mergedGeometry.merge(mesh.geometry, mesh.matrix, index);
materials.push(mesh.material);
});
mergedGeometry.groupsNeedUpdate = true;
mergeMaterial = new THREE.MeshFaceMaterial(materials);
if (toBufferGeometry) {
finalGeometry = new THREE.BufferGeometry().fromGeometry(mergedGeometry);
} else {
finalGeometry = mergedGeometry;
}
mergedMesh = new THREE.Mesh(finalGeometry, mergeMaterial);
mergedMesh.geometry.computeFaceNormals();
mergedMesh.geometry.computeVertexNormals();
return mergedMesh;
}
var mergedMesh = _mergeMeshes([trunkMesh, treeTopMesh], true);
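Note that THREE.Geometry, MeshFaceMaterial and ImageUtils were removed in later three.js releases. A hedged sketch of the same idea with BufferGeometry follows, using the BufferGeometryUtils helper from the examples (depending on your version the function may be named mergeGeometries instead); trunkMaterial and topMaterial stand in for your two textured materials:
import * as THREE from 'three';
import { mergeBufferGeometries } from 'three/examples/jsm/utils/BufferGeometryUtils.js';

const trunkGeometry = new THREE.CylinderGeometry(25, 25, 50, 6);
const topGeometry = new THREE.CylinderGeometry(0, 70, 250, 8);
topGeometry.translate(0, 150, 0); // lift the top above the trunk

// useGroups = true creates one group per source geometry, so an array of
// materials maps one-to-one onto the merged mesh
const merged = mergeBufferGeometries([trunkGeometry, topGeometry], true);
const pine = new THREE.Mesh(merged, [trunkMaterial, topMaterial]);
scene.add(pine); // your scene object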
