I need to fill the polygon with a texture using the canvas renderer. How can I get at the polygon element to do this?
let latlngs = [
[1714, 2549],
[1776, 3336],
[934, 3121],
[836, 2382]
];
let myRenderer = L.canvas({ padding: 0.5 });
let polygon = L.polygon(latlngs, {renderer: myRenderer}).addTo(map);
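One possible approach, as a minimal sketch: extend the canvas renderer and fill the path with a CanvasPattern instead of a solid colour. This relies on Leaflet 1.x's private _fillStroke hook, so it may break between versions; the texture.png URL and the fillPattern option are hypothetical names for this example, and the renderer/polygon below replace the ones from the snippet above.

// Renderer that fills paths with a repeating image when the layer has a
// fillPattern option. Relies on the private _fillStroke hook (Leaflet 1.x).
let PatternCanvas = L.Canvas.extend({
    _fillStroke: function (ctx, layer) {
        let img = layer.options.fillPattern;
        if (img && img.complete) {
            ctx.globalAlpha = layer.options.fillOpacity != null ? layer.options.fillOpacity : 1;
            ctx.fillStyle = ctx.createPattern(img, 'repeat');
            ctx.fill(layer.options.fillRule || 'evenodd');
        }
        // let the default implementation draw the stroke (and the solid fill, if any)
        L.Canvas.prototype._fillStroke.call(this, ctx, layer);
    }
});

let texture = new Image();
texture.src = 'texture.png'; // hypothetical texture image

let myRenderer = new PatternCanvas({ padding: 0.5 });
let polygon = L.polygon(latlngs, {
    renderer: myRenderer,
    fill: false,          // skip the default solid fill; the pattern replaces it
    fillOpacity: 1,
    fillPattern: texture  // custom option read by PatternCanvas
}).addTo(map);

texture.onload = function () { polygon.redraw(); }; // repaint once the image has loaded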
Related:
I'm trying to put a marker at the center point of these coordinates. After calculating the bounds, I use getCenter() to give me the coordinates, but it returns "LatLng(-93.20448, 38.902475)", in longitude/latitude order, so using it in the marker doesn't work; it needs lat, lng. I realize I'm not understanding something, but what is it that I don't understand?
var bounds = L.latLngBounds([[-94.778092, 39.967458], [-91.630869, 39.967458], [-91.630869, 37.8374921], [-94.778092, 37.8374921], [-94.778092, 39.967458]]);
var middle = bounds.getCenter(); alert(middle); // LatLng(-93.20448, 38.902475)
var mk5 = new L.marker(new L.latLng(38.902475, -93.20448));
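It looks like a coordinate-order mix-up: L.latLngBounds expects [lat, lng] pairs, but the arrays above are [lng, lat] (a latitude of -93 isn't valid), so the "swapped" centre is really just echoing the swapped input. A minimal sketch with the corrected order:

// Leaflet expects [lat, lng]; the original arrays were [lng, lat]
var bounds = L.latLngBounds([
    [39.967458, -94.778092],
    [39.967458, -91.630869],
    [37.8374921, -91.630869],
    [37.8374921, -94.778092]
]);
var middle = bounds.getCenter();       // LatLng(38.902475, -93.20448)
var mk5 = L.marker(middle).addTo(map); // getCenter() already returns a LatLng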
My map is set up, the markers are in place, and everything is working perfectly. I now want to add lines between the markers, so I used the code example from Leaflet and added the coords from a couple of markers, but the line isn't showing on the map.
var polylinePoints = [
[3474, 12427],
[2298, 11596],
];
var polyline = L.polyline(polylinePoints).addTo(map);
Then I tried this...
var pointA = new L.LatLng(3474, 12427);
var pointB = new L.LatLng(2298, 11596);
var pointList = [pointA, pointB];
var firstpolyline = new L.Polyline(pointList, {
color: 'red',
weight: 3,
opacity: 0.5,
smoothFactor: 1
});
firstpolyline.addTo(map);
...and the line still isn't showing.
Could the problem be that I'm using pixel coordinates and not actual lat and lng coords? If so, how do I draw lines between markers using pixel coordinates?
Definitely.
Try this:
var pointA = map.layerPointToLatLng(L.point(3474, 12427));
var pointB = map.layerPointToLatLng(L.point(2298, 11596));
var pointList = [pointA, pointB];
var firstpolyline = new L.Polyline(pointList, {
color: 'red',
weight: 3,
opacity: 0.5,
smoothFactor: 1
});
firstpolyline.addTo(map);
You may run into problems with the pixel origin next, though; see map.getPixelOrigin().
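If those pixel values are fixed image coordinates (for example on a CRS.Simple image map) rather than positions relative to the current layer origin, a more robust variant is to unproject them at a fixed zoom level. A sketch, assuming the pixel coordinates are defined at the map's maximum zoom:

// Convert fixed pixel coordinates to LatLngs at a fixed zoom,
// so the result does not depend on the current pixel origin
function pixelToLatLng(x, y) {
    return map.unproject(L.point(x, y), map.getMaxZoom());
}

var pointList = [pixelToLatLng(3474, 12427), pixelToLatLng(2298, 11596)];
L.polyline(pointList, { color: 'red', weight: 3, opacity: 0.5 }).addTo(map);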
I am trying to visualise a three.js scene in Mapbox, using an approach based on this tutorial: https://docs.mapbox.com/mapbox-gl-js/example/add-3d-model/
I have a prepared scene named "threescene", which I add to the custom layer's scene. It contains building geometries. The coordinates are in WGS84, and it seems like the tutorial code should transform them properly.
However, the layer simply does not show up. I don't know if I should do something else with the coordinates, or if there is another problem. I have already attempted to normalise the coordinates within the scene.
My code is as follows:
mapboxgl.accessToken = 'pk.eyJ1IjoiamxpZW1wdCIsImEiOiJjanpzZHNhOGwxZ3RjM2JuenBpcjN4eTh3In0.dnO_1v0NDfRMZBhv-hVvjQ';
var map = window.map = new mapboxgl.Map({
container: 'map',
style: 'mapbox://styles/mapbox/light-v10',
zoom: 18,
center: [6.8309373573, 53.0475174735], // min of bbox
pitch: 60,
antialias: true // create the gl context with MSAA antialiasing, so custom layers are antialiased
});
// parameters to ensure the model is georeferenced correctly on the map
var modelOrigin = [6.8309373573, 53.0475174735]; // min of bbox
var modelAltitude = 0;
var modelRotate = [Math.PI / 2, 0, 0];
var modelAsMercatorCoordinate = mapboxgl.MercatorCoordinate.fromLngLat(modelOrigin, modelAltitude);
// transformation parameters to position, rotate and scale the 3D model onto the map
var modelTransform = {
translateX: modelAsMercatorCoordinate.x,
translateY: modelAsMercatorCoordinate.y,
translateZ: modelAsMercatorCoordinate.z,
rotateX: modelRotate[0],
rotateY: modelRotate[1],
rotateZ: modelRotate[2],
/* Since our 3D model is in real world meters, a scale transform needs to be
* applied since the CustomLayerInterface expects units in MercatorCoordinates.
*/
scale: modelAsMercatorCoordinate.meterInMercatorCoordinateUnits()
};
var THREE = window.THREE;
// configuration of the custom layer for a 3D model per the CustomLayerInterface
var customLayer = {
id: '3d-model',
type: 'custom',
renderingMode: '3d',
onAdd: function(map, gl) {
this.camera = new THREE.Camera();
this.scene = new THREE.Scene();
this.scene.add(threescene); // here I include my scene
// create two three.js lights to illuminate the model
var directionalLight = new THREE.DirectionalLight(0xffffff);
directionalLight.position.set(0, -70, 100).normalize();
this.scene.add(directionalLight);
var directionalLight2 = new THREE.DirectionalLight(0xffffff);
directionalLight2.position.set(0, 70, 100).normalize();
this.scene.add(directionalLight2);
this.map = map;
// use the Mapbox GL JS map canvas for three.js
this.renderer = new THREE.WebGLRenderer({
canvas: map.getCanvas(),
context: gl,
antialias: true
});
this.renderer.autoClear = false;
},
render: function(gl, matrix) {
var rotationX = new THREE.Matrix4().makeRotationAxis(new THREE.Vector3(1, 0, 0), modelTransform.rotateX);
var rotationY = new THREE.Matrix4().makeRotationAxis(new THREE.Vector3(0, 1, 0), modelTransform.rotateY);
var rotationZ = new THREE.Matrix4().makeRotationAxis(new THREE.Vector3(0, 0, 1), modelTransform.rotateZ);
var m = new THREE.Matrix4().fromArray(matrix);
var l = new THREE.Matrix4().makeTranslation(modelTransform.translateX, modelTransform.translateY, modelTransform.translateZ)
.scale(new THREE.Vector3(modelTransform.scale, -modelTransform.scale, modelTransform.scale))
.multiply(rotationX)
.multiply(rotationY)
.multiply(rotationZ);
this.camera.projectionMatrix.elements = matrix;
this.camera.projectionMatrix = m.multiply(l);
this.renderer.state.reset();
this.renderer.render(this.scene, this.camera);
this.map.triggerRepaint();
}
};
map.on('style.load', function() {
map.addLayer(customLayer, 'waterway-label');
});
You say your coordinates are in WGS84, so your model is in WGS84, which has units of degrees; yet later in the code you've kept the tutorial's model scale, which assumes units of meters.
So which units is your model in, meters or degrees, and which coordinate reference system does it use? You'll need to apply the correct scale transform depending on this.
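A quick way to check is to inspect the extents of the scene before adding it to the custom layer; roughly, extents of a few hundredths of a unit point to WGS84 degrees, while extents of hundreds of units point to meters. A minimal sketch:

// Sanity-check the model's units by measuring the scene's bounding box
var bbox = new THREE.Box3().setFromObject(threescene);
var size = new THREE.Vector3();
bbox.getSize(size);
console.log('model extents:', size.x, size.y, size.z);

If the extents turn out to be in degrees, the vertices need converting into Mercator units (for example with mapboxgl.MercatorCoordinate.fromLngLat per vertex, relative to modelOrigin) before the tutorial's meter-based scale and transform will place them correctly.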
I have a map with 20 markers on a GeoJSON layer. They are all built from the same SVG and represent a certain area on the map (meaning they represent a circle of constant radius in meters on the ground). I need these markers to adapt their size to the zoom level.
I've tried to use a circle as a marker, but my marker needs to be an SVG because it is a complex graphic, and a radius option doesn't apply to markers.
Here's the piece of code used to display the markers. Latitude, Longitude and rotationAngle are parsed from a .csv sheet:
for (i in chapters) {
var c = chapters[i];
if (!isNaN(parseFloat(c['Latitude'])) && !isNaN(parseFloat(c['Longitude']))) {
var lat = parseFloat(c['Latitude']);
var lon = parseFloat(c['Longitude']);
var cercleDirection = parseFloat(c['Direction']);
var photoIcon = L.icon({
iconUrl: 'media/Cercle.svg',
iconSize: [220, 220],
iconAnchor: [110, 110],
});
markers.push(
L.marker([lat, lon], {
icon: photoIcon,
rotationAngle: cercleDirection
}));
    }
}
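One way to make each icon cover a constant ground radius is to recompute the icon size whenever the zoom changes, using the Web-Mercator ground resolution at the marker's latitude. A sketch, assuming a hypothetical radiusMeters and the markers array filled in the loop above:

var radiusMeters = 50; // hypothetical: the ground radius (in meters) each icon should cover

// Web-Mercator ground resolution (meters per screen pixel) at a given latitude and zoom
function metersPerPixel(lat, zoom) {
    return 40075016.686 * Math.abs(Math.cos(lat * Math.PI / 180)) / Math.pow(2, zoom + 8);
}

function resizeIcons() {
    var zoom = map.getZoom();
    markers.forEach(function(marker) {
        var lat = marker.getLatLng().lat;
        var sizePx = (2 * radiusMeters) / metersPerPixel(lat, zoom);
        marker.setIcon(L.icon({
            iconUrl: 'media/Cercle.svg',
            iconSize: [sizePx, sizePx],
            iconAnchor: [sizePx / 2, sizePx / 2]
        }));
    });
}

map.on('zoomend', resizeIcons);
resizeIcons();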
I have a GeoJSON polygon that is added to the map at the click of a button. I also have the polygon's style changing on its mousedown event, and the x/y coordinate pairs (the GeoJSON geometry) printing to the console, accessed through the queryRenderedFeatures call in the API.
I now want to make the polygon draggable, like the point example (links below): on mousedown on the polygon, drag it around the map, updating the x/y coordinates of the polygon's nodes throughout the drag while keeping the polygon's size intact.
Is straight mapbox-gl-js the way to do this, or should I be feeding a pre-configured GeoJSON polygon into mapbox-gl-draw's draw polygon mode on a user action?
Any suggestions or examples?
API Drag A Point Example
Drag A Point GitHub Code
Try this:
var isDragging = false;
var polygon = null;      // the feature being dragged
var startCoords = null;  // mouse position at the previous move event

// grab the polygon under the cursor and start dragging
map.on('mousedown', 'polygon-layer', function(e) {
    var features = map.queryRenderedFeatures(e.point, { layers: ['polygon-layer'] });
    if (!features.length) return;
    e.preventDefault();          // keep the map itself from panning during the drag
    polygon = features[0];
    startCoords = e.lngLat;
    isDragging = true;
});

map.on('mousemove', function(e) {
    if (!isDragging) return;
    var coords = e.lngLat;
    var delta = {
        lng: coords.lng - startCoords.lng,
        lat: coords.lat - startCoords.lat
    };
    startCoords = coords;        // measure the next delta from here
    // shift every vertex of the outer ring by the same offset
    polygon.geometry.coordinates[0] = polygon.geometry.coordinates[0].map(function(coord) {
        return [coord[0] + delta.lng, coord[1] + delta.lat];
    });
    map.getSource('polygon-source').setData(polygon);
});

map.on('mouseup', function() {
    isDragging = false;
    polygon = null;
});
This assumes the polygon is stored as a GeoJSON feature and that the layer and source are named 'polygon-layer' and 'polygon-source', respectively; adjust these names to match your setup.
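Regarding the mapbox-gl-draw option raised in the question: if you'd rather not hand-roll the drag logic, you can add the existing feature to Draw and let its built-in simple_select mode handle dragging. A sketch, assuming your feature is in a variable called polygonFeature:

// mapbox-gl-draw's simple_select mode supports dragging whole features
var draw = new MapboxDraw({ displayControlsDefault: false });
map.addControl(draw);

var ids = draw.add(polygonFeature);                     // returns the ids of the added features
draw.changeMode('simple_select', { featureIds: ids });  // select it so it can be dragged right away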