Problem making a solid ground in cannon.js

I've run into a problem while making a solid ground that should stop my sphere from falling through the plane.
The line below should prevent the sphere from falling through the plane (and yes, I know the plane body also needs to be rotated):
const planeBody = new CANNON.Body({mass: 0})
but surprisingly it has no effect at all; what should happen is that the sphere stops on the plane instead of falling through it.
There is also another odd problem:
the line below should make the plane fall along with the sphere (since the sphere also has a mass of 1), and that is indeed what happens here:
const planeBody = new CANNON.Body({mass: 1})
but when I rewrite it in this form, it stops working:
const planeBody = new CANNON.Body()
planeBody.mass = 1;
Here is my complete code:
import * as THREE from './threeJS/three.module.js';
import { OrbitControls } from './examples/jsm/controls/OrbitControls.js';
import { GUI } from '../dat.gui/files/dat.gui.module.js';
import * as THREEx from './threeX/threeX.keyboardState.js';
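// NOTE: CANNON is used below but is not imported in this file; it is assumed to be
// available globally (e.g. from a <script> tag) or via `import * as CANNON from 'cannon-es'`.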
// gui
// const gui = new GUI();
// keyboard
const keyboard = new THREEx.default();
// console.log(THREEx.default);
const scene = new THREE.Scene();
scene.background = new THREE.Color(0x777777);
// Meshes
const plane = new THREE.Mesh(
new THREE.PlaneGeometry(40, 40),
new THREE.MeshStandardMaterial({ color: 0xdddddd, side: THREE.DoubleSide })
);
plane.rotateX(Math.PI / 2); // lay the visual plane flat (90° in radians; was 90 / 57.3)
const sphere = new THREE.Mesh(
new THREE.SphereGeometry(3, 64, 32),
new THREE.MeshStandardMaterial({ color: 'red' })
);
sphere.position.y = 5;
let clientWidth = document.documentElement.clientWidth;
let clientHeight = document.documentElement.clientHeight;
// prettier-ignore
const camera = new THREE.PerspectiveCamera(45, clientWidth / clientHeight, 0.1, 1000);
const canvas = document.querySelector('.webgl');
const controls = new OrbitControls(camera, canvas);
controls.enableDamping = true;
camera.position.set(0, 20, 30);
controls.update();
const renderer = new THREE.WebGLRenderer({
canvas: canvas,
antialias: true,
});
renderer.setSize(clientWidth, clientHeight);
renderer.shadowMap.enabled = true;
// lights
const ambientLight = new THREE.AmbientLight(0xffffff, 0.5);
const directionalLight = new THREE.DirectionalLight(0xffffff, 0.5);
directionalLight.position.set(0, 7, 5);
directionalLight.target = sphere;
directionalLight.target.updateMatrixWorld();
directionalLight.castShadow = true;
// shadows
// ! only three types of light support shadows:
// ! spotlight
// ! directional light
// ! point light
// ! castShadow: does the object cast a shadow? receiveShadow: can shadows be drawn on it?
sphere.castShadow = true;
plane.receiveShadow = true;
// Helpers
const AxesHelper = new THREE.AxesHelper(5);
scene.add(AxesHelper, camera, sphere, plane, directionalLight, ambientLight);
// Clock
const clock = new THREE.Clock();
let oldElapsedTime = 0;
// physics
const world = new CANNON.World();
world.gravity.set(0, -9.82, 0);
// Sphere
const sphereShape = new CANNON.Sphere(3);
const sphereBody = new CANNON.Body({
mass: 1,
position: new CANNON.Vec3(0, 5, 0),
shape: sphereShape,
});
world.addBody(sphereBody);
// infinite Plane
const planeShape = new CANNON.Plane();
const planeBody = new CANNON.Body({
mass: 0,
});
planeBody.addShape(planeShape);
planeBody.quaternion.setFromAxisAngle(new CANNON.Vec3(-1, 0, 0), Math.PI * 0.5);
world.addBody(planeBody);
function animate() {
requestAnimationFrame(animate);
let ElapsedTime = clock.getElapsedTime();
let deltaTime = ElapsedTime - oldElapsedTime;
oldElapsedTime = deltaTime;
// to update the world 60 times per second
world.step(1 / 60, deltaTime, 3);
directionalLight.position.z = 3 * Math.sin(ElapsedTime);
directionalLight.position.x = 3 * Math.cos(ElapsedTime);
directionalLight.lookAt(sphere.position);
sphere.position.copy(sphereBody.position);
plane.position.copy(planeBody.position);
// sphere.position.x = keyboard.pressed('right')
// ? sphere.position.x + 0.1
// : keyboard.pressed('left')
// ? sphere.position.x - 0.1
// : sphere.position.x;
// sphere.position.z = keyboard.pressed('up')
// ? sphere.position.z - 0.1
// : keyboard.pressed('down')
// ? sphere.position.z + 0.1
// : sphere.position.z;
controls.update();
renderer.render(scene, camera);
}
animate();
//events
window.addEventListener('resize', () => {
clientWidth = document.documentElement.clientWidth;
clientHeight = document.documentElement.clientHeight;
camera.aspect = clientWidth / clientHeight;
camera.updateProjectionMatrix();
renderer.setSize(clientWidth, clientHeight);
});
window.addEventListener('dblclick', () => {
if (!document.fullscreenElement) {
canvas.requestFullscreen();
} else {
document.exitFullscreen();
}
});
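For reference, here is a minimal sketch of the two things that most likely explain the behaviour described above (this is my reading of the cannon.js / cannon-es API, so treat the details as assumptions; the second snippet reuses the world and clock from the code above):
// 1) A Body constructed without options defaults to mass 0 and is created as a
//    STATIC body, so assigning planeBody.mass = 1 afterwards only takes effect
//    once the body type and mass properties are updated as well:
const dynamicPlaneBody = new CANNON.Body();
dynamicPlaneBody.addShape(new CANNON.Plane());
dynamicPlaneBody.mass = 1;
dynamicPlaneBody.type = CANNON.Body.DYNAMIC; // the default type was STATIC (mass 0)
dynamicPlaneBody.updateMassProperties();     // recompute invMass / inertia
// ...which is equivalent to simply passing { mass: 1 } to the constructor.
// 2) The animation loop should store the previous *elapsed* time, not the
//    previous delta, otherwise deltaTime drifts instead of staying near 1/60 s:
let previousElapsedTime = 0;
function tick() {
  const elapsedTime = clock.getElapsedTime();
  const deltaTime = elapsedTime - previousElapsedTime;
  previousElapsedTime = elapsedTime; // was: oldElapsedTime = deltaTime
  world.step(1 / 60, deltaTime, 3);
}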

Related

Adding Hover Effect to p5.js Data Visualization not working?

I'm currently working on a data visualization in p5.js. The data is stored in a .csv and the values are mapped to constants. These constants are then used to manipulate the position, color and size of the circles.
Here is the code so far:
let table;
function preload(){
table = loadTable("assets/songs_normalize.csv", "csv", "header");
}
function setup() {
createCanvas (3000,3000);
background(133,124,222);
for (let r = 0; r < table.getRowCount( ); r++) {
const artist = table.getString(r, "artist");
const song = table.getString(r, "song");
const duration = table.getNum(r, "duration_ms");
const year = table.getNum(r,"year");
const tempo = table.getNum (r,"tempo");
const energy = table.getNum (r,"energy");
const popularity = table.getNum(r,"popularity");
const y = map(year,1998,2020,50,2950);
const x = map(tempo,60,211,50,2950);
const size = map(duration,0, 484146, 0, 100);
const color = map (popularity,0,89,0,255)
const opacity = map(energy,0.05,1,0,255)
let circleColor = colorfill(color/2,-color,color);
circleColor.setAlpha(opacity);
noStroke();
fill (circleColor);
circle(x,y,size);
}
}
function colorfill(r,g,b) {
return color(r, g, b);
}
And this is what it looks like:
Now I want to add a simple hover effect.
I found a tutorial online that seems to do just what I need, but when applied to my project, no circles are drawn:
let table;
function preload(){
table = loadTable("assets/songs_normalize.csv", "csv", "header");
}
function setup() {
createCanvas (3000,3000);
background(133,124,222);
for (let r = 0; r < table.getRowCount( ); r++) {
const artist = table.getString(r, "artist");
const song = table.getString(r, "song");
const duration = table.getNum(r, "duration_ms");
const year = table.getNum(r,"year");
const tempo = table.getNum (r,"tempo");
const energy = table.getNum (r,"energy");
const popularity = table.getNum(r,"popularity");
const y = map(year,1998,2020,50,2950);
const x = map(tempo,60,211,50,2950);
const size = map(duration,0, 484146, 0, 100);
const color = map (popularity,0,89,0,255)
const opacity = map(energy,0.05,1,0,255)
let circleColor = colorfill(color/2,-color,color);
circleColor.setAlpha(opacity);
noStroke();
//fill (circleColor);
//circle(x,y,size);
}
}
function draw() {}
//hover
let dis = dist(mouseX, mouseY, x, y);
if (dis < 5) {
fill(0);
noStroke();
circle(x, y, size);
//draw information
text(artist,x,y);
text(song,x,y+10);
} else {
fill(circleColor);
noStroke();
circle(x, y, size);
}
function colorfill(r,g,b) {
return color(r, g, b);
}
You forgot to include the CSV file when posting the question; however, I managed to find it on Kaggle (csv).
Here is your solution - click here.
There are several problems with your code:
For hover effects to work on a canvas, the distance between the mouse and each point has to be checked on every frame, so the hover code must live inside the draw() function (your draw() function is empty).
To check the distance between the mouse and each circle, their coordinates and sizes must be stored somewhere; in the solution I stored them in an array of objects named songs, as sketched below.
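A minimal sketch of that idea (assuming the same column names and mappings as in the question; the songs array, the hover radius of size / 2 and the white hover text are my own choices):
let table;
let songs = []; // one object per row: position, size, color and labels
function preload() {
  table = loadTable("assets/songs_normalize.csv", "csv", "header");
}
function setup() {
  createCanvas(3000, 3000);
  for (let r = 0; r < table.getRowCount(); r++) {
    const popularity = table.getNum(r, "popularity");
    const col = map(popularity, 0, 89, 0, 255);
    songs.push({
      artist: table.getString(r, "artist"),
      song: table.getString(r, "song"),
      x: map(table.getNum(r, "tempo"), 60, 211, 50, 2950),
      y: map(table.getNum(r, "year"), 1998, 2020, 50, 2950),
      size: map(table.getNum(r, "duration_ms"), 0, 484146, 0, 100),
      fillColor: color(col / 2, -col, col),
      alpha: map(table.getNum(r, "energy"), 0.05, 1, 0, 255),
    });
  }
}
function draw() {
  background(133, 124, 222); // redraw every frame so hover states can change
  noStroke();
  for (const s of songs) {
    if (dist(mouseX, mouseY, s.x, s.y) < s.size / 2) {
      fill(0);
      circle(s.x, s.y, s.size);
      fill(255); // make the labels readable on the black circle
      text(s.artist, s.x, s.y);
      text(s.song, s.x, s.y + 10);
    } else {
      s.fillColor.setAlpha(s.alpha);
      fill(s.fillColor);
      circle(s.x, s.y, s.size);
    }
  }
}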

Mask out color in Image canvas flutter

I am trying to apply a skin tone to an image on a Flutter canvas.
I've previously used the following code to apply the chroma color by altering pixels when I load the image:
static Future<ui.Image> applyChromaToImage(ui.Image image, String pathForImage, RGBPixel chromaToApply, {RGBPixel previousChromaColor}) async
{
List<ChromaPointRange> chromaPoints = processChromaImageBytes(
image, imgBytes);
for(ChromaPointRange rnge in chromaPoints)
{
for(int y = rnge.yValStart; y <= rnge.yValEnd; y++)
{
RGBPixel currentPixel = RGBPixel.generatePixelFromImagePos(imgBytes, image.width, rnge.xVal, y);
//replace current pixel with skin tone
RGBPixel newPixl = currentPixel.mergeSkinTonePixel(chromaToApply, previousChromaColor);
imgBytes.setUint32((y * image.width + rnge.xVal) * 4, newPixl.getHex());
}
}
final Completer<ui.Image> imageCompleter = new Completer();
//looks the endian format doesn't get set right here
ui.PixelFormat formatToUse = Endian.host == Endian.little ? ui.PixelFormat.rgba8888 : ui.PixelFormat.bgra8888;
ui.decodeImageFromPixels(
imgBytes.buffer.asUint8List(),
image.width,
image.height,
formatToUse,
(ui.Image result) {
imageCompleter.complete(result);
// use your result image
},
);
//return image;
return await imageCompleter.future;
}
static List<ChromaPointRange> processChromaImageBytes(ui.Image image, ByteData imgBytes)
{
List<ChromaPointRange> chromaPoints = [];
ChromaPointRange currentPoints = null;
for(int x = 0; x < image.width; x = x + 1)
{
for(int y = 0; y < image.height; y = y + 1)
{
RGBPixel currentPixel = RGBPixel.generatePixelFromImagePos(imgBytes, image.width, x, y);
if(currentPixel.isSkinTonePixel())
{
if(currentPoints == null)
{
currentPoints = ChromaPointRange.fromEmpty();
currentPoints.xVal = x;
currentPoints.yValStart = y;
}
}
else if(currentPoints != null)
{
currentPoints.yValEnd = y - 1;
chromaPoints.add(currentPoints);
currentPoints = null;
}
}
if(currentPoints != null)
{
currentPoints.yValEnd = image.height - 1;
chromaPoints.add(currentPoints);
currentPoints = null;
}
}
return chromaPoints;
}
which basically checks every pixel in the image to see whether it is within a range of the target color (which is RGB 0, 255, 0), then adjusts the pixel if it is. This works, but it takes a really long time, ~3 seconds for a 1920 x 1080 image.
The end result I want is to paint the image to a canvas with a skin tone applied. I've tried a different strategy: painting the color underneath the image and then trying to mask that color out of the image using canvas color filters. This is 1000% faster, but doesn't quite work.
Here is the code:
renderSprite(Canvas canvasToRender, Offset offsetToRender)
{
Paint imgPaint = new Paint();
if(chromaToApply != null)
{
Paint chromaPaint = new Paint();
chromaPaint.colorFilter = ColorFilter.mode(Color.fromRGBO(chromaToApply.redVal, chromaToApply.greenVal, chromaToApply.blueVal, 1), BlendMode.modulate);
canvasToRender.drawImage(spriteImage, offsetToRender, chromaPaint);
imgPaint.colorFilter = ColorFilter.mode(Color.fromRGBO(0, 255, 0, 1), BlendMode.dstOut);
}
if(spriteImage != null)
canvasToRender.drawImage(spriteImage, offsetToRender, imgPaint);
}
Here is the image that is painted underneath
Here is the image that is painted ontop
So I'm trying to mask out the green so the tan skin tone shows through on specific parts of the image.
I can't seem to find any combination of ColorFilter or anything else that will mask out the green color for me from the canvas. Any suggestions?
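For what it's worth, one approximate approach is a ColorFilter.matrix whose alpha row subtracts the green channel, so pixels close to pure green (0, 255, 0) end up transparent and the skin-tone color painted underneath shows through. This is only a sketch of that idea, not a verified solution for this exact setup, and it will not key out anti-aliased edges cleanly:
import 'package:flutter/painting.dart';

/// Approximate green-screen knockout: the alpha row computes
/// A' = R - G + B + A (clamped to 0..255), so a pure green pixel
/// (0, 255, 0) becomes fully transparent while most other colors
/// stay opaque.
Paint greenKeyPaint() {
  return Paint()
    ..colorFilter = const ColorFilter.matrix(<double>[
      1, 0, 0, 0, 0, // R' = R
      0, 1, 0, 0, 0, // G' = G
      0, 0, 1, 0, 0, // B' = B
      1, -1, 1, 1, 0, // A' = R - G + B + A
    ]);
}

// Intended use inside renderSprite(): paint the skin-tone color first (as in
// the question), then draw the sprite with this paint so the keyed-out green
// lets the color underneath show through:
//   canvasToRender.drawImage(spriteImage, offsetToRender, greenKeyPaint());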

Flutter Custom Painter - missing offset in coefficient calculation

First I convert my image file to a dart:ui Image. Then I set the sizes I will be using with this method:
void setSizes() async {
imgOgSize =
Size(image.value!.width.toDouble(), image.value!.height.toDouble());
double wDiff = imgOgSize!.width - Get.width;
double hDiff = imgOgSize!.height - Get.height;
if (wDiff > hDiff) {
coef.value = imgOgSize!.width / Get.width;
canvasSize =
Size(imgOgSize!.width / coef.value, imgOgSize!.height / coef.value);
} else {
coef.value = imgOgSize!.height / Get.height;
canvasSize =
Size(imgOgSize!.width / coef.value, imgOgSize!.height / coef.value);
}
print(coef.value);
// canvasSize = Size(
// min(imgOgSize!.width, Get.width), min(imgOgSize!.height, Get.height));
}
My logic is to find the dimension that needs to be shrunk more than the other and use that as the coefficient, because if I used fit: BoxFit.scaleDown I would not know what the coefficient ended up being.
The image is now ready to be used on the canvas, but I also need to collect a list of points.
Here is how I get the points:
GestureDetector(
onPanDown: (detailData) {
controller.editor.value!.update(detailData.localPosition);
controller.canvasKey.currentContext!
.findRenderObject()!
.markNeedsPaint();
},
onPanUpdate: (detailData) {
controller.editor.value!.update(detailData.localPosition);
controller.canvasKey.currentContext!
.findRenderObject()!
.markNeedsPaint();
},
child: CustomPaint(
key: controller.canvasKey,
painter: controller.editor.value,
size: controller.canvasSize!,
),
)
Now the overridden paint function looks like this, with fullSize being false:
void paint(Canvas canvas, Size size) {
final imageSize = Size(image.width.toDouble(), image.height.toDouble());
final src = Offset.zero & imageSize;
final dst = Offset.zero & size;
// canvas.drawImageRect(image, src, dst, Paint());
paintImage(
canvas: canvas,
rect: dst,
image: image,
// fit: BoxFit.scaleDown,
repeat: ImageRepeat.noRepeat,
scale: 1 / coef,
alignment: Alignment.center,
flipHorizontally: false,
filterQuality: FilterQuality.high,
);
for (Offset offset in points) {
if (fullSize) {
Offset of = Offset(offset.dx * coef, offset.dy * coef);
canvas.drawCircle(of, 10 * coef, painter);
} else {
canvas.drawCircle(offset, 10, painter);
}
}
}
result:
But now, when I want to export it with this function at the image's original height and width, to get a full-quality picture (using imgOgSize instead of canvasSize):
Future<ui.Image> getImage() async {
final ui.PictureRecorder recorder = ui.PictureRecorder();
editor.value!.fullSize = true;
editor.value!.paint(Canvas(recorder), imgOgSize!);
editor.value!.fullSize = false;
final ui.Picture picture = recorder.endRecording();
ui.Image genImg = await picture.toImage(
imgOgSize!.width.toInt(), imgOgSize!.height.toInt());
// ui.Image genImg = await picture.toImage(canvasSize!.width.toInt(), canvasSize!.height.toInt());
final pngBytes = await genImg.toByteData(format: ui.ImageByteFormat.png);
final buffer = pngBytes!.buffer;
await File('/data/user/0/app/app_flutter/t1.png')
.delete();
await File('/data/user/0/app/app_flutter/t1.png')
.writeAsBytes(
buffer.asUint8List(pngBytes.offsetInBytes, pngBytes.lengthInBytes));
Get.find<HomeController>().imageViewPath.refresh();
Get.to(() => ImageView());
return await picture.toImage(
imgOgSize!.width.toInt(), imgOgSize!.height.toInt());
}
This is the exported .png result.
Am I calculating the coefficient wrong, or is there some offset I am not taking into account? I can also provide more code if something important is missing; please let me know.
Edit: the answer below works for portrait phone orientation, but somehow still not for landscape, so I'm leaving the question open in case someone can help.
I calculated the size wrong.
This is the correct setSizes():
void setSizes() {
imgOgSize =
Size(image.value!.width.toDouble(), image.value!.height.toDouble());
double wC = imgOgSize!.width / Get.width;
double hC = imgOgSize!.height / (Get.height - appBar.preferredSize.height);
if (wC > hC) {
coef.value = wC;
canvasSize =
Size(imgOgSize!.width / coef.value, imgOgSize!.height / coef.value);
} else {
coef.value = hC;
canvasSize =
Size(imgOgSize!.width / coef.value, imgOgSize!.height / coef.value);
}
print(coef.value);
// canvasSize = Size(
// min(imgOgSize!.width, Get.width), min(imgOgSize!.height, Get.height));
}
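To make the corrected, ratio-based logic concrete, here is a small standalone sketch with hypothetical numbers (the 3000x2000 image, the 400x800 screen and the 56 px app bar are made up for illustration):
// Plain Dart, runnable on its own; mirrors the ratio-based setSizes() above.
void main() {
  const imgW = 3000.0, imgH = 2000.0; // original image size
  const viewW = 400.0, viewH = 800.0 - 56.0; // viewport minus app bar height

  final wC = imgW / viewW; // 7.5
  final hC = imgH / viewH; // ~2.69
  final coef = wC > hC ? wC : hC; // 7.5 -> width is the limiting dimension

  // Canvas size that fits the viewport while keeping the aspect ratio.
  print('canvas: ${imgW / coef} x ${imgH / coef}'); // 400 x ~266.7

  // A point tapped at (100, 50) on the canvas corresponds to
  // (100 * coef, 50 * coef) in full-resolution image space, which is what
  // paint() does with the offsets when fullSize is true.
  print('image-space point: (${100 * coef}, ${50 * coef})'); // (750, 375)
}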

Web Audio API oscillator won't make any sound

I'm trying to test a simple oscillator that isn't attached to any world object in the scene, simply a tone, but following the example for an oscillator node does not seem to play anything. Does it require some kind of user interaction first to be allowed to play?
I also have a cube rotating in the scene, but it shouldn't affect anything. The script:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 0.1, 1000 );
var renderer = new THREE.WebGLRenderer();
renderer.setSize( window.innerWidth, window.innerHeight );
document.body.appendChild( renderer.domElement );
var geometry = new THREE.BoxGeometry();
var material = new THREE.MeshBasicMaterial( { color: 0x00ff00 } );
var cube = new THREE.Mesh( geometry, material );
scene.add( cube );
camera.position.z = 5;
function animate() {
requestAnimationFrame( animate );
cube.rotation.x += 0.05;
cube.rotation.y += 0.01;
renderer.render( scene, camera );
}
animate();
// create web audio api context
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
// create Oscillator node
var oscillator = audioCtx.createOscillator();
oscillator.type = 'square';
oscillator.frequency.setValueAtTime(440, audioCtx.currentTime); // value in hertz
var gainNode = audioCtx.createGain();
oscillator.connect(gainNode);
oscillator.connect(audioCtx.destination);
var initialFreq = 3000;
var initialVol = 0.500;
gainNode.gain.value = initialVol;
gainNode.gain.minValue = initialVol;
gainNode.gain.maxValue = initialVol;
oscillator.frequency.value = 3000;
oscillator.start();
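Most likely yes: browsers with an autoplay policy create the AudioContext in the 'suspended' state until there has been a user gesture, so the oscillator starts but nothing is audible. Here is a minimal standalone sketch of the usual fix; it also routes the oscillator through the gain node instead of connecting it straight to the destination (note that gain.minValue and gain.maxValue are read-only properties of an AudioParam):
// Sketch: start audio only after a user gesture so the (possibly
// suspended) AudioContext is allowed to run.
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();

function startTone() {
  // resume() is required in browsers that create the context as 'suspended'.
  audioCtx.resume().then(() => {
    const oscillator = audioCtx.createOscillator();
    const gainNode = audioCtx.createGain();

    oscillator.type = 'square';
    oscillator.frequency.setValueAtTime(440, audioCtx.currentTime);
    gainNode.gain.value = 0.5;

    // Route the signal through the gain node, then to the speakers.
    oscillator.connect(gainNode);
    gainNode.connect(audioCtx.destination);

    oscillator.start();
    oscillator.stop(audioCtx.currentTime + 1); // play for one second
  });
}

// Any user gesture works; a click is the simplest.
window.addEventListener('click', startTone, { once: true });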

Change the color of vertices and lines using canvas.drawVertices

The regular Paint object doesn't seem to alter the appearance of any lines drawn using drawVertices. Here's what I've tried.
// Doesn't do anything
var brush = Paint()..color = Colors.blue..strokeWidth = 4..style = PaintingStyle.stroke;
// The only shaders I've found are UI.Gradient and UI.ImageShader
brush = brush..shader = UI.Gradient.linear(
Offset(0, 0),
// absurd numbers
Offset(10000, 10000),
[Colors.blue, Colors.blue],
);
canvas.drawVertices(
UI.Vertices.raw(
VertexMode.triangleStrip,
storage,
// This will change the colors of the faces not the lines
// colors: _encodeColorList(storageCount.map((_) => Colors.white).toList())
),
BlendMode.dstIn,
brush,
);
// Copied from dart:ui
Int32List _encodeColorList(List<Color> colors) {
final int colorCount = colors.length;
final Int32List result = Int32List(colorCount);
for (int i = 0; i < colorCount; ++i) result[i] = colors[i].value;
return result;
}
The line colors are always black:
EDIT:
It might be a framework bug. Here is the GitHub issue for reference.
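If the immediate goal is simply visible, colored edges, one workaround while the issue is open is to draw the faces with drawVertices and then draw the edges yourself with canvas.drawLine, which does honor Paint.color and Paint.strokeWidth. A sketch with hypothetical triangle-strip positions:
import 'dart:ui' as ui;
import 'package:flutter/material.dart';

// Sketch: fill the faces with drawVertices, then overlay colored edges with
// drawLine so their color and width can be controlled by a regular Paint.
class WireframePainter extends CustomPainter {
  @override
  void paint(Canvas canvas, Size size) {
    // Hypothetical triangle-strip positions.
    final positions = <Offset>[
      const Offset(50, 50),
      const Offset(150, 80),
      const Offset(60, 180),
      const Offset(170, 200),
    ];

    // Faces (filled triangles) via drawVertices.
    final vertices = ui.Vertices(ui.VertexMode.triangleStrip, positions);
    canvas.drawVertices(vertices, BlendMode.srcOver, Paint()..color = Colors.white);

    // Edges drawn separately; Paint.color and strokeWidth apply here.
    final line = Paint()
      ..color = Colors.blue
      ..strokeWidth = 4;
    for (var i = 0; i + 1 < positions.length; i++) {
      canvas.drawLine(positions[i], positions[i + 1], line);
    }
  }

  @override
  bool shouldRepaint(covariant CustomPainter oldDelegate) => false;
}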