
Emit click events by coords x, y in fabric.js

I started developing a project using three.js and fabric.js, where I can move and transform a texture on a 3D model. I found the coordinates on the model and synced them with the coordinates on the fabric.js canvas, but I don't know how to simulate a click event and get full control over the clicked objects. I found some Stack Overflow topics that demonstrate my problem. Many thanks!

Programmatically select object in a Fabricjs canvas from coords

Raycast mouse clicks from Threejs model to the Fabricjs canvas used for the texture

Emit click events from one canvas to another

Codepen:

https://codepen.io/ricardcreagia/pen/EdEGod

        console.clear();
        console.log("starting scripts...");

        /**
         * Fabricjs
         * @type {fabric}
         */

        var canvas = new fabric.Canvas( "canvas" );
        canvas.backgroundColor = "#FFBE9F";

        var rectangle = new fabric.Rect( {
            top: 100,
            left: 100,
            fill: '#FF6E27',
            width: 100,
            height: 100,
            transparentCorners: false,
            centeredScaling: true,
            borderColor: 'black',
            cornerColor: 'black',
            cornerStrokeColor: 'black'
        } );

        canvas.add( rectangle );


        /**
         * Threejs
         */

        var containerHeight = 512;
        var containerWidth = 512;
        var camera, renderer, container, scene, texture, material, geometry,
            cube;

        var raycaster = new THREE.Raycaster();
        var mouse = new THREE.Vector2();
        var onClickPosition = new THREE.Vector2();

        init();
        animate();


        /**
         * Configurator init function
         */

        function init() {

            /**
             * Camera
             */

            camera = new THREE.PerspectiveCamera( 30, window.innerWidth / window.innerHeight, 0.01, 100 );
            camera.position.set( 0, 0, 3.5 );


            /**
             * Renderer
             */

            container = document.getElementById( "renderer" );
            renderer = new THREE.WebGLRenderer( { antialias: true } );
            renderer.setPixelRatio( window.devicePixelRatio );
            renderer.setSize( containerWidth, containerHeight );
            camera.aspect = container.clientWidth / container.clientHeight;
            camera.updateProjectionMatrix();
            container.appendChild( renderer.domElement );


            /**
             * Scene
             */

            scene = new THREE.Scene();
            scene.background = new THREE.Color( 0x000000 );


            /**
             * Texture and material
             */

            texture = new THREE.Texture( document.getElementById( "canvas" ) );
            texture.anisotropy = renderer.capabilities.getMaxAnisotropy();

            material = new THREE.MeshBasicMaterial( { map: texture } );


            /**
             * Model
             */

             geometry = new THREE.BoxGeometry( 1, 1, 1 );
             cube = new THREE.Mesh( geometry, material );
             scene.add( cube );
        }


        /**
         * Configurator frame render function
         */

        function animate() {
            requestAnimationFrame( animate );

            cube.rotation.x += 0.004;
            cube.rotation.y += 0.001;
            texture.needsUpdate = true;

            renderer.render( scene, camera );
        }


        /**
         * Listeners
         */

        container.addEventListener( "mousedown", onMouseClick, false );


        /**
         * Other methods
         */

        function onMouseClick( evt ) {
            evt.preventDefault();

            var array = getMousePosition( container, evt.clientX, evt.clientY );
            onClickPosition.fromArray( array );

            var intersects = getIntersects( onClickPosition, scene.children );

            if ( intersects.length > 0 && intersects[ 0 ].uv ) {
                var uv = intersects[ 0 ].uv;
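                // transformUv() maps the raw UV hit through the texture's offset/repeat/flipY settings so it matches the canvas orientation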
                intersects[ 0 ].object.material.map.transformUv( uv );

                var circle = new fabric.Circle({
                    radius: 3,
                    left: getRealPosition( "x", uv.x ),
                    top: getRealPosition( "y", uv.y ),
                    fill: 'red'
                });
                canvas.add( circle );
            }
        }

        function getRealPosition( axis, value ) {
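            // axis-specific fudge factor from the original pen, nudging the marker onto the exact click point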
            let CORRECTION_VALUE = axis === "x"
                                    ? 4.5
                                    : 5.5;

            return Math.round( value * 512 ) - CORRECTION_VALUE;
        }

        var getMousePosition = function ( dom, x, y ) {
            var rect = dom.getBoundingClientRect();
            return [ ( x - rect.left ) / rect.width, ( y - rect.top ) / rect.height ];
        };

        var getIntersects = function ( point, objects ) {
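            // convert the normalized [ 0, 1 ] container coordinates to NDC [ -1, 1 ] for the raycaster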
            mouse.set( ( point.x * 2 ) - 1, - ( point.y * 2 ) + 1 );
            raycaster.setFromCamera( mouse, camera );
            return raycaster.intersectObjects( objects );
        };

Please note that this code is not mine; I just found it on Stack Overflow and do not claim authorship.

Some of the answers to the older questions mention the right way to mirror mouse events from one canvas to another:

var simEvt = new MouseEvent(evt.type, {
  clientX: correctedPosition.x,
  clientY: correctedPosition.y
});
canvas.upperCanvasEl.dispatchEvent(simEvt);
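
Here, correctedPosition has to be the click position expressed in client coordinates over fabric's upper canvas. It isn't defined in the snippet, but assuming the 512 x 512 canvas and the getRealPosition() helper from the pen above, it could be derived from the raycast UV hit roughly like this (getCorrectedPosition() is a hypothetical helper, not part of the original answer):

// Hypothetical helper: convert the UV point returned by the raycast into
// clientX/clientY values over fabric's upper canvas, reusing getRealPosition()
// from the pen above.
function getCorrectedPosition(uv) {
  var rect = canvas.upperCanvasEl.getBoundingClientRect();
  return {
    x: rect.left + getRealPosition("x", uv.x),
    y: rect.top + getRealPosition("y", uv.y)
  };
}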

Unfortunately, it's not enough to just dispatch mousedown, mouseup, and mousemove events on the target canvas. The reason is that when a mousedown event is fired on the canvas, fabric.js immediately removes its mouseup and mousemove listeners from the canvas and reattaches them to document instead (I'm guessing because it wants to keep receiving those events even after the mouse leaves the canvas). This means that even though we dispatch events on the target canvas, fabric.js ignores them until a mouseup is fired on document.
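
As a rough sketch (building on the hypothetical getCorrectedPosition() above), the forwarding therefore has to split the events: the initial mousedown can go to fabric's upper canvas, but once the drag has started, the follow-up events have to be dispatched on document:

// Sketch only: after the initial mousedown, fabric.js listens on document,
// so later mousemove / mouseup events must be dispatched there instead of
// on the upper canvas.
function forwardMouseEvent(evt, correctedPosition) {
  var simEvt = new MouseEvent(evt.type, {
    clientX: correctedPosition.x,
    clientY: correctedPosition.y
  });

  if (evt.type === "mousedown") {
    canvas.upperCanvasEl.dispatchEvent(simEvt);
  } else {
    document.dispatchEvent(simEvt);
  }
}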

One way around this is to patch fabric.js internals so that we can detect when a mouse event has a different DOM target than our target canvas and, if that happens, correct the x/y position in a suitable way.

I'm using the following piece of code to patch fabric.Canvas.prototype.getPointer :

// ...
if (e.target !== this.upperCanvasEl) {
  var positionOnScene = getPositionOnScene(container, e);
  pointer.x = positionOnScene.x;
  pointer.y = positionOnScene.y;
}
// ...

(getPositionOnScene() is a helper method that calculates the pointer's position within the mirrored canvas; container is the DOM element three.js renders the scene into.)
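
Reconstructed from that snippet and the pen's existing helpers, the full patch could look roughly like the following (the exact demo code may differ; getPositionOnScene() here is assembled from getMousePosition(), getIntersects(), and getRealPosition() defined above):

// Monkey-patch fabric's pointer lookup: when an event did not originate on
// fabric's own upper canvas, compute the position by raycasting into the
// three.js scene and converting the UV hit into canvas coordinates.
var originalGetPointer = fabric.Canvas.prototype.getPointer;

fabric.Canvas.prototype.getPointer = function (e, ignoreZoom) {
  var pointer = originalGetPointer.call(this, e, ignoreZoom);

  if (e.target !== this.upperCanvasEl) {
    var positionOnScene = getPositionOnScene(container, e);
    if (positionOnScene) {
      pointer.x = positionOnScene.x;
      pointer.y = positionOnScene.y;
    }
  }
  return pointer;
};

function getPositionOnScene(sceneContainer, evt) {
  var array = getMousePosition(sceneContainer, evt.clientX, evt.clientY);
  onClickPosition.fromArray(array);

  var intersects = getIntersects(onClickPosition, scene.children);
  if (intersects.length > 0 && intersects[0].uv) {
    var uv = intersects[0].uv;
    intersects[0].object.material.map.transformUv(uv);
    return {
      x: getRealPosition("x", uv.x),
      y: getRealPosition("y", uv.y)
    };
  }
  return null;
}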

I've put up a demo using the code from the pen in your question: https://codepen.io/shkaper/pen/eYOBQVL
