How do I translate vertices in a THREE.PointCloud object?

I am trying to:

  • Draw a THREE.PointCloud object with approx. 150k points where points are sent from a web application.
  • Scale the points in the THREE.PointCloud object to achieve a result similar to this (rendered using MayaVi):

The problem is that:

  • Data passed to the THREE.PointCloud object seems to be inaccurate
  • When rendered in three.js, points are arranged into eight cubes, for unknown reasons (I'm not applying any scaling, or transformations to the points)

Example server response (I have included sample data at the bottom of this post):

{'geometry': [[-156, 65, 89],
              [268, 84, 337],
              [-205, 68, 170],
              [-87, 69, 52],
              ...
              [289, 81, 143],
              [141, 78, 280],
              [403, 75, 351]],
 'metadata': {'max': {'x': 421, 'y': 105, 'z': 458},
          'min': {'x': -335, 'y': 63, 'z': 39}}}
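
For reference, the min/max values in metadata are what I expect to use for translating the cloud towards the origin. A minimal sketch of what I have in mind (not code I am actually running yet; it assumes the parsed response is available as topology and that positions is the flat position array built in the code below):

  //Midpoint of the bounding box reported by the server
  var min = topology.metadata.min;
  var max = topology.metadata.max;
  var centreX = (min.x + max.x) / 2;
  var centreY = (min.y + max.y) / 2;
  var centreZ = (min.z + max.z) / 2;

  //Shift every vertex so the cloud is centred at the origin
  for (var j = 0; j < positions.length; j += 3) {
    positions[j]     -= centreX;
    positions[j + 1] -= centreY;
    positions[j + 2] -= centreZ;
  }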

The three.js code used to create the point cloud:

  var container;
  var scene, camera, renderer, controls;
  var geometry, material, mesh;

  init();
  animate();

  function init() {
    scene = new THREE.Scene();
    camera = new THREE.PerspectiveCamera(27, window.innerWidth / window.innerHeight, 5, 5000);
    camera.position.z = 2750;

    //Add a buffer geometry for particle system
    var geometry = new THREE.BufferGeometry();
    var particles = {{ len(topology['geometry']) }};
    var geometry = new THREE.BufferGeometry();
    var positions = new Float32Array(particles * 3);
    var colors = new Float32Array(particles * 3);
    var color = new THREE.Color();

    var i = 0;
    {% for point in topology['geometry'] %}
      var x = {{ point[0] }};
      var y = {{ point[1] }};
      var z = {{ point[2] }};

      //Store the position of the point
      positions[i]     = x;
      positions[i + 1] = y;
      positions[i + 2] = z;

      //Assign a colour to the point
      color.setRGB(0.42, 0.42, 0.42);
      colors[i]     = color.r;
      colors[i + 1] = color.g;
      colors[i + 2] = color.b;
      i+=1;
    {% end %}

    geometry.addAttribute('position', new THREE.BufferAttribute(positions, 3));
    geometry.addAttribute('color', new THREE.BufferAttribute(colors, 3));
    geometry.computeBoundingSphere();

    var material = new THREE.PointCloudMaterial({ size: 15, vertexColors: THREE.VertexColors });
    particleSystem = new THREE.PointCloud(geometry, material);
    scene.add(particleSystem);

    //Lights
    light = new THREE.DirectionalLight(0xffffff);
    light.position.set(1, 1, 1);
    scene.add(light);

    //Set up renderer
    renderer = new THREE.WebGLRenderer({ antialias:false });
    renderer.setSize(window.innerWidth, window.innerHeight);
    renderer.setPixelRatio(window.devicePixelRatio);

    //Attach renderer to #container DOM element
    container = document.getElementById('container');
    container.appendChild(renderer.domElement);

    //Add window listener for resize events
    window.addEventListener('resize', onWindowResize, false);

    //Call render loop
    animate();
  }

  function onWindowResize(){
    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();
    renderer.setSize(window.innerWidth, window.innerHeight);
    render();
  }

  function animate() {
    requestAnimationFrame(animate);
    renderer.render(scene, camera);
  }

  function render(){
    renderer.render(scene, camera);
  }

The scene ends up looking like this:

Any suggestions? I've used the following example code, but I'm having difficulty properly implementing scaling for the points in my dataset: http://threejs.org/examples/#webgl_buffergeometry_particles

Link to a sample of the data that I am working with (2MB, 180k lines): https://gist.githubusercontent.com/TylerJFisher/659e3e233f8aa458feee/raw/889c0dd0093fd0476094af48488aab62c8666271/topology.asc

asked Mar 4, 2015 at 22:19 by Tyler · edited Mar 6, 2015 at 16:30

1 Answer

I used your sample data. Put it in an array, like this:

var data = [
"-156 65 89",
"268 84 337",
"-205 68 170",
"-87 69 52",
...
];

and used THREE.Geometry for the PointCloud:

  var geometry = new THREE.Geometry();
  var colors = [];

  for ( var x = 0; x < data.length; x++ ) {
    var pointCoord = data[ x ].split(" ");
    if ( pointCoord.length != 3 ) continue;
    var currentColor = new THREE.Color( 0.5, 1, 0.5 );
    colors.push( currentColor );
    geometry.vertices.push(
      new THREE.Vector3(
        pointCoord[2],
        pointCoord[1],
        pointCoord[0]
      )
    );
  }

  console.log( geometry.vertices.length );

  geometry.colors = colors;

  var material = new THREE.PointCloudMaterial( { size: 1, vertexColors: THREE.VertexColors } );

  particleSystem = new THREE.PointCloud( geometry, material );
  scene.add( particleSystem );
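
To translate or scale the whole cloud afterwards (which is what the title asks about), it is probably easiest to move and scale the PointCloud object itself rather than rewriting every vertex. A rough sketch, assuming the same geometry and particleSystem as above:

  //Centre the cloud at the origin by offsetting the PointCloud object
  geometry.computeBoundingBox();
  var bb = geometry.boundingBox;
  particleSystem.position.set(
    -( bb.min.x + bb.max.x ) / 2,
    -( bb.min.y + bb.max.y ) / 2,
    -( bb.min.z + bb.max.z ) / 2
  );

  //Optional: exaggerate the vertical (y) axis to bring out the relief, as in the MayaVi render
  particleSystem.scale.set( 1, 2, 1 );

Scaling the Object3D keeps the raw vertex data untouched, so you can try different exaggeration factors without rebuilding the geometry.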

Also, in geodata the x and y coordinates are often swapped (in this case, x and z). If you don't swap them, you get a mirrored object. That's why I put it as

  new THREE.Vector3(
    pointCoord[2],
    pointCoord[1],
    pointCoord[0]
  )

instead of

  new THREE.Vector3(
    pointCoord[0],
    pointCoord[1],
    pointCoord[2]
  )

The result is here: geodata

And yes, some lines in your sample data seem incorrect: they have 1 or 2 values instead of 3.
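
If you want to see how many lines are affected, a quick check over the same data array (just a sketch) is:

  //Count the lines that do not split into exactly three values
  var badLines = data.filter(function ( line ) {
    return line.split(" ").length != 3;
  });
  console.log( badLines.length + " malformed lines skipped" );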
