Squashed commit of the following:
commit 421e56d37d738c8a97dcd01fcee858de54954efc
Merge: 9c5325b 4eb61c0
Author: liabru <[email protected]>
Date:   Tue Jan 12 23:33:43 2021 +0000

    Merge branch 'removeDuplicatePoints' of https://github.com/tumult/matter-js into tumult-removeDuplicatePoints

    # Conflicts:
    #	src/factory/Bodies.js

commit 4eb61c0
Author: Jonathan Deutsch <[email protected]>
Date:   Thu Jul 26 15:37:00 2018 -0700

    change removeDuplicatePoints precision to default in docs

commit 6c5d406
Author: Jonathan Deutsch <[email protected]>
Date:   Thu Jul 26 14:33:39 2018 -0700

    Add removeDuplicatePoints option from polygon-decomp 0.3.0 to Bodies.fromVertices()
liabru committed Jan 12, 2021
1 parent 9c5325b commit a9694e6
Showing 2 changed files with 67 additions and 5 deletions.
66 changes: 62 additions & 4 deletions demo/lib/decomp.js
@@ -4,6 +4,7 @@ module.exports = {
quickDecomp: polygonQuickDecomp,
isSimple: polygonIsSimple,
removeCollinearPoints: polygonRemoveCollinearPoints,
removeDuplicatePoints: polygonRemoveDuplicatePoints,
makeCCW: polygonMakeCCW
};

@@ -179,6 +180,9 @@ function polygonMakeCCW(polygon){
// reverse poly if clockwise
if (!isLeft(polygonAt(polygon, br - 1), polygonAt(polygon, br), polygonAt(polygon, br + 1))) {
polygonReverse(polygon);
return true;
} else {
return false;
}
}

@@ -243,6 +247,27 @@ function polygonCanSee(polygon, a,b) {
return true;
}

/**
* Check if two vertices in the polygon can see each other
* @method canSee2
* @param {Number} a Vertex index 1
* @param {Number} b Vertex index 2
* @return {Boolean}
*/
function polygonCanSee2(polygon, a,b) {
// for each edge
for (var i = 0; i !== polygon.length; ++i) {
// ignore incident edges
if (i === a || i === b || (i + 1) % polygon.length === a || (i + 1) % polygon.length === b){
continue;
}
if( lineSegmentsIntersect(polygonAt(polygon, a), polygonAt(polygon, b), polygonAt(polygon, i), polygonAt(polygon, i+1)) ){
return false;
}
}
return true;
}

/**
* Copy the polygon from vertex i to vertex j.
* @method copy
@@ -526,9 +551,12 @@ function polygonQuickDecomp(polygon, result,reflexVertices,steinerPoints,delta,m
}

for (var j = lowerIndex; j <= upperIndex; ++j) {
if (isLeftOn(polygonAt(poly, i - 1), polygonAt(poly, i), polygonAt(poly, j)) && isRightOn(polygonAt(poly, i + 1), polygonAt(poly, i), polygonAt(poly, j))) {
if (
isLeftOn(polygonAt(poly, i - 1), polygonAt(poly, i), polygonAt(poly, j)) &&
isRightOn(polygonAt(poly, i + 1), polygonAt(poly, i), polygonAt(poly, j))
) {
d = sqdist(polygonAt(poly, i), polygonAt(poly, j));
if (d < closestDist) {
if (d < closestDist && polygonCanSee2(poly, i, j)) {
closestDist = d;
closestIndex = j % polygon.length;
}
@@ -585,6 +613,23 @@ function polygonRemoveCollinearPoints(polygon, precision){
return num;
}

/**
* Remove duplicate points in the polygon.
* @method removeDuplicatePoints
* @param {Number} [precision] The threshold to use when determining whether two points are the same. Use zero for best precision.
*/
function polygonRemoveDuplicatePoints(polygon, precision){
for(var i=polygon.length-1; i>=1; --i){
var pi = polygon[i];
for(var j=i-1; j>=0; --j){
if(points_eq(pi, polygon[j], precision)){
polygon.splice(i,1);
continue;
}
}
}
}

/**
* Check if two scalars are equal
* @static
@@ -596,9 +641,22 @@ function polygonRemoveCollinearPoints(polygon, precision){
*/
function scalar_eq(a,b,precision){
precision = precision || 0;
return Math.abs(a-b) < precision;
return Math.abs(a-b) <= precision;
}

/**
* Check if two points are equal
* @static
* @method points_eq
* @param {Array} a
* @param {Array} b
* @param {Number} [precision]
* @return {Boolean}
*/
function points_eq(a,b,precision){
return scalar_eq(a[0],b[0],precision) && scalar_eq(a[1],b[1],precision);
}

},{}]},{},[1])
(1)
});
});
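
A minimal usage sketch of the new decomp.js helpers above, assuming the poly-decomp 0.3.0 build is available as decomp (for example via require('poly-decomp') or the bundled demo/lib/decomp.js); the sample polygon and precision values are illustrative, not taken from the commit. Because scalar_eq now compares with <= rather than <, a precision of 0 treats only exactly coincident points as duplicates.

// Hypothetical example, not part of the diff above.
var decomp = require('poly-decomp');

// poly-decomp represents vertices as [x, y] pairs
var concave = [[0, 0], [0, 0.005], [40, 0], [40, 40], [0, 40]];

// mutates the array in place: the later near-duplicate of [0, 0] is
// removed because both |dx| and |dy| are <= 0.01
decomp.removeDuplicatePoints(concave, 0.01);
console.log(concave.length); // 4

// with precision 0, only exactly equal points are removed
var exact = [[1, 1], [1, 1], [2, 2]];
decomp.removeDuplicatePoints(exact, 0); // leaves [[1, 1], [2, 2]]
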
6 changes: 5 additions & 1 deletion src/factory/Bodies.js
@@ -193,9 +193,10 @@ var Vector = require('../geometry/Vector');
* @param {bool} [flagInternal=false]
* @param {number} [removeCollinear=0.01]
* @param {number} [minimumArea=10]
* @param {number} [removeDuplicatePoints=0.01]
* @return {body}
*/
Bodies.fromVertices = function(x, y, vertexSets, options, flagInternal, removeCollinear, minimumArea) {
Bodies.fromVertices = function(x, y, vertexSets, options, flagInternal, removeCollinear, minimumArea, removeDuplicatePoints) {
var globals = typeof global !== 'undefined' ? global : window,
decomp,
body,
@@ -220,6 +221,7 @@ var Vector = require('../geometry/Vector');
flagInternal = typeof flagInternal !== 'undefined' ? flagInternal : false;
removeCollinear = typeof removeCollinear !== 'undefined' ? removeCollinear : 0.01;
minimumArea = typeof minimumArea !== 'undefined' ? minimumArea : 10;
removeDuplicatePoints = typeof removeDuplicatePoints !== 'undefined' ? removeDuplicatePoints : 0.01;

if (!decomp) {
Common.warn('Bodies.fromVertices: poly-decomp.js required. Could not decompose vertices. Fallback to convex hull.');
@@ -256,6 +258,8 @@ var Vector = require('../geometry/Vector');
decomp.makeCCW(concave);
if (removeCollinear !== false)
decomp.removeCollinearPoints(concave, removeCollinear);
if (removeDuplicatePoints !== false)
decomp.removeDuplicatePoints(concave, removeDuplicatePoints);

// use the quick decomposition algorithm (Bayazit)
var decomposed = decomp.quickDecomp(concave);
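
On the matter-js side, a brief usage sketch of the extended Bodies.fromVertices signature; the vertex path and threshold values here are illustrative, and poly-decomp must be available for the decomposition path (and therefore the new duplicate-point pass) to run at all.

// Hypothetical example, not part of the diff above.
var Bodies = Matter.Bodies,
    Vertices = Matter.Vertices;

var arrow = Vertices.fromPath('40 0 40 20 100 20 100 80 40 80 40 100 0 50');

// x, y, vertexSets, options, flagInternal, removeCollinear, minimumArea, removeDuplicatePoints
var body = Bodies.fromVertices(200, 200, [arrow], {
    render: { fillStyle: '#556270' }
}, true, 0.01, 10, 0.01);

// omitting the last argument falls back to the 0.01 default added above;
// passing false skips decomp.removeDuplicatePoints entirely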
