#include "Geometry.hpp"
#include "ClipperUtils.hpp"
#include "ExPolygon.hpp"
#include "Line.hpp"
#include "PolylineCollection.hpp"
#include "clipper.hpp"
#include <algorithm>
#include <cassert>
#include <cmath>
#include <list>
#include <map>
#include <set>
#include <utility>
#include <vector>

#ifdef SLIC3R_DEBUG
#include "SVG.hpp"
#endif

using namespace boost::polygon;  // provides also high() and low()

namespace Slic3r { namespace Geometry {

static bool
sort_points (Point a, Point b)
{
    return (a.x < b.x) || (a.x == b.x && a.y < b.y);
}

/* This implementation is based on Andrew's monotone chain 2D convex hull algorithm */
Polygon
convex_hull(Points points)
{
    assert(points.size() >= 3);
    // sort input points
    std::sort(points.begin(), points.end(), sort_points);

    int n = points.size(), k = 0;
    Polygon hull;
    hull.points.resize(2*n);

    // Build lower hull
    for (int i = 0; i < n; i++) {
        while (k >= 2 && points[i].ccw(hull.points[k-2], hull.points[k-1]) <= 0) k--;
        hull.points[k++] = points[i];
    }

    // Build upper hull
    for (int i = n-2, t = k+1; i >= 0; i--) {
        while (k >= t && points[i].ccw(hull.points[k-2], hull.points[k-1]) <= 0) k--;
        hull.points[k++] = points[i];
    }

    hull.points.resize(k);

    assert( hull.points.front().coincides_with(hull.points.back()) );
    hull.points.pop_back();

    return hull;
}
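
/* Usage sketch (illustrative only, not compiled here; assumes the Point/Points/Polygon
   API used throughout this file):
       Points pts;
       pts.push_back(Point(0, 0));
       pts.push_back(Point(10, 0));
       pts.push_back(Point(10, 10));
       pts.push_back(Point(3, 4));          // interior point, dropped by the hull
       Polygon hull = convex_hull(pts);     // hull boundary, closing point not repeated
*/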

Polygon
convex_hull(const Polygons &polygons)
{
    Points pp;
    for (Polygons::const_iterator p = polygons.begin(); p != polygons.end(); ++p) {
        pp.insert(pp.end(), p->points.begin(), p->points.end());
    }
    return convex_hull(pp);
}

/* accepts a list of points and returns a list of indices
   according to a nearest-neighbor walk */
void
chained_path(const Points &points, std::vector<Points::size_type> &retval, Point start_near)
{
    PointConstPtrs my_points;
    std::map<const Point*,Points::size_type> indices;
    my_points.reserve(points.size());
    for (Points::const_iterator it = points.begin(); it != points.end(); ++it) {
        my_points.push_back(&*it);
        indices[&*it] = it - points.begin();
    }

    retval.reserve(points.size());
    while (!my_points.empty()) {
        Points::size_type idx = start_near.nearest_point_index(my_points);
        start_near = *my_points[idx];
        retval.push_back(indices[ my_points[idx] ]);
        my_points.erase(my_points.begin() + idx);
    }
}

void
chained_path(const Points &points, std::vector<Points::size_type> &retval)
{
    if (points.empty()) return;  // can't call front() on empty vector
    chained_path(points, retval, points.front());
}
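
/* Usage sketch (illustrative only): the result is an index permutation of the input,
   typically used to reorder a parallel container:
       std::vector<Points::size_type> order;
       chained_path(points, order);                 // greedy walk starting at points.front()
       // or, starting near an arbitrary location:
       chained_path(points, order, Point(0, 0));
*/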

/* retval and items must be different containers */
template<class T>
void
chained_path_items(Points &points, T &items, T &retval)
{
    std::vector<Points::size_type> indices;
    chained_path(points, indices);
    for (std::vector<Points::size_type>::const_iterator it = indices.begin(); it != indices.end(); ++it)
        retval.push_back(items[*it]);
}
template void chained_path_items(Points &points, ClipperLib::PolyNodes &items, ClipperLib::PolyNodes &retval);

bool
directions_parallel(double angle1, double angle2, double max_diff)
{
    double diff = fabs(angle1 - angle2);
    max_diff += EPSILON;
    return diff < max_diff || fabs(diff - PI) < max_diff;
}
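
/* For example, directions_parallel(0, PI, 0) is true (opposite directions count as
   parallel within EPSILON), while directions_parallel(0, PI/2, 0) is false. */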

template<class T>
bool
contains(const std::vector<T> &vector, const Point &point)
{
    for (typename std::vector<T>::const_iterator it = vector.begin(); it != vector.end(); ++it) {
        if (it->contains(point)) return true;
    }
    return false;
}
template bool contains(const ExPolygons &vector, const Point &point);

double
rad2deg(double angle)
{
    return angle / PI * 180.0;
}

double
rad2deg_dir(double angle)
{
    angle = (angle < PI) ? (-angle + PI/2.0) : (angle + PI/2.0);
    if (angle < 0) angle += PI;
    return rad2deg(angle);
}

double
deg2rad(double angle)
{
    return PI * angle / 180.0;
}

void
simplify_polygons(const Polygons &polygons, double tolerance, Polygons* retval)
{
    Polygons pp;
    for (Polygons::const_iterator it = polygons.begin(); it != polygons.end(); ++it) {
        Polygon p = *it;
        p.points.push_back(p.points.front());
        p.points = MultiPoint::_douglas_peucker(p.points, tolerance);
        p.points.pop_back();
        pp.push_back(p);
    }
    Slic3r::simplify_polygons(pp, retval);
}

double
linint(double value, double oldmin, double oldmax, double newmin, double newmax)
{
    return (value - oldmin) * (newmax - newmin) / (oldmax - oldmin) + newmin;
}
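
/* Linear remapping of value from [oldmin, oldmax] to [newmin, newmax];
   e.g. linint(5, 0, 10, 0, 100) == 50. */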

Pointfs
arrange(size_t total_parts, Pointf part, coordf_t dist, const BoundingBoxf* bb)
{
    // use actual part size (the largest) plus separation distance (half on each side) in spacing algorithm
    part.x += dist;
    part.y += dist;

    Pointf area;
    if (bb != NULL && bb->defined) {
        area = bb->size();
    } else {
        // bogus area size, large enough not to trigger the error below
        area.x = part.x * total_parts;
        area.y = part.y * total_parts;
    }

    // this is how many cells we have available into which to put parts
    size_t cellw = floor((area.x + dist) / part.x);
    size_t cellh = floor((area.y + dist) / part.y);
    if (total_parts > (cellw * cellh))
        CONFESS("%zu parts won't fit in your print area!\n", total_parts);

    // total space used by cells
    Pointf cells(cellw * part.x, cellh * part.y);

    // bounding box of total space used by cells
    BoundingBoxf cells_bb;
    cells_bb.merge(Pointf(0,0)); // min
    cells_bb.merge(cells);       // max

    // center bounding box to area
    cells_bb.translate(
        (area.x - cells.x) / 2,
        (area.y - cells.y) / 2
    );

    // list of cells, sorted by distance from center
    std::vector<ArrangeItemIndex> cellsorder;

    // work out distance for all cells, sort into list
    for (size_t i = 0; i <= cellw-1; ++i) {
        for (size_t j = 0; j <= cellh-1; ++j) {
            coordf_t cx = linint(i + 0.5, 0, cellw, cells_bb.min.x, cells_bb.max.x);
            coordf_t cy = linint(j + 0.5, 0, cellh, cells_bb.min.y, cells_bb.max.y);

            coordf_t xd = fabs((area.x / 2) - cx);
            coordf_t yd = fabs((area.y / 2) - cy);

            ArrangeItem c;
            c.pos.x   = cx;
            c.pos.y   = cy;
            c.index_x = i;
            c.index_y = j;
            c.dist    = xd * xd + yd * yd - fabs((cellw / 2) - (i + 0.5));

            // binary insertion sort
            {
                coordf_t index = c.dist;
                size_t low  = 0;
                size_t high = cellsorder.size();
                while (low < high) {
                    size_t mid = low + ((high - low) / 2);
                    coordf_t midval = cellsorder[mid].index;

                    if (midval < index) {
                        low = mid + 1;
                    } else if (midval > index) {
                        high = mid;
                    } else {
                        cellsorder.insert(cellsorder.begin() + mid, ArrangeItemIndex(index, c));
                        goto ENDSORT;
                    }
                }
                cellsorder.insert(cellsorder.begin() + low, ArrangeItemIndex(index, c));
            }
            ENDSORT: ;
        }
    }

    // the extents of cells actually used by objects
    coordf_t lx = 0;
    coordf_t ty = 0;
    coordf_t rx = 0;
    coordf_t by = 0;

    // now find cells actually used by objects, map out the extents so we can position correctly
    for (size_t i = 1; i <= total_parts; ++i) {
        ArrangeItemIndex c = cellsorder[i - 1];
        coordf_t cx = c.item.index_x;
        coordf_t cy = c.item.index_y;
        if (i == 1) {
            lx = rx = cx;
            ty = by = cy;
        } else {
            if (cx > rx) rx = cx;
            if (cx < lx) lx = cx;
            if (cy > by) by = cy;
            if (cy < ty) ty = cy;
        }
    }

    // now we actually place objects into cells, positioned such that the left and bottom borders are at 0
    Pointfs positions;
    for (size_t i = 1; i <= total_parts; ++i) {
        ArrangeItemIndex c = cellsorder.front();
        cellsorder.erase(cellsorder.begin());
        coordf_t cx = c.item.index_x - lx;
        coordf_t cy = c.item.index_y - ty;

        positions.push_back(Pointf(cx * part.x, cy * part.y));
    }

    if (bb != NULL && bb->defined) {
        for (Pointfs::iterator p = positions.begin(); p != positions.end(); ++p) {
            p->x += bb->min.x;
            p->y += bb->min.y;
        }
    }

    return positions;
}
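
/* Usage sketch (illustrative only, not compiled here):
       // lay out 6 parts of footprint 20x20 with 5 units of clearance; passing NULL lets
       // the function assume a sufficiently large area instead of a real bed bounding box
       Pointfs positions = arrange(6, Pointf(20, 20), 5, NULL);
       // positions holds one cell position per part, packed around the area center
*/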

void
MedialAxis::build(ThickPolylines* polylines)
{
    construct_voronoi(this->lines.begin(), this->lines.end(), &this->vd);

    /*
    // DEBUG: dump all Voronoi edges
    {
        for (VD::const_edge_iterator edge = this->vd.edges().begin(); edge != this->vd.edges().end(); ++edge) {
            if (edge->is_infinite()) continue;

            ThickPolyline polyline;
            polyline.points.push_back(Point( edge->vertex0()->x(), edge->vertex0()->y() ));
            polyline.points.push_back(Point( edge->vertex1()->x(), edge->vertex1()->y() ));
            polylines->push_back(polyline);
        }
        return;
    }
    */

    typedef const VD::vertex_type vert_t;
    typedef const VD::edge_type   edge_t;

    // collect valid edges (i.e. prune those not belonging to the MAT)
    // note: this keeps twins, so it inserts twice the number of the valid edges
    this->valid_edges.clear();
    {
        std::set<const VD::edge_type*> seen_edges;
        for (VD::const_edge_iterator edge = this->vd.edges().begin(); edge != this->vd.edges().end(); ++edge) {
            // since we only process segments representing closed loops, none of the
            // infinite edges (if any) would be part of our MAT anyway
            if (edge->is_secondary() || edge->is_infinite()) continue;

            // don't re-validate twins
            if (seen_edges.find(&*edge) != seen_edges.end()) continue;
            seen_edges.insert(&*edge);
            seen_edges.insert(edge->twin());

            if (!this->validate_edge(&*edge)) continue;
            this->valid_edges.insert(&*edge);
            this->valid_edges.insert(edge->twin());
        }
    }
    this->edges = this->valid_edges;

    // iterate through the valid edges to build polylines
    while (!this->edges.empty()) {
        const edge_t* edge = *this->edges.begin();

        // start a polyline
        ThickPolyline polyline;
        polyline.points.push_back(Point( edge->vertex0()->x(), edge->vertex0()->y() ));
        polyline.points.push_back(Point( edge->vertex1()->x(), edge->vertex1()->y() ));
        polyline.width.push_back(this->thickness[edge].first);
        polyline.width.push_back(this->thickness[edge].second);

        // remove this edge and its twin from the available edges
        (void)this->edges.erase(edge);
        (void)this->edges.erase(edge->twin());

        // get next points
        this->process_edge_neighbors(edge, &polyline);

        // get previous points
        {
            ThickPolyline rpolyline;
            this->process_edge_neighbors(edge->twin(), &rpolyline);
            polyline.points.insert(polyline.points.begin(), rpolyline.points.rbegin(), rpolyline.points.rend());
            polyline.width.insert(polyline.width.begin(), rpolyline.width.rbegin(), rpolyline.width.rend());
            polyline.endpoints.first = rpolyline.endpoints.second;
        }

        assert(polyline.width.size() == polyline.points.size()*2 - 2);

        // prevent loop endpoints from being extended
        if (polyline.first_point().coincides_with(polyline.last_point())) {
            polyline.endpoints.first  = false;
            polyline.endpoints.second = false;
        }

        // append polyline to result
        polylines->push_back(polyline);
    }
}

void
MedialAxis::build(Polylines* polylines)
{
    ThickPolylines tp;
    this->build(&tp);
    polylines->insert(polylines->end(), tp.begin(), tp.end());
}
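
/* Usage sketch (illustrative only; MedialAxis construction and member visibility are
   defined in Geometry.hpp, so the setup below is a hypothetical outline based on the
   members used in this file: lines, expolygon, min_width, max_width):
       MedialAxis ma(...);                 // configure max_width/min_width and the source ExPolygon
       ma.lines = some_expolygon_lines;    // boundary segments feeding construct_voronoi()
       ThickPolylines skeleton;
       ma.build(&skeleton);                // pruned medial axis with per-point width information
*/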

void
MedialAxis::process_edge_neighbors(const VD::edge_type* edge, ThickPolyline* polyline)
{
    while (true) {
        // Since rot_next() works on the edge starting point but we want
        // to find neighbors on the ending point, we just swap edge with
        // its twin.
        const VD::edge_type* twin = edge->twin();

        // count neighbors for this edge
        std::vector<const VD::edge_type*> neighbors;
        for (const VD::edge_type* neighbor = twin->rot_next(); neighbor != twin;
            neighbor = neighbor->rot_next()) {
            if (this->valid_edges.count(neighbor) > 0) neighbors.push_back(neighbor);
        }

        // if we have a single neighbor then we can continue recursively
        if (neighbors.size() == 1) {
            const VD::edge_type* neighbor = neighbors.front();

            // break if this is a closed loop
            if (this->edges.count(neighbor) == 0) return;

            Point new_point(neighbor->vertex1()->x(), neighbor->vertex1()->y());
            polyline->points.push_back(new_point);
            polyline->width.push_back(this->thickness[neighbor].first);
            polyline->width.push_back(this->thickness[neighbor].second);
            (void)this->edges.erase(neighbor);
            (void)this->edges.erase(neighbor->twin());
            edge = neighbor;
        } else if (neighbors.size() == 0) {
            polyline->endpoints.second = true;
            return;
        } else {
            // T-shaped or star-shaped joint
            return;
        }
    }
}

bool
MedialAxis::validate_edge(const VD::edge_type* edge)
{
    // construct the line representing this edge of the Voronoi diagram
    const Line line(
        Point( edge->vertex0()->x(), edge->vertex0()->y() ),
        Point( edge->vertex1()->x(), edge->vertex1()->y() )
    );

    // discard edge if it lies outside the supplied shape
    // this could maybe be optimized (checking inclusion of the endpoints
    // might give false positives as they might belong to the contour itself)
    if (this->expolygon != NULL) {
        if (line.a.coincides_with(line.b)) {
            // in this case, contains(line) returns a false positive
            if (!this->expolygon->contains(line.a)) return false;
        } else {
            if (!this->expolygon->contains(line)) return false;
        }
    }

    // retrieve the original line segments which generated the edge we're checking
    const VD::cell_type* cell1 = edge->cell();
    const VD::cell_type* cell2 = edge->twin()->cell();
    const Line &segment1 = this->retrieve_segment(cell1);
    const Line &segment2 = this->retrieve_segment(cell2);

    /* Calculate thickness of the section at both the endpoints of this edge.
       Our Voronoi edge is part of a CCW sequence going around its Voronoi cell
       (segment1). This edge's twin goes around segment2. Thus, segment2 is
       oriented in the same direction as our main edge, and segment1 is oriented
       in the same direction as our twin edge.
       We used to only consider the (half-)distances to segment2, and that works
       whenever segment1 and segment2 are almost specular and facing. However,
       at curves they are staggered and they only face each other for a very short
       length (such visibility actually coincides with our very short edge). This
       is why we calculate w0 and w1 in this way.
       When cell1 or cell2 don't refer to the segment but only to an endpoint, we
       calculate the distance to that endpoint instead. */

    coordf_t w0 = cell2->contains_segment()
        ? line.a.perp_distance_to(segment2)*2
        : line.a.distance_to(this->retrieve_endpoint(cell2))*2;

    coordf_t w1 = cell1->contains_segment()
        ? line.b.perp_distance_to(segment1)*2
        : line.b.distance_to(this->retrieve_endpoint(cell1))*2;

    // if this edge is the centerline of a very thin area, we might want to skip it
    // when the area is too thin
    if (w0 < SCALED_EPSILON || w1 < SCALED_EPSILON) {
        if (cell1->contains_segment() && cell2->contains_segment()) {
            // calculate the relative angle between the two boundary segments
            double angle = fabs(segment2.orientation() - segment1.orientation());

            // fabs(angle) ranges from 0 (collinear, same direction) to PI (collinear, opposite direction)
            // we're interested only in segments close to the second case (facing segments)
            // so we allow some tolerance.
            // this filter ensures that we're dealing with a narrow/oriented area (longer than thick)
            // we don't run it on edges not generated by two segments (thus generated by one segment
            // and the endpoint of another segment), since their orientation would not be meaningful
            if (fabs(angle - PI) > PI/5) return false;
        } else {
            return false;
        }
    }

    if (w0 < this->min_width && w1 < this->min_width)
        return false;

    if (w0 > this->max_width && w1 > this->max_width)
        return false;

    this->thickness[edge]         = std::make_pair(w0, w1);
    this->thickness[edge->twin()] = std::make_pair(w1, w0);

    return true;
}

const Line&
MedialAxis::retrieve_segment(const VD::cell_type* cell) const
{
    return this->lines[cell->source_index()];
}

const Point&
MedialAxis::retrieve_endpoint(const VD::cell_type* cell) const
{
    const Line& line = this->retrieve_segment(cell);
    if (cell->source_category() == SOURCE_CATEGORY_SEGMENT_START_POINT) {
        return line.a;
    } else {
        return line.b;
    }
}

} }