Merge remote-tracking branch 'origin/master' into ys_resin_cost

Commit 49175c3112
306 changed files with 91525 additions and 9504 deletions
|
@ -37,7 +37,7 @@ set(SLIC3R_GTK "2" CACHE STRING "GTK version to use with wxWidgets on Linux")
|
|||
|
||||
# Proposal for C++ unit tests and sandboxes
|
||||
option(SLIC3R_BUILD_SANDBOXES "Build development sandboxes" OFF)
|
||||
option(SLIC3R_BUILD_TESTS "Build unit tests" OFF)
|
||||
option(SLIC3R_BUILD_TESTS "Build unit tests" ON)
|
||||
|
||||
# Print out the SLIC3R_* cache options
|
||||
get_cmake_property(_cache_vars CACHE_VARIABLES)
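The hunk ends before the loop that actually prints the collected values; a minimal sketch of how such a print loop over ${_cache_vars} might look (the loop below is illustrative only, it is not part of the diff):

    foreach(_cache_var ${_cache_vars})
        if("${_cache_var}" MATCHES "^SLIC3R_")
            # Print each SLIC3R_* cache option together with its current value
            message(STATUS "${_cache_var}: ${${_cache_var}}")
        endif()
    endforeach()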
|
||||
|
@ -173,10 +173,11 @@ if (NOT MSVC AND ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMP
|
|||
# On GCC and Clang, no return from a non-void function is a warning only. Here, we make it an error.
|
||||
add_compile_options(-Werror=return-type)
|
||||
|
||||
#removes LOTS of extraneous Eigen warnings (GCC only supports it since 6.1)
|
||||
#if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" OR CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 6.1)
|
||||
# add_compile_options(-Wno-ignored-attributes) # Tamas: Eigen include dirs are marked as SYSTEM
|
||||
#endif()
|
||||
# removes LOTS of extraneous Eigen warnings (GCC only supports it since 6.1)
|
||||
# https://eigen.tuxfamily.org/bz/show_bug.cgi?id=1221
|
||||
if("${CMAKE_CXX_COMPILER_ID}" MATCHES "Clang" OR CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 6.0)
|
||||
add_compile_options(-Wno-ignored-attributes) # Tamas: Eigen include dirs are marked as SYSTEM
|
||||
endif()
|
||||
|
||||
#GCC generates loads of -Wunknown-pragmas when compiling igl. The fix is not easy due to a bug in gcc, see
|
||||
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66943 or
|
||||
|
@ -190,6 +191,7 @@ if (NOT MSVC AND ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU" OR "${CMAKE_CXX_COMP
|
|||
add_compile_options(-fsanitize=address -fno-omit-frame-pointer)
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=address")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fsanitize=address")
|
||||
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} -fsanitize=address")
|
||||
|
||||
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
|
||||
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -lasan")
|
||||
|
@ -254,7 +256,8 @@ if(NOT WIN32)
|
|||
# boost::process was introduced first in version 1.64.0
|
||||
set(MINIMUM_BOOST_VERSION "1.64.0")
|
||||
endif()
|
||||
find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS system filesystem thread log locale regex)
|
||||
set(_boost_components "system;filesystem;thread;log;locale;regex")
|
||||
find_package(Boost ${MINIMUM_BOOST_VERSION} REQUIRED COMPONENTS ${_boost_components})
|
||||
|
||||
add_library(boost_libs INTERFACE)
|
||||
add_library(boost_headeronly INTERFACE)
|
||||
|
@ -268,37 +271,55 @@ if(NOT SLIC3R_STATIC)
|
|||
target_compile_definitions(boost_headeronly INTERFACE BOOST_LOG_DYN_LINK)
|
||||
endif()
|
||||
|
||||
function(slic3r_remap_configs targets from_Cfg to_Cfg)
|
||||
if(MSVC)
|
||||
string(TOUPPER ${from_Cfg} from_CFG)
|
||||
|
||||
foreach(tgt ${targets})
|
||||
if(TARGET ${tgt})
|
||||
set_target_properties(${tgt} PROPERTIES MAP_IMPORTED_CONFIG_${from_CFG} ${to_Cfg})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
if(TARGET Boost::system)
|
||||
message(STATUS "Boost::boost exists")
|
||||
target_link_libraries(boost_headeronly INTERFACE Boost::boost)
|
||||
|
||||
# Only from cmake 3.12
|
||||
# list(TRANSFORM _boost_components PREPEND Boost:: OUTPUT_VARIABLE _boost_targets)
|
||||
set(_boost_targets "")
|
||||
foreach(comp ${_boost_components})
|
||||
list(APPEND _boost_targets "Boost::${comp}")
|
||||
endforeach()
|
||||
|
||||
target_link_libraries(boost_libs INTERFACE
|
||||
boost_headeronly # includes the custom compile definitions as well
|
||||
Boost::system
|
||||
Boost::filesystem
|
||||
Boost::thread
|
||||
Boost::log
|
||||
Boost::locale
|
||||
Boost::regex
|
||||
${_boost_targets}
|
||||
)
|
||||
slic3r_remap_configs("${_boost_targets}" RelWithDebInfo Release)
|
||||
else()
|
||||
target_include_directories(boost_headeronly INTERFACE ${Boost_INCLUDE_DIRS})
|
||||
target_link_libraries(boost_libs INTERFACE boost_headeronly ${Boost_LIBRARIES})
|
||||
endif()
|
||||
|
||||
|
||||
|
||||
# Find and configure intel-tbb
|
||||
if(SLIC3R_STATIC)
|
||||
set(TBB_STATIC 1)
|
||||
endif()
|
||||
set(TBB_DEBUG 1)
|
||||
find_package(TBB REQUIRED)
|
||||
include_directories(${TBB_INCLUDE_DIRS})
|
||||
add_definitions(${TBB_DEFINITIONS})
|
||||
if(MSVC)
|
||||
# Suppress implicit linking of the TBB libraries by the Visual Studio compiler.
|
||||
add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE)
|
||||
endif()
|
||||
# include_directories(${TBB_INCLUDE_DIRS})
|
||||
# add_definitions(${TBB_DEFINITIONS})
|
||||
# if(MSVC)
|
||||
# # Suppress implicit linking of the TBB libraries by the Visual Studio compiler.
|
||||
# add_definitions(-D__TBB_NO_IMPLICIT_LINKAGE)
|
||||
# endif()
|
||||
# The Intel TBB library will use the std::exception_ptr feature of C++11.
|
||||
add_definitions(-DTBB_USE_CAPTURED_EXCEPTION=0)
|
||||
# add_definitions(-DTBB_USE_CAPTURED_EXCEPTION=0)
|
||||
|
||||
find_package(CURL REQUIRED)
|
||||
include_directories(${CURL_INCLUDE_DIRS})
|
||||
|
@ -375,6 +396,16 @@ add_custom_target(pot
|
|||
COMMENT "Generate pot file from strings in the source tree"
|
||||
)
|
||||
|
||||
find_package(NLopt 1.4 REQUIRED)
|
||||
|
||||
if(SLIC3R_STATIC)
|
||||
set(OPENVDB_USE_STATIC_LIBS ON)
|
||||
set(USE_BLOSC TRUE)
|
||||
endif()
|
||||
|
||||
#find_package(OpenVDB 5.0 COMPONENTS openvdb)
|
||||
#slic3r_remap_configs(IlmBase::Half RelWithDebInfo Release)
|
||||
|
||||
# libslic3r, PrusaSlicer GUI and the PrusaSlicer executable.
|
||||
add_subdirectory(src)
|
||||
set_property(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} PROPERTY VS_STARTUP_PROJECT PrusaSlicer_app_console)
|
||||
|
|
175
cmake/modules/Catch2/Catch.cmake
Normal file
|
@ -0,0 +1,175 @@
|
|||
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
|
||||
# file Copyright.txt or https://cmake.org/licensing for details.
|
||||
|
||||
#[=======================================================================[.rst:
|
||||
Catch
|
||||
-----
|
||||
|
||||
This module defines a function to help use the Catch test framework.
|
||||
|
||||
The :command:`catch_discover_tests` discovers tests by asking the compiled test
|
||||
executable to enumerate its tests. This does not require CMake to be re-run
|
||||
when tests change. However, it may not work in a cross-compiling environment,
|
||||
and setting test properties is less convenient.
|
||||
|
||||
This command is intended to replace use of :command:`add_test` to register
|
||||
tests, and will create a separate CTest test for each Catch test case. Note
|
||||
that this is in some cases less efficient, as common set-up and tear-down logic
|
||||
cannot be shared by multiple test cases executing in the same instance.
|
||||
However, it provides more fine-grained pass/fail information to CTest, which is
|
||||
usually considered more beneficial. By default, the CTest test name is the
|
||||
same as the Catch name; see also ``TEST_PREFIX`` and ``TEST_SUFFIX``.
|
||||
|
||||
.. command:: catch_discover_tests
|
||||
|
||||
Automatically add tests with CTest by querying the compiled test executable
|
||||
for available tests::
|
||||
|
||||
catch_discover_tests(target
|
||||
[TEST_SPEC arg1...]
|
||||
[EXTRA_ARGS arg1...]
|
||||
[WORKING_DIRECTORY dir]
|
||||
[TEST_PREFIX prefix]
|
||||
[TEST_SUFFIX suffix]
|
||||
[PROPERTIES name1 value1...]
|
||||
[TEST_LIST var]
|
||||
)
|
||||
|
||||
``catch_discover_tests`` sets up a post-build command on the test executable
|
||||
that generates the list of tests by parsing the output from running the test
|
||||
with the ``--list-test-names-only`` argument. This ensures that the full
|
||||
list of tests is obtained. Since test discovery occurs at build time, it is
|
||||
not necessary to re-run CMake when the list of tests changes.
|
||||
However, it requires that :prop_tgt:`CROSSCOMPILING_EMULATOR` is properly set
|
||||
in order to function in a cross-compiling environment.
|
||||
|
||||
Additionally, setting properties on tests is somewhat less convenient, since
|
||||
the tests are not available at CMake time. Additional test properties may be
|
||||
assigned to the set of tests as a whole using the ``PROPERTIES`` option. If
|
||||
more fine-grained test control is needed, custom content may be provided
|
||||
through an external CTest script using the :prop_dir:`TEST_INCLUDE_FILES`
|
||||
directory property. The set of discovered tests is made accessible to such a
|
||||
script via the ``<target>_TESTS`` variable.
|
||||
|
||||
The options are:
|
||||
|
||||
``target``
|
||||
Specifies the Catch executable, which must be a known CMake executable
|
||||
target. CMake will substitute the location of the built executable when
|
||||
running the test.
|
||||
|
||||
``TEST_SPEC arg1...``
|
||||
Specifies test cases, wildcarded test cases, tags and tag expressions to
|
||||
pass to the Catch executable with the ``--list-test-names-only`` argument.
|
||||
|
||||
``EXTRA_ARGS arg1...``
|
||||
Any extra arguments to pass on the command line to each test case.
|
||||
|
||||
``WORKING_DIRECTORY dir``
|
||||
Specifies the directory in which to run the discovered test cases. If this
|
||||
option is not provided, the current binary directory is used.
|
||||
|
||||
``TEST_PREFIX prefix``
|
||||
Specifies a ``prefix`` to be prepended to the name of each discovered test
|
||||
case. This can be useful when the same test executable is being used in
|
||||
multiple calls to ``catch_discover_tests()`` but with different
|
||||
``TEST_SPEC`` or ``EXTRA_ARGS``.
|
||||
|
||||
``TEST_SUFFIX suffix``
|
||||
Similar to ``TEST_PREFIX`` except the ``suffix`` is appended to the name of
|
||||
every discovered test case. Both ``TEST_PREFIX`` and ``TEST_SUFFIX`` may
|
||||
be specified.
|
||||
|
||||
``PROPERTIES name1 value1...``
|
||||
Specifies additional properties to be set on all tests discovered by this
|
||||
invocation of ``catch_discover_tests``.
|
||||
|
||||
``TEST_LIST var``
|
||||
Make the list of tests available in the variable ``var``, rather than the
|
||||
default ``<target>_TESTS``. This can be useful when the same test
|
||||
executable is being used in multiple calls to ``catch_discover_tests()``.
|
||||
Note that this variable is only available in CTest.
|
||||
|
||||
#]=======================================================================]
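A hedged usage sketch of catch_discover_tests as documented above (the target name my_tests, the prefix and the extra arguments are illustrative; it also assumes this module's directory is on CMAKE_MODULE_PATH):

    include(Catch)
    catch_discover_tests(my_tests
        TEST_PREFIX "unit."          # prepended to every discovered test name
        EXTRA_ARGS  --durations yes  # forwarded to the Catch executable
        PROPERTIES  TIMEOUT 60       # applied to every discovered test
    )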
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
function(catch_discover_tests TARGET)
|
||||
cmake_parse_arguments(
|
||||
""
|
||||
""
|
||||
"TEST_PREFIX;TEST_SUFFIX;WORKING_DIRECTORY;TEST_LIST"
|
||||
"TEST_SPEC;EXTRA_ARGS;PROPERTIES"
|
||||
${ARGN}
|
||||
)
|
||||
|
||||
if(NOT _WORKING_DIRECTORY)
|
||||
set(_WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
endif()
|
||||
if(NOT _TEST_LIST)
|
||||
set(_TEST_LIST ${TARGET}_TESTS)
|
||||
endif()
|
||||
|
||||
## Generate a unique name based on the extra arguments
|
||||
string(SHA1 args_hash "${_TEST_SPEC} ${_EXTRA_ARGS}")
|
||||
string(SUBSTRING ${args_hash} 0 7 args_hash)
|
||||
|
||||
# Define rule to generate test list for aforementioned test executable
|
||||
set(ctest_include_file "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_include-${args_hash}.cmake")
|
||||
set(ctest_tests_file "${CMAKE_CURRENT_BINARY_DIR}/${TARGET}_tests-${args_hash}.cmake")
|
||||
get_property(crosscompiling_emulator
|
||||
TARGET ${TARGET}
|
||||
PROPERTY CROSSCOMPILING_EMULATOR
|
||||
)
|
||||
add_custom_command(
|
||||
TARGET ${TARGET} POST_BUILD
|
||||
BYPRODUCTS "${ctest_tests_file}"
|
||||
COMMAND "${CMAKE_COMMAND}"
|
||||
-D "TEST_TARGET=${TARGET}"
|
||||
-D "TEST_EXECUTABLE=$<TARGET_FILE:${TARGET}>"
|
||||
-D "TEST_EXECUTOR=${crosscompiling_emulator}"
|
||||
-D "TEST_WORKING_DIR=${_WORKING_DIRECTORY}"
|
||||
-D "TEST_SPEC=${_TEST_SPEC}"
|
||||
-D "TEST_EXTRA_ARGS=${_EXTRA_ARGS}"
|
||||
-D "TEST_PROPERTIES=${_PROPERTIES}"
|
||||
-D "TEST_PREFIX='${_TEST_PREFIX}'"
|
||||
-D "TEST_SUFFIX='${_TEST_SUFFIX}'"
|
||||
-D "TEST_LIST=${_TEST_LIST}"
|
||||
-D "CTEST_FILE=${ctest_tests_file}"
|
||||
-P "${_CATCH_DISCOVER_TESTS_SCRIPT}"
|
||||
VERBATIM
|
||||
)
|
||||
|
||||
file(WRITE "${ctest_include_file}"
|
||||
"if(EXISTS \"${ctest_tests_file}\")\n"
|
||||
" include(\"${ctest_tests_file}\")\n"
|
||||
"else()\n"
|
||||
" add_test(${TARGET}_NOT_BUILT-${args_hash} ${TARGET}_NOT_BUILT-${args_hash})\n"
|
||||
"endif()\n"
|
||||
)
|
||||
|
||||
if(NOT ${CMAKE_VERSION} VERSION_LESS "3.10.0")
|
||||
# Add discovered tests to directory TEST_INCLUDE_FILES
|
||||
set_property(DIRECTORY
|
||||
APPEND PROPERTY TEST_INCLUDE_FILES "${ctest_include_file}"
|
||||
)
|
||||
else()
|
||||
# Add discovered tests as directory TEST_INCLUDE_FILE if possible
|
||||
get_property(test_include_file_set DIRECTORY PROPERTY TEST_INCLUDE_FILE SET)
|
||||
if (NOT ${test_include_file_set})
|
||||
set_property(DIRECTORY
|
||||
PROPERTY TEST_INCLUDE_FILE "${ctest_include_file}"
|
||||
)
|
||||
else()
|
||||
message(FATAL_ERROR
|
||||
"Cannot set more than one TEST_INCLUDE_FILE"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
endfunction()
|
||||
|
||||
###############################################################################
|
||||
|
||||
set(_CATCH_DISCOVER_TESTS_SCRIPT
|
||||
${CMAKE_CURRENT_LIST_DIR}/CatchAddTests.cmake
|
||||
)
|
106
cmake/modules/Catch2/CatchAddTests.cmake
Normal file
|
@ -0,0 +1,106 @@
|
|||
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
|
||||
# file Copyright.txt or https://cmake.org/licensing for details.
|
||||
|
||||
set(prefix "${TEST_PREFIX}")
|
||||
set(suffix "${TEST_SUFFIX}")
|
||||
set(spec ${TEST_SPEC})
|
||||
set(extra_args ${TEST_EXTRA_ARGS})
|
||||
set(properties ${TEST_PROPERTIES})
|
||||
set(script)
|
||||
set(suite)
|
||||
set(tests)
|
||||
|
||||
function(add_command NAME)
|
||||
set(_args "")
|
||||
foreach(_arg ${ARGN})
|
||||
if(_arg MATCHES "[^-./:a-zA-Z0-9_]")
|
||||
set(_args "${_args} [==[${_arg}]==]") # form a bracket_argument
|
||||
else()
|
||||
set(_args "${_args} ${_arg}")
|
||||
endif()
|
||||
endforeach()
|
||||
set(script "${script}${NAME}(${_args})\n" PARENT_SCOPE)
|
||||
endfunction()
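For illustration, an argument containing a character outside the allowed set (for example a space in a test name) is wrapped as a CMake bracket argument, while plain paths are appended verbatim. A hypothetical call and the line it appends to ${script} (names and paths invented):

    add_command(add_test "my first test" /build/tests/my_tests)
    # appends: add_test( [==[my first test]==] /build/tests/my_tests)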
|
||||
|
||||
macro(_add_catch_test_labels LINE)
|
||||
# convert to list of tags
|
||||
string(REPLACE "][" "]\\;[" tags ${line})
|
||||
|
||||
add_command(
|
||||
set_tests_properties "${prefix}${test}${suffix}"
|
||||
PROPERTIES
|
||||
LABELS "${tags}"
|
||||
)
|
||||
endmacro()
|
||||
|
||||
macro(_add_catch_test LINE)
|
||||
set(test ${line})
|
||||
# escape commas to properly handle test cases with commas inside the name
|
||||
string(REPLACE "," "\\," test_name ${test})
|
||||
# ...and add to script
|
||||
add_command(
|
||||
add_test "${prefix}${test}${suffix}"
|
||||
${TEST_EXECUTOR}
|
||||
"${TEST_EXECUTABLE}"
|
||||
"${test_name}"
|
||||
${extra_args}
|
||||
)
|
||||
|
||||
add_command(
|
||||
set_tests_properties "${prefix}${test}${suffix}"
|
||||
PROPERTIES
|
||||
WORKING_DIRECTORY "${TEST_WORKING_DIR}"
|
||||
${properties}
|
||||
)
|
||||
list(APPEND tests "${prefix}${test}${suffix}")
|
||||
endmacro()
|
||||
|
||||
# Run test executable to get list of available tests
|
||||
if(NOT EXISTS "${TEST_EXECUTABLE}")
|
||||
message(FATAL_ERROR
|
||||
"Specified test executable '${TEST_EXECUTABLE}' does not exist"
|
||||
)
|
||||
endif()
|
||||
execute_process(
|
||||
COMMAND ${TEST_EXECUTOR} "${TEST_EXECUTABLE}" ${spec} --list-tests
|
||||
OUTPUT_VARIABLE output
|
||||
RESULT_VARIABLE result
|
||||
)
|
||||
# Catch --list-test-names-only reports the number of tests, so 0 is... surprising
|
||||
if(${result} EQUAL 0)
|
||||
message(WARNING
|
||||
"Test executable '${TEST_EXECUTABLE}' contains no tests!\n"
|
||||
)
|
||||
elseif(${result} LESS 0)
|
||||
message(FATAL_ERROR
|
||||
"Error running test executable '${TEST_EXECUTABLE}':\n"
|
||||
" Result: ${result}\n"
|
||||
" Output: ${output}\n"
|
||||
)
|
||||
endif()
|
||||
|
||||
string(REPLACE "\n" ";" output "${output}")
|
||||
set(test)
|
||||
set(tags_regex "(\\[([^\\[]*)\\])+$")
|
||||
|
||||
# Parse output
|
||||
foreach(line ${output})
|
||||
# lines without leading whitespace are Catch output, not tests
|
||||
if(${line} MATCHES "^[ \t]+")
|
||||
# strip leading spaces and tabs
|
||||
string(REGEX REPLACE "^[ \t]+" "" line ${line})
|
||||
|
||||
if(${line} MATCHES "${tags_regex}")
|
||||
_add_catch_test_labels(${line})
|
||||
else()
|
||||
_add_catch_test(${line})
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# Create a list of all discovered tests, which users may use to e.g. set
|
||||
# properties on the tests
|
||||
add_command(set ${TEST_LIST} ${tests})
|
||||
|
||||
# Write CTest script
|
||||
file(WRITE "${CTEST_FILE}" "${script}")
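For orientation, the generated CTEST_FILE ends up containing plain CTest commands built by add_command above; a hypothetical excerpt (test name, prefix and paths are invented for illustration):

    add_test( [==[unit.Frustum culling]==] /build/tests/my_tests [==[Frustum culling]==])
    set_tests_properties( [==[unit.Frustum culling]==] PROPERTIES WORKING_DIRECTORY /build/tests)
    set( my_tests_TESTS [==[unit.Frustum culling]==])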
|
225
cmake/modules/Catch2/ParseAndAddCatchTests.cmake
Normal file
|
@ -0,0 +1,225 @@
|
|||
#==================================================================================================#
|
||||
# supported macros #
|
||||
# - TEST_CASE, #
|
||||
# - SCENARIO, #
|
||||
# - TEST_CASE_METHOD, #
|
||||
# - CATCH_TEST_CASE, #
|
||||
# - CATCH_SCENARIO, #
|
||||
# - CATCH_TEST_CASE_METHOD. #
|
||||
# #
|
||||
# Usage #
|
||||
# 1. make sure this module is in the path or add this otherwise: #
|
||||
# set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake.modules/") #
|
||||
# 2. make sure that you've enabled testing option for the project by the call: #
|
||||
# enable_testing() #
|
||||
# 3. add the lines to the script for testing target (sample CMakeLists.txt): #
|
||||
# project(testing_target) #
|
||||
# set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake.modules/") #
|
||||
# enable_testing() #
|
||||
# #
|
||||
# find_path(CATCH_INCLUDE_DIR "catch.hpp") #
|
||||
# include_directories(${INCLUDE_DIRECTORIES} ${CATCH_INCLUDE_DIR}) #
|
||||
# #
|
||||
# file(GLOB SOURCE_FILES "*.cpp") #
|
||||
# add_executable(${PROJECT_NAME} ${SOURCE_FILES}) #
|
||||
# #
|
||||
# include(ParseAndAddCatchTests) #
|
||||
# ParseAndAddCatchTests(${PROJECT_NAME}) #
|
||||
# #
|
||||
# The following variables affect the behavior of the script: #
|
||||
# #
|
||||
# PARSE_CATCH_TESTS_VERBOSE (Default OFF) #
|
||||
# -- enables debug messages #
|
||||
# PARSE_CATCH_TESTS_NO_HIDDEN_TESTS (Default OFF) #
|
||||
# -- excludes tests marked with [!hide], [.] or [.foo] tags #
|
||||
# PARSE_CATCH_TESTS_ADD_FIXTURE_IN_TEST_NAME (Default ON) #
|
||||
# -- adds fixture class name to the test name #
|
||||
# PARSE_CATCH_TESTS_ADD_TARGET_IN_TEST_NAME (Default ON) #
|
||||
# -- adds cmake target name to the test name #
|
||||
# PARSE_CATCH_TESTS_ADD_TO_CONFIGURE_DEPENDS (Default OFF) #
|
||||
# -- causes CMake to rerun when file with tests changes so that new tests will be discovered #
|
||||
# #
|
||||
# One can also set (locally) the optional variable OptionalCatchTestLauncher to control the way     #
|
||||
# a test should be run. For instance, to run the tests under MPI, one can write                     #
|
||||
# set(OptionalCatchTestLauncher ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} ${NUMPROC}) #
|
||||
# just before calling this ParseAndAddCatchTests function #
|
||||
# #
|
||||
# The AdditionalCatchParameters optional variable can be used to pass extra arguments to the test   #
|
||||
# command. For example, to include successful tests in the output, one can write #
|
||||
# set(AdditionalCatchParameters --success) #
|
||||
# #
|
||||
# After the script, the ParseAndAddCatchTests_TESTS property for the target, and for each source #
|
||||
# file in the target is set, and contains the list of the tests extracted from that target, or #
|
||||
# from that file. This is useful, for example to add further labels or properties to the tests. #
|
||||
# #
|
||||
#==================================================================================================#
|
||||
|
||||
if (CMAKE_MINIMUM_REQUIRED_VERSION VERSION_LESS 2.8.8)
|
||||
message(FATAL_ERROR "ParseAndAddCatchTests requires CMake 2.8.8 or newer")
|
||||
endif()
|
||||
|
||||
option(PARSE_CATCH_TESTS_VERBOSE "Print Catch to CTest parser debug messages" OFF)
|
||||
option(PARSE_CATCH_TESTS_NO_HIDDEN_TESTS "Exclude tests with [!hide], [.] or [.foo] tags" OFF)
|
||||
option(PARSE_CATCH_TESTS_ADD_FIXTURE_IN_TEST_NAME "Add fixture class name to the test name" ON)
|
||||
option(PARSE_CATCH_TESTS_ADD_TARGET_IN_TEST_NAME "Add target name to the test name" ON)
|
||||
option(PARSE_CATCH_TESTS_ADD_TO_CONFIGURE_DEPENDS "Add test file to CMAKE_CONFIGURE_DEPENDS property" OFF)
|
||||
|
||||
function(ParseAndAddCatchTests_PrintDebugMessage)
|
||||
if(PARSE_CATCH_TESTS_VERBOSE)
|
||||
message(STATUS "ParseAndAddCatchTests: ${ARGV}")
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# This removes the contents between
|
||||
# - block comments (i.e. /* ... */)
|
||||
# - full line comments (i.e. // ... )
|
||||
# The file contents are expected to have been read into '${CppCode}'.
|
||||
# Note: partial (trailing) line comments are kept.
|
||||
function(ParseAndAddCatchTests_RemoveComments CppCode)
|
||||
string(ASCII 2 CMakeBeginBlockComment)
|
||||
string(ASCII 3 CMakeEndBlockComment)
|
||||
string(REGEX REPLACE "/\\*" "${CMakeBeginBlockComment}" ${CppCode} "${${CppCode}}")
|
||||
string(REGEX REPLACE "\\*/" "${CMakeEndBlockComment}" ${CppCode} "${${CppCode}}")
|
||||
string(REGEX REPLACE "${CMakeBeginBlockComment}[^${CMakeEndBlockComment}]*${CMakeEndBlockComment}" "" ${CppCode} "${${CppCode}}")
|
||||
string(REGEX REPLACE "\n[ \t]*//+[^\n]+" "\n" ${CppCode} "${${CppCode}}")
|
||||
|
||||
set(${CppCode} "${${CppCode}}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# Worker function
|
||||
function(ParseAndAddCatchTests_ParseFile SourceFile TestTarget)
|
||||
# If SourceFile is an object library, do not scan it (as it is not a file). Exit without giving a warning about a missing file.
|
||||
if(SourceFile MATCHES "\\\$<TARGET_OBJECTS:.+>")
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Detected OBJECT library: ${SourceFile} this will not be scanned for tests.")
|
||||
return()
|
||||
endif()
|
||||
# According to CMake docs EXISTS behavior is well-defined only for full paths.
|
||||
get_filename_component(SourceFile ${SourceFile} ABSOLUTE)
|
||||
if(NOT EXISTS ${SourceFile})
|
||||
message(WARNING "Cannot find source file: ${SourceFile}")
|
||||
return()
|
||||
endif()
|
||||
ParseAndAddCatchTests_PrintDebugMessage("parsing ${SourceFile}")
|
||||
file(STRINGS ${SourceFile} Contents NEWLINE_CONSUME)
|
||||
|
||||
# Remove block and fullline comments
|
||||
ParseAndAddCatchTests_RemoveComments(Contents)
|
||||
|
||||
# Find definition of test names
|
||||
string(REGEX MATCHALL "[ \t]*(CATCH_)?(TEST_CASE_METHOD|SCENARIO|TEST_CASE)[ \t]*\\([^\)]+\\)+[ \t\n]*{+[ \t]*(//[^\n]*[Tt][Ii][Mm][Ee][Oo][Uu][Tt][ \t]*[0-9]+)*" Tests "${Contents}")
|
||||
|
||||
if(PARSE_CATCH_TESTS_ADD_TO_CONFIGURE_DEPENDS AND Tests)
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Adding ${SourceFile} to CMAKE_CONFIGURE_DEPENDS property")
|
||||
set_property(
|
||||
DIRECTORY
|
||||
APPEND
|
||||
PROPERTY CMAKE_CONFIGURE_DEPENDS ${SourceFile}
|
||||
)
|
||||
endif()
|
||||
|
||||
foreach(TestName ${Tests})
|
||||
# Strip newlines
|
||||
string(REGEX REPLACE "\\\\\n|\n" "" TestName "${TestName}")
|
||||
|
||||
# Get test type and fixture if applicable
|
||||
string(REGEX MATCH "(CATCH_)?(TEST_CASE_METHOD|SCENARIO|TEST_CASE)[ \t]*\\([^,^\"]*" TestTypeAndFixture "${TestName}")
|
||||
string(REGEX MATCH "(CATCH_)?(TEST_CASE_METHOD|SCENARIO|TEST_CASE)" TestType "${TestTypeAndFixture}")
|
||||
string(REGEX REPLACE "${TestType}\\([ \t]*" "" TestFixture "${TestTypeAndFixture}")
|
||||
|
||||
# Get string parts of test definition
|
||||
string(REGEX MATCHALL "\"+([^\\^\"]|\\\\\")+\"+" TestStrings "${TestName}")
|
||||
|
||||
# Strip wrapping quotation marks
|
||||
string(REGEX REPLACE "^\"(.*)\"$" "\\1" TestStrings "${TestStrings}")
|
||||
string(REPLACE "\";\"" ";" TestStrings "${TestStrings}")
|
||||
|
||||
# Validate that a test name and tags have been provided
|
||||
list(LENGTH TestStrings TestStringsLength)
|
||||
if(TestStringsLength GREATER 2 OR TestStringsLength LESS 1)
|
||||
message(FATAL_ERROR "You must provide a valid test name and tags for all tests in ${SourceFile}")
|
||||
endif()
|
||||
|
||||
# Assign name and tags
|
||||
list(GET TestStrings 0 Name)
|
||||
if("${TestType}" STREQUAL "SCENARIO")
|
||||
set(Name "Scenario: ${Name}")
|
||||
endif()
|
||||
if(PARSE_CATCH_TESTS_ADD_FIXTURE_IN_TEST_NAME AND TestFixture)
|
||||
set(CTestName "${TestFixture}:${Name}")
|
||||
else()
|
||||
set(CTestName "${Name}")
|
||||
endif()
|
||||
if(PARSE_CATCH_TESTS_ADD_TARGET_IN_TEST_NAME)
|
||||
set(CTestName "${TestTarget}:${CTestName}")
|
||||
endif()
|
||||
# add target to labels to enable running all tests added from this target
|
||||
set(Labels ${TestTarget})
|
||||
if(TestStringsLength EQUAL 2)
|
||||
list(GET TestStrings 1 Tags)
|
||||
string(TOLOWER "${Tags}" Tags)
|
||||
# remove target from labels if the test is hidden
|
||||
if("${Tags}" MATCHES ".*\\[!?(hide|\\.)\\].*")
|
||||
list(REMOVE_ITEM Labels ${TestTarget})
|
||||
endif()
|
||||
string(REPLACE "]" ";" Tags "${Tags}")
|
||||
string(REPLACE "[" "" Tags "${Tags}")
|
||||
else()
|
||||
# unset tags variable from previous loop
|
||||
unset(Tags)
|
||||
endif()
|
||||
|
||||
list(APPEND Labels ${Tags})
|
||||
|
||||
set(HiddenTagFound OFF)
|
||||
foreach(label ${Labels})
|
||||
string(REGEX MATCH "^!hide|^\\." result ${label})
|
||||
if(result)
|
||||
set(HiddenTagFound ON)
|
||||
break()
|
||||
endif(result)
|
||||
endforeach(label)
|
||||
if(PARSE_CATCH_TESTS_NO_HIDDEN_TESTS AND ${HiddenTagFound} AND ${CMAKE_VERSION} VERSION_LESS "3.9")
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Skipping test \"${CTestName}\" as it has [!hide], [.] or [.foo] label")
|
||||
else()
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Adding test \"${CTestName}\"")
|
||||
if(Labels)
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Setting labels to ${Labels}")
|
||||
endif()
|
||||
|
||||
# Escape commas in the test spec
|
||||
string(REPLACE "," "\\," Name ${Name})
|
||||
|
||||
# Add the test and set its properties
|
||||
add_test(NAME "\"${CTestName}\"" COMMAND ${OptionalCatchTestLauncher} $<TARGET_FILE:${TestTarget}> ${Name} ${AdditionalCatchParameters})
|
||||
# Old CMake versions do not document VERSION_GREATER_EQUAL, so we use VERSION_GREATER with 3.8 instead
|
||||
if(PARSE_CATCH_TESTS_NO_HIDDEN_TESTS AND ${HiddenTagFound} AND ${CMAKE_VERSION} VERSION_GREATER "3.8")
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Setting DISABLED test property")
|
||||
set_tests_properties("\"${CTestName}\"" PROPERTIES DISABLED ON)
|
||||
else()
|
||||
set_tests_properties("\"${CTestName}\"" PROPERTIES FAIL_REGULAR_EXPRESSION "No tests ran"
|
||||
LABELS "${Labels}")
|
||||
endif()
|
||||
set_property(
|
||||
TARGET ${TestTarget}
|
||||
APPEND
|
||||
PROPERTY ParseAndAddCatchTests_TESTS "\"${CTestName}\"")
|
||||
set_property(
|
||||
SOURCE ${SourceFile}
|
||||
APPEND
|
||||
PROPERTY ParseAndAddCatchTests_TESTS "\"${CTestName}\"")
|
||||
endif()
|
||||
|
||||
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
# entry point
|
||||
function(ParseAndAddCatchTests TestTarget)
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Started parsing ${TestTarget}")
|
||||
get_target_property(SourceFiles ${TestTarget} SOURCES)
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Found the following sources: ${SourceFiles}")
|
||||
foreach(SourceFile ${SourceFiles})
|
||||
ParseAndAddCatchTests_ParseFile(${SourceFile} ${TestTarget})
|
||||
endforeach()
|
||||
ParseAndAddCatchTests_PrintDebugMessage("Finished parsing ${TestTarget}")
|
||||
endfunction()
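A hedged sketch of consuming the ParseAndAddCatchTests_TESTS property mentioned in the header comment, for example to apply a common timeout to every extracted test (the target name is illustrative):

    ParseAndAddCatchTests(slicer_tests)
    # The property holds the (quoted) CTest names added from this target
    get_property(_added_tests TARGET slicer_tests PROPERTY ParseAndAddCatchTests_TESTS)
    set_tests_properties(${_added_tests} PROPERTIES TIMEOUT 120)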
|
|
@ -21,8 +21,7 @@
|
|||
set(NLopt_FOUND FALSE)
|
||||
set(NLopt_ERROR_REASON "")
|
||||
set(NLopt_DEFINITIONS "")
|
||||
set(NLopt_LIBS)
|
||||
|
||||
unset(NLopt_LIBS CACHE)
|
||||
|
||||
set(NLopt_DIR $ENV{NLOPT})
|
||||
if(NOT NLopt_DIR)
|
||||
|
@ -48,15 +47,14 @@ if(NOT NLopt_DIR)
|
|||
set(NLopt_ERROR_REASON "${NLopt_ERROR_REASON} Cannot find NLopt header file '${_NLopt_HEADER_FILE_NAME}'.")
|
||||
endif()
|
||||
unset(_NLopt_HEADER_FILE_NAME)
|
||||
unset(_NLopt_HEADER_FILE)
|
||||
|
||||
|
||||
if(NOT NLopt_FOUND)
|
||||
set(NLopt_ERROR_REASON "${NLopt_ERROR_REASON} NLopt not found in system directories (and environment variable NLOPT is not set).")
|
||||
else()
|
||||
get_filename_component(NLopt_INCLUDE_DIR ${_NLopt_HEADER_FILE} DIRECTORY )
|
||||
endif()
|
||||
|
||||
|
||||
unset(_NLopt_HEADER_FILE CACHE)
|
||||
|
||||
else()
|
||||
|
||||
|
@ -95,7 +93,7 @@ else()
|
|||
set(NLopt_ERROR_REASON "${NLopt_ERROR_REASON} Cannot find NLopt header file '${_NLopt_HEADER_FILE_NAME}' in '${NLopt_INCLUDE_DIR}'.")
|
||||
endif()
|
||||
unset(_NLopt_HEADER_FILE_NAME)
|
||||
unset(_NLopt_HEADER_FILE)
|
||||
unset(_NLopt_HEADER_FILE CACHE)
|
||||
|
||||
endif()
|
||||
|
||||
|
@ -114,10 +112,10 @@ if(NLopt_FOUND)
|
|||
message(STATUS "Found NLopt in '${NLopt_DIR}'.")
|
||||
message(STATUS "Using NLopt include directory '${NLopt_INCLUDE_DIR}'.")
|
||||
message(STATUS "Using NLopt library '${NLopt_LIBS}'.")
|
||||
add_library(Nlopt::Nlopt INTERFACE IMPORTED)
|
||||
set_target_properties(Nlopt::Nlopt PROPERTIES INTERFACE_LINK_LIBRARIES ${NLopt_LIBS})
|
||||
set_target_properties(Nlopt::Nlopt PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${NLopt_INCLUDE_DIR})
|
||||
set_target_properties(Nlopt::Nlopt PROPERTIES INTERFACE_COMPILE_DEFINITIONS "${NLopt_DEFINITIONS}")
|
||||
add_library(NLopt::nlopt INTERFACE IMPORTED)
|
||||
set_target_properties(NLopt::nlopt PROPERTIES INTERFACE_LINK_LIBRARIES ${NLopt_LIBS})
|
||||
set_target_properties(NLopt::nlopt PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${NLopt_INCLUDE_DIR})
|
||||
set_target_properties(NLopt::nlopt PROPERTIES INTERFACE_COMPILE_DEFINITIONS "${NLopt_DEFINITIONS}")
|
||||
# target_link_libraries(Nlopt::Nlopt INTERFACE ${NLopt_LIBS})
|
||||
# target_include_directories(Nlopt::Nlopt INTERFACE ${NLopt_INCLUDE_DIR})
|
||||
# target_compile_definitions(Nlopt::Nlopt INTERFACE ${NLopt_DEFINITIONS})
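Once the imported target exists, a consumer links it directly instead of using NLopt_LIBS and NLopt_INCLUDE_DIR; a minimal sketch (the consuming target name is illustrative):

    find_package(NLopt 1.4 REQUIRED)
    target_link_libraries(libslic3r PRIVATE NLopt::nlopt)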
|
490
cmake/modules/FindOpenVDB.cmake
Normal file
|
@ -0,0 +1,490 @@
|
|||
# Copyright (c) DreamWorks Animation LLC
|
||||
#
|
||||
# All rights reserved. This software is distributed under the
|
||||
# Mozilla Public License 2.0 ( http://www.mozilla.org/MPL/2.0/ )
|
||||
#
|
||||
# Redistributions of source code must retain the above copyright
|
||||
# and license notice and the following restrictions and disclaimer.
|
||||
#
|
||||
# * Neither the name of DreamWorks Animation nor the names of
|
||||
# its contributors may be used to endorse or promote products derived
|
||||
# from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDERS' AND CONTRIBUTORS' AGGREGATE
|
||||
# LIABILITY FOR ALL CLAIMS REGARDLESS OF THEIR BASIS EXCEED US$250.00.
|
||||
#
|
||||
#[=======================================================================[.rst:
|
||||
|
||||
FindOpenVDB
|
||||
-----------
|
||||
|
||||
Find OpenVDB include dirs, libraries and settings
|
||||
|
||||
Use this module by invoking find_package with the form::
|
||||
|
||||
find_package(OpenVDB
|
||||
[version] [EXACT] # Minimum or EXACT version
|
||||
[REQUIRED] # Fail with error if OpenVDB is not found
|
||||
[COMPONENTS <libs>...] # OpenVDB libraries by their canonical name
|
||||
# e.g. "openvdb" for "libopenvdb"
|
||||
)
|
||||
|
||||
IMPORTED Targets
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
``OpenVDB::openvdb``
|
||||
The core openvdb library target.
|
||||
|
||||
Result Variables
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This will define the following variables:
|
||||
|
||||
``OpenVDB_FOUND``
|
||||
True if the system has the OpenVDB library.
|
||||
``OpenVDB_VERSION``
|
||||
The version of the OpenVDB library which was found.
|
||||
``OpenVDB_INCLUDE_DIRS``
|
||||
Include directories needed to use OpenVDB.
|
||||
``OpenVDB_LIBRARIES``
|
||||
Libraries needed to link to OpenVDB.
|
||||
``OpenVDB_LIBRARY_DIRS``
|
||||
OpenVDB library directories.
|
||||
``OpenVDB_DEFINITIONS``
|
||||
Definitions to use when compiling code that uses OpenVDB.
|
||||
``OpenVDB_{COMPONENT}_FOUND``
|
||||
True if the system has the named OpenVDB component.
|
||||
``OpenVDB_USES_BLOSC``
|
||||
True if the OpenVDB Library has been built with blosc support
|
||||
``OpenVDB_USES_LOG4CPLUS``
|
||||
True if the OpenVDB Library has been built with log4cplus support
|
||||
``OpenVDB_USES_EXR``
|
||||
True if the OpenVDB Library has been built with openexr support
|
||||
``OpenVDB_ABI``
|
||||
Set if this module was able to determine the ABI number the located
|
||||
OpenVDB Library was built against. Unset otherwise.
|
||||
|
||||
Cache Variables
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
The following cache variables may also be set:
|
||||
|
||||
``OpenVDB_INCLUDE_DIR``
|
||||
The directory containing ``openvdb/version.h``.
|
||||
``OpenVDB_{COMPONENT}_LIBRARY``
|
||||
Individual component libraries for OpenVDB
|
||||
|
||||
Hints
|
||||
^^^^^
|
||||
|
||||
Instead of explicitly setting the cache variables, the following variables
|
||||
may be provided to tell this module where to look.
|
||||
|
||||
``OPENVDB_ROOT``
|
||||
Preferred installation prefix.
|
||||
``OPENVDB_INCLUDEDIR``
|
||||
Preferred include directory e.g. <prefix>/include
|
||||
``OPENVDB_LIBRARYDIR``
|
||||
Preferred library directory e.g. <prefix>/lib
|
||||
``SYSTEM_LIBRARY_PATHS``
|
||||
Paths appended to all include and lib searches.
|
||||
|
||||
#]=======================================================================]
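A hedged example of the call pattern this module is written for, mirroring the currently commented-out invocation in the top-level CMakeLists.txt (the consuming target name is illustrative):

    find_package(OpenVDB 5.0 COMPONENTS openvdb)
    if(OpenVDB_FOUND)
        target_link_libraries(libslic3r PRIVATE OpenVDB::openvdb)
    endif()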
|
||||
|
||||
cmake_minimum_required(VERSION 3.3)
|
||||
# Monitoring <PackageName>_ROOT variables
|
||||
if(POLICY CMP0074)
|
||||
cmake_policy(SET CMP0074 NEW)
|
||||
endif()
|
||||
|
||||
# Include utility functions for version information
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/OpenVDBUtils.cmake)
|
||||
|
||||
mark_as_advanced(
|
||||
OpenVDB_INCLUDE_DIR
|
||||
OpenVDB_LIBRARY
|
||||
)
|
||||
|
||||
set(_OPENVDB_COMPONENT_LIST
|
||||
openvdb
|
||||
)
|
||||
|
||||
if(OpenVDB_FIND_COMPONENTS)
|
||||
set(OPENVDB_COMPONENTS_PROVIDED TRUE)
|
||||
set(_IGNORED_COMPONENTS "")
|
||||
foreach(COMPONENT ${OpenVDB_FIND_COMPONENTS})
|
||||
if(NOT ${COMPONENT} IN_LIST _OPENVDB_COMPONENT_LIST)
|
||||
list(APPEND _IGNORED_COMPONENTS ${COMPONENT})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if(_IGNORED_COMPONENTS)
|
||||
message(STATUS "Ignoring unknown components of OpenVDB:")
|
||||
foreach(COMPONENT ${_IGNORED_COMPONENTS})
|
||||
message(STATUS " ${COMPONENT}")
|
||||
endforeach()
|
||||
list(REMOVE_ITEM OpenVDB_FIND_COMPONENTS ${_IGNORED_COMPONENTS})
|
||||
endif()
|
||||
else()
|
||||
set(OPENVDB_COMPONENTS_PROVIDED FALSE)
|
||||
set(OpenVDB_FIND_COMPONENTS ${_OPENVDB_COMPONENT_LIST})
|
||||
endif()
|
||||
|
||||
# Append OPENVDB_ROOT or $ENV{OPENVDB_ROOT} if set (prioritize the direct cmake var)
|
||||
set(_OPENVDB_ROOT_SEARCH_DIR "")
|
||||
|
||||
# Additionally, try to use pkg-config to find OpenVDB
|
||||
|
||||
find_package(PkgConfig)
|
||||
pkg_check_modules(PC_OpenVDB QUIET OpenVDB)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Search for OpenVDB include DIR
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
set(_OPENVDB_INCLUDE_SEARCH_DIRS "")
|
||||
list(APPEND _OPENVDB_INCLUDE_SEARCH_DIRS
|
||||
${OPENVDB_INCLUDEDIR}
|
||||
${_OPENVDB_ROOT_SEARCH_DIR}
|
||||
${PC_OpenVDB_INCLUDE_DIRS}
|
||||
${SYSTEM_LIBRARY_PATHS}
|
||||
)
|
||||
|
||||
# Look for a standard OpenVDB header file.
|
||||
find_path(OpenVDB_INCLUDE_DIR openvdb/version.h
|
||||
PATHS ${_OPENVDB_INCLUDE_SEARCH_DIRS}
|
||||
PATH_SUFFIXES include
|
||||
)
|
||||
|
||||
OPENVDB_VERSION_FROM_HEADER("${OpenVDB_INCLUDE_DIR}/openvdb/version.h"
|
||||
VERSION OpenVDB_VERSION
|
||||
MAJOR OpenVDB_MAJOR_VERSION
|
||||
MINOR OpenVDB_MINOR_VERSION
|
||||
PATCH OpenVDB_PATCH_VERSION
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Search for OPENVDB lib DIR
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
set(_OPENVDB_LIBRARYDIR_SEARCH_DIRS "")
|
||||
|
||||
# Append to _OPENVDB_LIBRARYDIR_SEARCH_DIRS in priority order
|
||||
|
||||
list(APPEND _OPENVDB_LIBRARYDIR_SEARCH_DIRS
|
||||
${OPENVDB_LIBRARYDIR}
|
||||
${_OPENVDB_ROOT_SEARCH_DIR}
|
||||
${PC_OpenVDB_LIBRARY_DIRS}
|
||||
${SYSTEM_LIBRARY_PATHS}
|
||||
)
|
||||
|
||||
# Build suffix directories
|
||||
|
||||
set(OPENVDB_PATH_SUFFIXES
|
||||
lib64
|
||||
lib
|
||||
)
|
||||
|
||||
# Static library setup
|
||||
if(UNIX AND OPENVDB_USE_STATIC_LIBS)
|
||||
set(_OPENVDB_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
|
||||
endif()
|
||||
|
||||
set(OpenVDB_LIB_COMPONENTS "")
|
||||
|
||||
foreach(COMPONENT ${OpenVDB_FIND_COMPONENTS})
|
||||
set(LIB_NAME ${COMPONENT})
|
||||
find_library(OpenVDB_${COMPONENT}_LIBRARY ${LIB_NAME} lib${LIB_NAME}
|
||||
PATHS ${_OPENVDB_LIBRARYDIR_SEARCH_DIRS}
|
||||
PATH_SUFFIXES ${OPENVDB_PATH_SUFFIXES}
|
||||
)
|
||||
list(APPEND OpenVDB_LIB_COMPONENTS ${OpenVDB_${COMPONENT}_LIBRARY})
|
||||
|
||||
if(OpenVDB_${COMPONENT}_LIBRARY)
|
||||
set(OpenVDB_${COMPONENT}_FOUND TRUE)
|
||||
else()
|
||||
set(OpenVDB_${COMPONENT}_FOUND FALSE)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if(UNIX AND OPENVDB_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_OPENVDB_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
unset(_OPENVDB_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES)
|
||||
endif()
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Cache and set OPENVDB_FOUND
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(OpenVDB
|
||||
FOUND_VAR OpenVDB_FOUND
|
||||
REQUIRED_VARS
|
||||
OpenVDB_INCLUDE_DIR
|
||||
OpenVDB_LIB_COMPONENTS
|
||||
VERSION_VAR OpenVDB_VERSION
|
||||
HANDLE_COMPONENTS
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Determine ABI number
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Set the ABI number the library was built against. Uses vdb_print
|
||||
find_program(OPENVDB_PRINT vdb_print PATHS ${OpenVDB_INCLUDE_DIR} )
|
||||
|
||||
OPENVDB_ABI_VERSION_FROM_PRINT(
|
||||
"${OPENVDB_PRINT}"
|
||||
ABI OpenVDB_ABI
|
||||
)
|
||||
|
||||
if(NOT OpenVDB_FIND_QUIET)
|
||||
if(NOT OpenVDB_ABI)
|
||||
message(WARNING "Unable to determine OpenVDB ABI version from OpenVDB "
|
||||
"installation. The library major version \"${OpenVDB_MAJOR_VERSION}\" "
|
||||
"will be inferred. If this is not correct, use "
|
||||
"add_definitions(-DOPENVDB_ABI_VERSION_NUMBER=N)"
|
||||
)
|
||||
else()
|
||||
message(STATUS "OpenVDB ABI Version: ${OpenVDB_ABI}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Handle OpenVDB dependencies
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
# Add standard dependencies
|
||||
|
||||
find_package(IlmBase COMPONENTS Half)
|
||||
if(NOT IlmBase_FOUND)
|
||||
pkg_check_modules(IlmBase QUIET IlmBase)
|
||||
endif()
|
||||
if (IlmBase_FOUND AND NOT TARGET IlmBase::Half)
|
||||
message(STATUS "Falling back to IlmBase found by pkg-config...")
|
||||
|
||||
find_library(IlmHalf_LIBRARY NAMES Half)
|
||||
if(NOT IlmHalf_LIBRARY) # find_library() leaves the variable false-y (<VAR>-NOTFOUND) on failure
|
||||
message(FATAL_ERROR "IlmBase::Half can not be found!")
|
||||
endif()
|
||||
|
||||
add_library(IlmBase::Half UNKNOWN IMPORTED)
|
||||
set_target_properties(IlmBase::Half PROPERTIES
|
||||
IMPORTED_LOCATION "${IlmHalf_LIBRARY}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${IlmBase_INCLUDE_DIRS})
|
||||
elseif(NOT IlmBase_FOUND)
|
||||
message(FATAL_ERROR "IlmBase::Half can not be found!")
|
||||
endif()
|
||||
find_package(TBB REQUIRED COMPONENTS tbb)
|
||||
find_package(ZLIB REQUIRED)
|
||||
find_package(Boost REQUIRED COMPONENTS iostreams system)
|
||||
|
||||
# Use GetPrerequisites to see which libraries this OpenVDB lib has linked to
|
||||
# which we can query for optional deps. This basically runs ldd/otool/objdump
|
||||
# etc. to track deps. We could use a vdb_config binary tool here to improve
|
||||
# this process
|
||||
|
||||
include(GetPrerequisites)
|
||||
|
||||
set(_EXCLUDE_SYSTEM_PREREQUISITES 1)
|
||||
set(_RECURSE_PREREQUISITES 0)
|
||||
set(_OPENVDB_PREREQUISITE_LIST)
|
||||
|
||||
if(NOT OPENVDB_USE_STATIC_LIBS)
|
||||
get_prerequisites(${OpenVDB_openvdb_LIBRARY}
|
||||
_OPENVDB_PREREQUISITE_LIST
|
||||
${_EXCLUDE_SYSTEM_PREREQUISITES}
|
||||
${_RECURSE_PREREQUISITES}
|
||||
""
|
||||
"${SYSTEM_LIBRARY_PATHS}"
|
||||
)
|
||||
endif()
|
||||
|
||||
unset(_EXCLUDE_SYSTEM_PREREQUISITES)
|
||||
unset(_RECURSE_PREREQUISITES)
|
||||
|
||||
# As the way we resolve optional libraries relies on library file names, use
|
||||
# the configuration options from the main CMakeLists.txt to allow users
|
||||
# to manually identify the requirements of OpenVDB builds if they know them.
|
||||
|
||||
set(OpenVDB_USES_BLOSC ${USE_BLOSC})
|
||||
set(OpenVDB_USES_LOG4CPLUS ${USE_LOG4CPLUS})
|
||||
set(OpenVDB_USES_ILM ${USE_EXR})
|
||||
set(OpenVDB_USES_EXR ${USE_EXR})
|
||||
|
||||
# Search for optional dependencies
|
||||
|
||||
foreach(PREREQUISITE ${_OPENVDB_PREREQUISITE_LIST})
|
||||
set(_HAS_DEP)
|
||||
get_filename_component(PREREQUISITE ${PREREQUISITE} NAME)
|
||||
|
||||
string(FIND ${PREREQUISITE} "blosc" _HAS_DEP)
|
||||
if(NOT ${_HAS_DEP} EQUAL -1)
|
||||
set(OpenVDB_USES_BLOSC ON)
|
||||
endif()
|
||||
|
||||
string(FIND ${PREREQUISITE} "log4cplus" _HAS_DEP)
|
||||
if(NOT ${_HAS_DEP} EQUAL -1)
|
||||
set(OpenVDB_USES_LOG4CPLUS ON)
|
||||
endif()
|
||||
|
||||
string(FIND ${PREREQUISITE} "IlmImf" _HAS_DEP)
|
||||
if(NOT ${_HAS_DEP} EQUAL -1)
|
||||
set(OpenVDB_USES_ILM ON)
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
unset(_OPENVDB_PREREQUISITE_LIST)
|
||||
unset(_HAS_DEP)
|
||||
|
||||
if(OpenVDB_USES_BLOSC)
|
||||
find_package(Blosc )
|
||||
if(NOT Blosc_FOUND OR NOT TARGET Blosc::blosc)
|
||||
message(STATUS "find_package could not find Blosc. Using fallback blosc search...")
|
||||
find_path(Blosc_INCLUDE_DIR blosc.h)
|
||||
find_library(Blosc_LIBRARY NAMES blosc)
|
||||
if (Blosc_INCLUDE_DIR AND Blosc_LIBRARY)
|
||||
set(Blosc_FOUND TRUE)
|
||||
add_library(Blosc::blosc UNKNOWN IMPORTED)
|
||||
set_target_properties(Blosc::blosc PROPERTIES
|
||||
IMPORTED_LOCATION "${Blosc_LIBRARY}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${Blosc_INCLUDE_DIR})
|
||||
else() # neither the header nor the library was found
|
||||
message(FATAL_ERROR "Blosc library can not be found!")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(OpenVDB_USES_LOG4CPLUS)
|
||||
find_package(Log4cplus REQUIRED)
|
||||
endif()
|
||||
|
||||
if(OpenVDB_USES_ILM)
|
||||
find_package(IlmBase REQUIRED)
|
||||
endif()
|
||||
|
||||
if(OpenVDB_USES_EXR)
|
||||
find_package(OpenEXR REQUIRED)
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
find_package(Threads REQUIRED)
|
||||
endif()
|
||||
|
||||
# Set deps. Note that the order here is important. If we're building against
|
||||
# Houdini 17.5 we must include OpenEXR and IlmBase deps first to ensure the
|
||||
# user's chosen namespaced headers are correctly prioritized. Otherwise other
|
||||
# include paths from shared installs (including houdini) may pull in the wrong
|
||||
# headers
|
||||
|
||||
set(_OPENVDB_VISIBLE_DEPENDENCIES
|
||||
Boost::iostreams
|
||||
Boost::system
|
||||
IlmBase::Half
|
||||
)
|
||||
|
||||
set(_OPENVDB_DEFINITIONS)
|
||||
if(OpenVDB_ABI)
|
||||
list(APPEND _OPENVDB_DEFINITIONS "-DOPENVDB_ABI_VERSION_NUMBER=${OpenVDB_ABI}")
|
||||
endif()
|
||||
|
||||
if(OpenVDB_USES_EXR)
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES
|
||||
IlmBase::IlmThread
|
||||
IlmBase::Iex
|
||||
IlmBase::Imath
|
||||
OpenEXR::IlmImf
|
||||
)
|
||||
list(APPEND _OPENVDB_DEFINITIONS "-DOPENVDB_TOOLS_RAYTRACER_USE_EXR")
|
||||
endif()
|
||||
|
||||
if(OpenVDB_USES_LOG4CPLUS)
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES Log4cplus::log4cplus)
|
||||
list(APPEND _OPENVDB_DEFINITIONS "-DOPENVDB_USE_LOG4CPLUS")
|
||||
endif()
|
||||
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES
|
||||
TBB::tbb
|
||||
)
|
||||
if(UNIX)
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES
|
||||
Threads::Threads
|
||||
)
|
||||
endif()
|
||||
|
||||
set(_OPENVDB_HIDDEN_DEPENDENCIES)
|
||||
|
||||
if(OpenVDB_USES_BLOSC)
|
||||
if(OPENVDB_USE_STATIC_LIBS)
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES $<LINK_ONLY:Blosc::blosc>)
|
||||
else()
|
||||
list(APPEND _OPENVDB_HIDDEN_DEPENDENCIES Blosc::blosc)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(OPENVDB_USE_STATIC_LIBS)
|
||||
list(APPEND _OPENVDB_VISIBLE_DEPENDENCIES $<LINK_ONLY:ZLIB::ZLIB>)
|
||||
else()
|
||||
list(APPEND _OPENVDB_HIDDEN_DEPENDENCIES ZLIB::ZLIB)
|
||||
endif()
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Configure imported target
|
||||
# ------------------------------------------------------------------------
|
||||
|
||||
set(OpenVDB_LIBRARIES
|
||||
${OpenVDB_LIB_COMPONENTS}
|
||||
)
|
||||
set(OpenVDB_INCLUDE_DIRS ${OpenVDB_INCLUDE_DIR})
|
||||
|
||||
set(OpenVDB_DEFINITIONS)
|
||||
list(APPEND OpenVDB_DEFINITIONS "${PC_OpenVDB_CFLAGS_OTHER}")
|
||||
list(APPEND OpenVDB_DEFINITIONS "${_OPENVDB_DEFINITIONS}")
|
||||
list(REMOVE_DUPLICATES OpenVDB_DEFINITIONS)
|
||||
|
||||
set(OpenVDB_LIBRARY_DIRS "")
|
||||
foreach(LIB ${OpenVDB_LIB_COMPONENTS})
|
||||
get_filename_component(_OPENVDB_LIBDIR ${LIB} DIRECTORY)
|
||||
list(APPEND OpenVDB_LIBRARY_DIRS ${_OPENVDB_LIBDIR})
|
||||
endforeach()
|
||||
list(REMOVE_DUPLICATES OpenVDB_LIBRARY_DIRS)
|
||||
|
||||
foreach(COMPONENT ${OpenVDB_FIND_COMPONENTS})
|
||||
if(NOT TARGET OpenVDB::${COMPONENT})
|
||||
add_library(OpenVDB::${COMPONENT} UNKNOWN IMPORTED)
|
||||
set_target_properties(OpenVDB::${COMPONENT} PROPERTIES
|
||||
IMPORTED_LOCATION "${OpenVDB_${COMPONENT}_LIBRARY}"
|
||||
INTERFACE_COMPILE_OPTIONS "${OpenVDB_DEFINITIONS}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${OpenVDB_INCLUDE_DIR}"
|
||||
IMPORTED_LINK_DEPENDENT_LIBRARIES "${_OPENVDB_HIDDEN_DEPENDENCIES}" # non visible deps
|
||||
INTERFACE_LINK_LIBRARIES "${_OPENVDB_VISIBLE_DEPENDENCIES}" # visible deps (headers)
|
||||
INTERFACE_COMPILE_FEATURES cxx_std_11
|
||||
)
|
||||
|
||||
if (OPENVDB_USE_STATIC_LIBS)
|
||||
set_target_properties(OpenVDB::${COMPONENT} PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS "OPENVDB_STATICLIB;OPENVDB_OPENEXR_STATICLIB"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
if(OpenVDB_FOUND AND NOT ${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY)
|
||||
message(STATUS "OpenVDB libraries: ${OpenVDB_LIBRARIES}")
|
||||
endif()
|
||||
|
||||
unset(_OPENVDB_DEFINITIONS)
|
||||
unset(_OPENVDB_VISIBLE_DEPENDENCIES)
|
||||
unset(_OPENVDB_HIDDEN_DEPENDENCIES)
|
|
@ -93,8 +93,16 @@
|
|||
# This module will also create the "tbb" target that may be used when building
|
||||
# executables and libraries.
|
||||
|
||||
unset(TBB_FOUND CACHE)
|
||||
unset(TBB_INCLUDE_DIRS CACHE)
|
||||
unset(TBB_LIBRARIES)
|
||||
unset(TBB_LIBRARIES_DEBUG)
|
||||
unset(TBB_LIBRARIES_RELEASE)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
find_package(Threads QUIET REQUIRED)
|
||||
|
||||
if(NOT TBB_FOUND)
|
||||
|
||||
##################################
|
||||
|
@ -215,6 +223,9 @@ if(NOT TBB_FOUND)
|
|||
foreach(_comp ${TBB_SEARCH_COMPOMPONENTS})
|
||||
if(";${TBB_FIND_COMPONENTS};tbb;" MATCHES ";${_comp};")
|
||||
|
||||
unset(TBB_${_comp}_LIBRARY_DEBUG CACHE)
|
||||
unset(TBB_${_comp}_LIBRARY_RELEASE CACHE)
|
||||
|
||||
# Search for the libraries
|
||||
find_library(TBB_${_comp}_LIBRARY_RELEASE ${_comp}${TBB_STATIC_SUFFIX}
|
||||
HINTS ${TBB_LIBRARY} ${TBB_SEARCH_DIR}
|
||||
|
@ -250,28 +261,31 @@ if(NOT TBB_FOUND)
|
|||
endif()
|
||||
endforeach()
|
||||
|
||||
unset(TBB_STATIC_SUFFIX)
|
||||
|
||||
##################################
|
||||
# Set compile flags and libraries
|
||||
##################################
|
||||
|
||||
set(TBB_DEFINITIONS_RELEASE "")
|
||||
set(TBB_DEFINITIONS_DEBUG "-DTBB_USE_DEBUG=1")
|
||||
set(TBB_DEFINITIONS_DEBUG "TBB_USE_DEBUG=1")
|
||||
|
||||
if(TBB_LIBRARIES_${TBB_BUILD_TYPE})
|
||||
set(TBB_DEFINITIONS "${TBB_DEFINITIONS_${TBB_BUILD_TYPE}}")
|
||||
set(TBB_LIBRARIES "${TBB_LIBRARIES_${TBB_BUILD_TYPE}}")
|
||||
elseif(TBB_LIBRARIES_RELEASE)
|
||||
set(TBB_DEFINITIONS "${TBB_DEFINITIONS_RELEASE}")
|
||||
set(TBB_LIBRARIES "${TBB_LIBRARIES_RELEASE}")
|
||||
elseif(TBB_LIBRARIES_DEBUG)
|
||||
set(TBB_DEFINITIONS "${TBB_DEFINITIONS_DEBUG}")
|
||||
set(TBB_LIBRARIES "${TBB_LIBRARIES_DEBUG}")
|
||||
endif()
|
||||
|
||||
if(NOT MSVC AND NOT TBB_LIBRARIES)
|
||||
set(TBB_LIBRARIES ${TBB_LIBRARIES_RELEASE})
|
||||
endif()
|
||||
|
||||
set(TBB_DEFINITIONS "")
|
||||
if (MSVC AND TBB_STATIC)
|
||||
set(TBB_DEFINITIONS __TBB_NO_IMPLICIT_LINKAGE)
|
||||
endif ()
|
||||
|
||||
unset (TBB_STATIC_SUFFIX)
|
||||
|
||||
find_package_handle_standard_args(TBB
|
||||
REQUIRED_VARS TBB_INCLUDE_DIRS TBB_LIBRARIES
|
||||
FAIL_MESSAGE "TBB library cannot be found. Consider set TBBROOT environment variable."
|
||||
HANDLE_COMPONENTS
|
||||
VERSION_VAR TBB_VERSION)
|
||||
|
||||
|
@ -280,25 +294,20 @@ if(NOT TBB_FOUND)
|
|||
##################################
|
||||
|
||||
if(NOT CMAKE_VERSION VERSION_LESS 3.0 AND TBB_FOUND)
|
||||
add_library(tbb UNKNOWN IMPORTED)
|
||||
set_target_properties(tbb PROPERTIES
|
||||
add_library(TBB::tbb UNKNOWN IMPORTED)
|
||||
set_target_properties(TBB::tbb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS "${TBB_DEFINITIONS}"
|
||||
INTERFACE_LINK_LIBRARIES "Threads::Threads;${CMAKE_DL_LIBS}"
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${TBB_INCLUDE_DIRS}
|
||||
IMPORTED_LOCATION ${TBB_LIBRARIES})
|
||||
if(TBB_LIBRARIES_RELEASE AND TBB_LIBRARIES_DEBUG)
|
||||
set_target_properties(tbb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS "$<$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>:TBB_USE_DEBUG=1>"
|
||||
set_target_properties(TBB::tbb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS "${TBB_DEFINITIONS};$<$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>:${TBB_DEFINITIONS_DEBUG}>;$<$<CONFIG:Release>:${TBB_DEFINITIONS_RELEASE}>"
|
||||
IMPORTED_LOCATION_DEBUG ${TBB_LIBRARIES_DEBUG}
|
||||
IMPORTED_LOCATION_RELWITHDEBINFO ${TBB_LIBRARIES_RELEASE}
|
||||
IMPORTED_LOCATION_RELEASE ${TBB_LIBRARIES_RELEASE}
|
||||
IMPORTED_LOCATION_MINSIZEREL ${TBB_LIBRARIES_RELEASE}
|
||||
)
|
||||
elseif(TBB_LIBRARIES_RELEASE)
|
||||
set_target_properties(tbb PROPERTIES IMPORTED_LOCATION ${TBB_LIBRARIES_RELEASE})
|
||||
else()
|
||||
set_target_properties(tbb PROPERTIES
|
||||
INTERFACE_COMPILE_DEFINITIONS "${TBB_DEFINITIONS_DEBUG}"
|
||||
IMPORTED_LOCATION ${TBB_LIBRARIES_DEBUG}
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
|
|
166
cmake/modules/OpenVDBUtils.cmake
Normal file
|
@ -0,0 +1,166 @@
|
|||
# Copyright (c) DreamWorks Animation LLC
|
||||
#
|
||||
# All rights reserved. This software is distributed under the
|
||||
# Mozilla Public License 2.0 ( http://www.mozilla.org/MPL/2.0/ )
|
||||
#
|
||||
# Redistributions of source code must retain the above copyright
|
||||
# and license notice and the following restrictions and disclaimer.
|
||||
#
|
||||
# * Neither the name of DreamWorks Animation nor the names of
|
||||
# its contributors may be used to endorse or promote products derived
|
||||
# from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
# IN NO EVENT SHALL THE COPYRIGHT HOLDERS' AND CONTRIBUTORS' AGGREGATE
|
||||
# LIABILITY FOR ALL CLAIMS REGARDLESS OF THEIR BASIS EXCEED US$250.00.
|
||||
#
|
||||
#[=======================================================================[.rst:
|
||||
|
||||
OpenVDBUtils.cmake
|
||||
------------------
|
||||
|
||||
A utility CMake file which provides helper functions for configuring an
|
||||
OpenVDB installation.
|
||||
|
||||
Use this module by invoking include with the form::
|
||||
|
||||
include ( OpenVDBUtils )
|
||||
|
||||
|
||||
The following functions are provided:
|
||||
|
||||
``OPENVDB_VERSION_FROM_HEADER``
|
||||
|
||||
OPENVDB_VERSION_FROM_HEADER ( <header_path>
|
||||
VERSION [<version>]
|
||||
MAJOR [<version>]
|
||||
MINOR [<version>]
|
||||
PATCH [<version>] )
|
||||
|
||||
Parse the provided version file to retrieve the current OpenVDB
|
||||
version information. The file is expected to be a version.h file
|
||||
as found in the following path of an OpenVDB repository:
|
||||
openvdb/version.h
|
||||
|
||||
If the file does not exist, variables are unmodified.
|
||||
|
||||
``OPENVDB_ABI_VERSION_FROM_PRINT``
|
||||
|
||||
OPENVDB_ABI_VERSION_FROM_PRINT ( <vdb_print>
|
||||
[QUIET]
|
||||
ABI [<version>] )
|
||||
|
||||
Retrieve the ABI version that an installation of OpenVDB was compiled
|
||||
for using the provided vdb_print binary. Parses the result of:
|
||||
vdb_print --version
|
||||
|
||||
If the binary does not exist or fails to launch, variables are
|
||||
unmodified.
|
||||
|
||||
#]=======================================================================]
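A hedged call sketch for the first helper documented above, in the spirit of how FindOpenVDB.cmake uses it (the header path is illustrative):

    OPENVDB_VERSION_FROM_HEADER("/usr/include/openvdb/version.h"
        VERSION OpenVDB_VERSION
        MAJOR   OpenVDB_MAJOR_VERSION
        MINOR   OpenVDB_MINOR_VERSION
        PATCH   OpenVDB_PATCH_VERSION
    )
    message(STATUS "Detected OpenVDB ${OpenVDB_VERSION}")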
|
||||
|
||||
|
||||
function(OPENVDB_VERSION_FROM_HEADER OPENVDB_VERSION_FILE)
  cmake_parse_arguments(_VDB "" "VERSION;MAJOR;MINOR;PATCH" "" ${ARGN})

  if(NOT EXISTS ${OPENVDB_VERSION_FILE})
    return()
  endif()

  file(STRINGS "${OPENVDB_VERSION_FILE}" openvdb_version_str
    REGEX "^#define[\t ]+OPENVDB_LIBRARY_MAJOR_VERSION_NUMBER[\t ]+.*"
  )
  string(REGEX REPLACE "^.*OPENVDB_LIBRARY_MAJOR_VERSION_NUMBER[\t ]+([0-9]*).*$" "\\1"
    _OpenVDB_MAJOR_VERSION "${openvdb_version_str}"
  )

  file(STRINGS "${OPENVDB_VERSION_FILE}" openvdb_version_str
    REGEX "^#define[\t ]+OPENVDB_LIBRARY_MINOR_VERSION_NUMBER[\t ]+.*"
  )
  string(REGEX REPLACE "^.*OPENVDB_LIBRARY_MINOR_VERSION_NUMBER[\t ]+([0-9]*).*$" "\\1"
    _OpenVDB_MINOR_VERSION "${openvdb_version_str}"
  )

  file(STRINGS "${OPENVDB_VERSION_FILE}" openvdb_version_str
    REGEX "^#define[\t ]+OPENVDB_LIBRARY_PATCH_VERSION_NUMBER[\t ]+.*"
  )
  string(REGEX REPLACE "^.*OPENVDB_LIBRARY_PATCH_VERSION_NUMBER[\t ]+([0-9]*).*$" "\\1"
    _OpenVDB_PATCH_VERSION "${openvdb_version_str}"
  )
  unset(openvdb_version_str)

  if(_VDB_VERSION)
    set(${_VDB_VERSION}
      ${_OpenVDB_MAJOR_VERSION}.${_OpenVDB_MINOR_VERSION}.${_OpenVDB_PATCH_VERSION}
      PARENT_SCOPE
    )
  endif()
  if(_VDB_MAJOR)
    set(${_VDB_MAJOR} ${_OpenVDB_MAJOR_VERSION} PARENT_SCOPE)
  endif()
  if(_VDB_MINOR)
    set(${_VDB_MINOR} ${_OpenVDB_MINOR_VERSION} PARENT_SCOPE)
  endif()
  if(_VDB_PATCH)
    set(${_VDB_PATCH} ${_OpenVDB_PATCH_VERSION} PARENT_SCOPE)
  endif()
endfunction()
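
# Illustrative usage sketch (not part of the original module): the header
# location and the output variable names below are assumptions chosen for
# the example; any OpenVDB version.h path can be passed in their place.
OPENVDB_VERSION_FROM_HEADER("${DESTDIR}/usr/local/include/openvdb/version.h"
  VERSION OpenVDB_VERSION
  MAJOR   OpenVDB_MAJOR_VERSION
  MINOR   OpenVDB_MINOR_VERSION
  PATCH   OpenVDB_PATCH_VERSION)
message(STATUS "Detected OpenVDB ${OpenVDB_VERSION}")
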
########################################################################
########################################################################

function(OPENVDB_ABI_VERSION_FROM_PRINT OPENVDB_PRINT)
  cmake_parse_arguments(_VDB "QUIET" "ABI" "" ${ARGN})

  if(NOT EXISTS ${OPENVDB_PRINT})
    message(WARNING "vdb_print not found! ${OPENVDB_PRINT}")
    return()
  endif()

  set(_VDB_PRINT_VERSION_STRING "")
  set(_VDB_PRINT_RETURN_STATUS "")

  if(${_VDB_QUIET})
    execute_process(COMMAND ${OPENVDB_PRINT} "--version"
      RESULT_VARIABLE _VDB_PRINT_RETURN_STATUS
      OUTPUT_VARIABLE _VDB_PRINT_VERSION_STRING
      ERROR_QUIET
      OUTPUT_STRIP_TRAILING_WHITESPACE
    )
  else()
    execute_process(COMMAND ${OPENVDB_PRINT} "--version"
      RESULT_VARIABLE _VDB_PRINT_RETURN_STATUS
      OUTPUT_VARIABLE _VDB_PRINT_VERSION_STRING
      OUTPUT_STRIP_TRAILING_WHITESPACE
    )
  endif()

  if(${_VDB_PRINT_RETURN_STATUS})
    message(WARNING "vdb_print returned with status ${_VDB_PRINT_RETURN_STATUS}")
    return()
  endif()

  set(_OpenVDB_ABI)
  string(REGEX REPLACE ".*abi([0-9]*).*" "\\1" _OpenVDB_ABI ${_VDB_PRINT_VERSION_STRING})
  if(${_OpenVDB_ABI} STREQUAL ${_VDB_PRINT_VERSION_STRING})
    set(_OpenVDB_ABI "")
  endif()
  unset(_VDB_PRINT_RETURN_STATUS)
  unset(_VDB_PRINT_VERSION_STRING)

  if(_VDB_ABI)
    set(${_VDB_ABI} ${_OpenVDB_ABI} PARENT_SCOPE)
  endif()
endfunction()
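
# Illustrative usage sketch (not part of the original module): the vdb_print
# path is an assumption for the example; if the binary is missing or fails,
# the ABI variable is left unmodified, as documented above.
OPENVDB_ABI_VERSION_FROM_PRINT("${DESTDIR}/usr/local/bin/vdb_print"
  QUIET
  ABI OpenVDB_ABI)
message(STATUS "OpenVDB was compiled for ABI ${OpenVDB_ABI}")
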
deps/CMakeLists.txt (vendored, 4 changes)

@ -72,7 +72,7 @@ elseif (APPLE)
message(FATAL_ERROR "Could not determine OS X SDK version. Please use -DCMAKE_OSX_DEPLOYMENT_TARGET=<version>")
endif ()

message("OS X Deployment Target (inferred from default): ${DEP_OSX_TARGET}")
message("OS X Deployment Target (inferred from SDK): ${DEP_OSX_TARGET}")
endif ()

include("deps-macos.cmake")

@ -96,6 +96,7 @@ if (MSVC)
dep_nlopt
# dep_qhull # Experimental
dep_zlib # on Windows we still need zlib
dep_openvdb
)

else()

@ -110,6 +111,7 @@ else()
dep_cereal
dep_nlopt
dep_qhull
dep_openvdb
# dep_libigl # Not working, static build has different Eigen
)

deps/blosc-mods.patch (vendored, new file, 468 lines)

@ -0,0 +1,468 @@
|
|||
From 5669891dfaaa4c814f3ec667ca6bf4e693aea978 Mon Sep 17 00:00:00 2001
|
||||
From: tamasmeszaros <meszaros.q@gmail.com>
|
||||
Date: Wed, 30 Oct 2019 12:54:52 +0100
|
||||
Subject: [PATCH] Blosc 1.17 fixes and cmake config script
|
||||
|
||||
---
|
||||
CMakeLists.txt | 105 +++++++++++++++++-----------------
|
||||
blosc/CMakeLists.txt | 118 +++++++++------------------------------
|
||||
cmake/FindLZ4.cmake | 6 +-
|
||||
cmake/FindSnappy.cmake | 8 ++-
|
||||
cmake/FindZstd.cmake | 8 ++-
|
||||
cmake_config.cmake.in | 24 ++++++++
|
||||
internal-complibs/CMakeLists.txt | 35 ++++++++++++
|
||||
7 files changed, 157 insertions(+), 147 deletions(-)
|
||||
create mode 100644 cmake_config.cmake.in
|
||||
create mode 100644 internal-complibs/CMakeLists.txt
|
||||
|
||||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index 59d9fab..e9134c2 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -71,7 +71,7 @@
|
||||
# DEV: static includes blosc.a and blosc.h
|
||||
|
||||
|
||||
-cmake_minimum_required(VERSION 2.8.12)
|
||||
+cmake_minimum_required(VERSION 3.1) # Threads::Threads target available from 3.1
|
||||
if (NOT CMAKE_VERSION VERSION_LESS 3.3)
|
||||
cmake_policy(SET CMP0063 NEW)
|
||||
endif()
|
||||
@@ -124,55 +124,30 @@ option(PREFER_EXTERNAL_ZSTD
|
||||
|
||||
set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake")
|
||||
|
||||
-
|
||||
-if(NOT DEACTIVATE_LZ4)
|
||||
- if(PREFER_EXTERNAL_LZ4)
|
||||
- find_package(LZ4)
|
||||
- else()
|
||||
- message(STATUS "Using LZ4 internal sources.")
|
||||
- endif(PREFER_EXTERNAL_LZ4)
|
||||
- # HAVE_LZ4 will be set to true because even if the library is
|
||||
- # not found, we will use the included sources for it
|
||||
- set(HAVE_LZ4 TRUE)
|
||||
-endif(NOT DEACTIVATE_LZ4)
|
||||
-
|
||||
-if(NOT DEACTIVATE_SNAPPY)
|
||||
- if(PREFER_EXTERNAL_SNAPPY)
|
||||
- find_package(Snappy)
|
||||
- else()
|
||||
- message(STATUS "Using Snappy internal sources.")
|
||||
- endif(PREFER_EXTERNAL_SNAPPY)
|
||||
- # HAVE_SNAPPY will be set to true because even if the library is not found,
|
||||
- # we will use the included sources for it
|
||||
- set(HAVE_SNAPPY TRUE)
|
||||
-endif(NOT DEACTIVATE_SNAPPY)
|
||||
-
|
||||
-if(NOT DEACTIVATE_ZLIB)
|
||||
- # import the ZLIB_ROOT environment variable to help finding the zlib library
|
||||
- if(PREFER_EXTERNAL_ZLIB)
|
||||
- set(ZLIB_ROOT $ENV{ZLIB_ROOT})
|
||||
- find_package(ZLIB)
|
||||
- if (NOT ZLIB_FOUND )
|
||||
- message(STATUS "No zlib found. Using internal sources.")
|
||||
- endif (NOT ZLIB_FOUND )
|
||||
- else()
|
||||
- message(STATUS "Using zlib internal sources.")
|
||||
- endif(PREFER_EXTERNAL_ZLIB)
|
||||
- # HAVE_ZLIB will be set to true because even if the library is not found,
|
||||
- # we will use the included sources for it
|
||||
- set(HAVE_ZLIB TRUE)
|
||||
-endif(NOT DEACTIVATE_ZLIB)
|
||||
-
|
||||
-if (NOT DEACTIVATE_ZSTD)
|
||||
- if (PREFER_EXTERNAL_ZSTD)
|
||||
- find_package(Zstd)
|
||||
- else ()
|
||||
- message(STATUS "Using ZSTD internal sources.")
|
||||
- endif (PREFER_EXTERNAL_ZSTD)
|
||||
- # HAVE_ZSTD will be set to true because even if the library is
|
||||
- # not found, we will use the included sources for it
|
||||
- set(HAVE_ZSTD TRUE)
|
||||
-endif (NOT DEACTIVATE_ZSTD)
|
||||
+set(LIBS "")
|
||||
+macro(use_package _pkg _tgt)
|
||||
+ string(TOUPPER ${_pkg} _PKG)
|
||||
+ if(NOT DEACTIVATE_${_PKG})
|
||||
+ if(PREFER_EXTERNAL_${_PKG})
|
||||
+ find_package(${_pkg})
|
||||
+ if (NOT ${_pkg}_FOUND )
|
||||
+ message(STATUS "No ${_pkg} found. Using internal sources.")
|
||||
+ endif()
|
||||
+ else()
|
||||
+ message(STATUS "Using ${_pkg} internal sources.")
|
||||
+ endif(PREFER_EXTERNAL_${_PKG})
|
||||
+ # HAVE_${_pkg} will be set to true because even if the library is
|
||||
+ # not found, we will use the included sources for it
|
||||
+ set(HAVE_${_PKG} TRUE)
|
||||
+ list(APPEND LIBS ${_pkg}::${_tgt})
|
||||
+ endif(NOT DEACTIVATE_${_PKG})
|
||||
+endmacro()
|
||||
+
|
||||
+set(ZLIB_ROOT $ENV{ZLIB_ROOT})
|
||||
+use_package(ZLIB ZLIB)
|
||||
+use_package(LZ4 LZ4)
|
||||
+use_package(Snappy snappy)
|
||||
+use_package(Zstd Zstd)
|
||||
|
||||
# create the config.h file
|
||||
configure_file ("blosc/config.h.in" "blosc/config.h" )
|
||||
@@ -316,6 +291,7 @@ endif()
|
||||
|
||||
|
||||
# subdirectories
|
||||
+add_subdirectory(internal-complibs)
|
||||
add_subdirectory(blosc)
|
||||
|
||||
if(BUILD_TESTS)
|
||||
@@ -328,7 +304,6 @@ if(BUILD_BENCHMARKS)
|
||||
add_subdirectory(bench)
|
||||
endif(BUILD_BENCHMARKS)
|
||||
|
||||
-
|
||||
# uninstall target
|
||||
if (BLOSC_INSTALL)
|
||||
configure_file(
|
||||
@@ -338,10 +313,38 @@ if (BLOSC_INSTALL)
|
||||
install(FILES "${CMAKE_CURRENT_BINARY_DIR}/blosc.pc"
|
||||
DESTINATION lib/pkgconfig COMPONENT DEV)
|
||||
|
||||
+ configure_file(
|
||||
+ "${CMAKE_CURRENT_SOURCE_DIR}/cmake_config.cmake.in"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/cmakeexports/BloscConfig.cmake"
|
||||
+ @ONLY)
|
||||
+
|
||||
configure_file(
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
|
||||
IMMEDIATE @ONLY)
|
||||
+
|
||||
+ include(CMakePackageConfigHelpers)
|
||||
+ write_basic_package_version_file(
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/cmakeexports/BloscConfigVersion.cmake"
|
||||
+ VERSION ${BLOSC_VERSION_MAJOR}.${BLOSC_VERSION_MINOR}.${BLOSC_VERSION_PATCH}
|
||||
+ COMPATIBILITY AnyNewerVersion
|
||||
+ )
|
||||
+
|
||||
+ export(EXPORT BloscTargets
|
||||
+ FILE "${CMAKE_CURRENT_BINARY_DIR}/cmakeexports/BloscTargets.cmake"
|
||||
+ NAMESPACE Blosc::)
|
||||
+
|
||||
+ install(EXPORT BloscTargets
|
||||
+ FILE BloscTargets.cmake
|
||||
+ NAMESPACE Blosc::
|
||||
+ DESTINATION lib/cmake/Blosc
|
||||
+ EXPORT_LINK_INTERFACE_LIBRARIES)
|
||||
+
|
||||
+ install(FILES
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/cmakeexports/BloscConfig.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/cmakeexports/BloscConfigVersion.cmake"
|
||||
+ DESTINATION lib/cmake/Blosc COMPONENT DEV)
|
||||
+
|
||||
add_custom_target(uninstall
|
||||
COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
|
||||
endif()
|
||||
diff --git a/blosc/CMakeLists.txt b/blosc/CMakeLists.txt
|
||||
index 1d1bebe..f554abe 100644
|
||||
--- a/blosc/CMakeLists.txt
|
||||
+++ b/blosc/CMakeLists.txt
|
||||
@@ -1,52 +1,11 @@
|
||||
# a simple way to detect that we are using CMAKE
|
||||
add_definitions(-DUSING_CMAKE)
|
||||
|
||||
-set(INTERNAL_LIBS ${PROJECT_SOURCE_DIR}/internal-complibs)
|
||||
-
|
||||
# Hide symbols by default unless they're specifically exported.
|
||||
# This makes it easier to keep the set of exported symbols the
|
||||
# same across all compilers/platforms.
|
||||
set(CMAKE_C_VISIBILITY_PRESET hidden)
|
||||
|
||||
-# includes
|
||||
-set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
-if(NOT DEACTIVATE_LZ4)
|
||||
- if (LZ4_FOUND)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${LZ4_INCLUDE_DIR})
|
||||
- else(LZ4_FOUND)
|
||||
- set(LZ4_LOCAL_DIR ${INTERNAL_LIBS}/lz4-1.9.1)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${LZ4_LOCAL_DIR})
|
||||
- endif(LZ4_FOUND)
|
||||
-endif(NOT DEACTIVATE_LZ4)
|
||||
-
|
||||
-if(NOT DEACTIVATE_SNAPPY)
|
||||
- if (SNAPPY_FOUND)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${SNAPPY_INCLUDE_DIR})
|
||||
- else(SNAPPY_FOUND)
|
||||
- set(SNAPPY_LOCAL_DIR ${INTERNAL_LIBS}/snappy-1.1.1)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${SNAPPY_LOCAL_DIR})
|
||||
- endif(SNAPPY_FOUND)
|
||||
-endif(NOT DEACTIVATE_SNAPPY)
|
||||
-
|
||||
-if(NOT DEACTIVATE_ZLIB)
|
||||
- if (ZLIB_FOUND)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${ZLIB_INCLUDE_DIR})
|
||||
- else(ZLIB_FOUND)
|
||||
- set(ZLIB_LOCAL_DIR ${INTERNAL_LIBS}/zlib-1.2.8)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${ZLIB_LOCAL_DIR})
|
||||
- endif(ZLIB_FOUND)
|
||||
-endif(NOT DEACTIVATE_ZLIB)
|
||||
-
|
||||
-if (NOT DEACTIVATE_ZSTD)
|
||||
- if (ZSTD_FOUND)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${ZSTD_INCLUDE_DIR})
|
||||
- else (ZSTD_FOUND)
|
||||
- set(ZSTD_LOCAL_DIR ${INTERNAL_LIBS}/zstd-1.4.1)
|
||||
- set(BLOSC_INCLUDE_DIRS ${BLOSC_INCLUDE_DIRS} ${ZSTD_LOCAL_DIR} ${ZSTD_LOCAL_DIR}/common)
|
||||
- endif (ZSTD_FOUND)
|
||||
-endif (NOT DEACTIVATE_ZSTD)
|
||||
-
|
||||
-include_directories(${BLOSC_INCLUDE_DIRS})
|
||||
|
||||
# library sources
|
||||
set(SOURCES blosc.c blosclz.c fastcopy.c shuffle-generic.c bitshuffle-generic.c
|
||||
@@ -73,53 +32,13 @@ if(WIN32)
|
||||
message(STATUS "using the internal pthread library for win32 systems.")
|
||||
set(SOURCES ${SOURCES} win32/pthread.c)
|
||||
else(NOT Threads_FOUND)
|
||||
- set(LIBS ${LIBS} ${CMAKE_THREAD_LIBS_INIT})
|
||||
+ list(APPEND LIBS Threads::Threads)
|
||||
endif(NOT Threads_FOUND)
|
||||
else(WIN32)
|
||||
find_package(Threads REQUIRED)
|
||||
- set(LIBS ${LIBS} ${CMAKE_THREAD_LIBS_INIT})
|
||||
+ list(APPEND LIBS Threads::Threads)
|
||||
endif(WIN32)
|
||||
|
||||
-if(NOT DEACTIVATE_LZ4)
|
||||
- if(LZ4_FOUND)
|
||||
- set(LIBS ${LIBS} ${LZ4_LIBRARY})
|
||||
- else(LZ4_FOUND)
|
||||
- file(GLOB LZ4_FILES ${LZ4_LOCAL_DIR}/*.c)
|
||||
- set(SOURCES ${SOURCES} ${LZ4_FILES})
|
||||
- endif(LZ4_FOUND)
|
||||
-endif(NOT DEACTIVATE_LZ4)
|
||||
-
|
||||
-if(NOT DEACTIVATE_SNAPPY)
|
||||
- if(SNAPPY_FOUND)
|
||||
- set(LIBS ${LIBS} ${SNAPPY_LIBRARY})
|
||||
- else(SNAPPY_FOUND)
|
||||
- file(GLOB SNAPPY_FILES ${SNAPPY_LOCAL_DIR}/*.cc)
|
||||
- set(SOURCES ${SOURCES} ${SNAPPY_FILES})
|
||||
- endif(SNAPPY_FOUND)
|
||||
-endif(NOT DEACTIVATE_SNAPPY)
|
||||
-
|
||||
-if(NOT DEACTIVATE_ZLIB)
|
||||
- if(ZLIB_FOUND)
|
||||
- set(LIBS ${LIBS} ${ZLIB_LIBRARY})
|
||||
- else(ZLIB_FOUND)
|
||||
- file(GLOB ZLIB_FILES ${ZLIB_LOCAL_DIR}/*.c)
|
||||
- set(SOURCES ${SOURCES} ${ZLIB_FILES})
|
||||
- endif(ZLIB_FOUND)
|
||||
-endif(NOT DEACTIVATE_ZLIB)
|
||||
-
|
||||
-if (NOT DEACTIVATE_ZSTD)
|
||||
- if (ZSTD_FOUND)
|
||||
- set(LIBS ${LIBS} ${ZSTD_LIBRARY})
|
||||
- else (ZSTD_FOUND)
|
||||
- file(GLOB ZSTD_FILES
|
||||
- ${ZSTD_LOCAL_DIR}/common/*.c
|
||||
- ${ZSTD_LOCAL_DIR}/compress/*.c
|
||||
- ${ZSTD_LOCAL_DIR}/decompress/*.c)
|
||||
- set(SOURCES ${SOURCES} ${ZSTD_FILES})
|
||||
- endif (ZSTD_FOUND)
|
||||
-endif (NOT DEACTIVATE_ZSTD)
|
||||
-
|
||||
-
|
||||
# targets
|
||||
if (BUILD_SHARED)
|
||||
add_library(blosc_shared SHARED ${SOURCES})
|
||||
@@ -191,14 +110,17 @@ if (BUILD_TESTS)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
+add_library(blosc INTERFACE)
|
||||
+
|
||||
if (BUILD_SHARED)
|
||||
- target_link_libraries(blosc_shared ${LIBS})
|
||||
- target_include_directories(blosc_shared PUBLIC ${BLOSC_INCLUDE_DIRS})
|
||||
+ target_link_libraries(blosc_shared PRIVATE ${LIBS})
|
||||
+ target_include_directories(blosc_shared PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
|
||||
+ target_link_libraries(blosc INTERFACE blosc_shared)
|
||||
endif()
|
||||
|
||||
if (BUILD_TESTS)
|
||||
- target_link_libraries(blosc_shared_testing ${LIBS})
|
||||
- target_include_directories(blosc_shared_testing PUBLIC ${BLOSC_INCLUDE_DIRS})
|
||||
+ target_link_libraries(blosc_shared_testing PRIVATE ${LIBS})
|
||||
+ target_include_directories(blosc_shared_testing PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
if(BUILD_STATIC)
|
||||
@@ -207,17 +129,31 @@ if(BUILD_STATIC)
|
||||
if (MSVC)
|
||||
set_target_properties(blosc_static PROPERTIES PREFIX lib)
|
||||
endif()
|
||||
- target_link_libraries(blosc_static ${LIBS})
|
||||
- target_include_directories(blosc_static PUBLIC ${BLOSC_INCLUDE_DIRS})
|
||||
+ # With the static library, cmake has to deal with transitive dependencies
|
||||
+ target_link_libraries(blosc_static PRIVATE ${LIBS})
|
||||
+ target_include_directories(blosc_static PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>)
|
||||
+ if (NOT BUILD_SHARED)
|
||||
+ target_link_libraries(blosc INTERFACE blosc_static)
|
||||
+ endif()
|
||||
endif(BUILD_STATIC)
|
||||
|
||||
+
|
||||
# install
|
||||
if(BLOSC_INSTALL)
|
||||
install(FILES blosc.h blosc-export.h DESTINATION include COMPONENT DEV)
|
||||
+ set(_inst_libs "blosc")
|
||||
if(BUILD_SHARED)
|
||||
- install(TARGETS blosc_shared DESTINATION ${lib_dir} COMPONENT LIB)
|
||||
+ list(APPEND _inst_libs blosc_shared)
|
||||
endif(BUILD_SHARED)
|
||||
if(BUILD_STATIC)
|
||||
- install(TARGETS blosc_static DESTINATION ${lib_dir} COMPONENT DEV)
|
||||
+ list(APPEND _inst_libs blosc_static)
|
||||
endif(BUILD_STATIC)
|
||||
+
|
||||
+ install(TARGETS ${_inst_libs}
|
||||
+ EXPORT BloscTargets
|
||||
+ LIBRARY DESTINATION ${lib_dir}
|
||||
+ ARCHIVE DESTINATION ${lib_dir}
|
||||
+ RUNTIME DESTINATION bin
|
||||
+ COMPONENT DEV
|
||||
+ INCLUDES DESTINATION include)
|
||||
endif(BLOSC_INSTALL)
|
||||
diff --git a/cmake/FindLZ4.cmake b/cmake/FindLZ4.cmake
|
||||
index e581a80..05de6ef 100644
|
||||
--- a/cmake/FindLZ4.cmake
|
||||
+++ b/cmake/FindLZ4.cmake
|
||||
@@ -5,6 +5,10 @@ find_library(LZ4_LIBRARY NAMES lz4)
|
||||
if (LZ4_INCLUDE_DIR AND LZ4_LIBRARY)
|
||||
set(LZ4_FOUND TRUE)
|
||||
message(STATUS "Found LZ4 library: ${LZ4_LIBRARY}")
|
||||
+ add_library(LZ4::LZ4 UNKNOWN IMPORTED)
|
||||
+ set_target_properties(LZ4::LZ4 PROPERTIES
|
||||
+ IMPORTED_LOCATION ${LZ4_LIBRARY}
|
||||
+ INTERFACE_INCLUDE_DIRECTORIES ${LZ4_INCLUDE_DIR})
|
||||
else ()
|
||||
message(STATUS "No LZ4 library found. Using internal sources.")
|
||||
-endif ()
|
||||
+endif ()
|
||||
\ No newline at end of file
|
||||
diff --git a/cmake/FindSnappy.cmake b/cmake/FindSnappy.cmake
|
||||
index 688d4d5..21dbee1 100644
|
||||
--- a/cmake/FindSnappy.cmake
|
||||
+++ b/cmake/FindSnappy.cmake
|
||||
@@ -3,8 +3,12 @@ find_path(SNAPPY_INCLUDE_DIR snappy-c.h)
|
||||
find_library(SNAPPY_LIBRARY NAMES snappy)
|
||||
|
||||
if (SNAPPY_INCLUDE_DIR AND SNAPPY_LIBRARY)
|
||||
- set(SNAPPY_FOUND TRUE)
|
||||
+ set(Snappy_FOUND TRUE)
|
||||
+ add_library(Snappy::snappy UNKNOWN IMPORTED)
|
||||
+ set_target_properties(Snappy::snappy PROPERTIES
|
||||
+ IMPORTED_LOCATION ${SNAPPY_LIBRARY}
|
||||
+ INTERFACE_INCLUDE_DIRECTORIES ${SNAPPY_INCLUDE_DIR})
|
||||
message(STATUS "Found SNAPPY library: ${SNAPPY_LIBRARY}")
|
||||
else ()
|
||||
message(STATUS "No snappy found. Using internal sources.")
|
||||
-endif ()
|
||||
+endif ()
|
||||
\ No newline at end of file
|
||||
diff --git a/cmake/FindZstd.cmake b/cmake/FindZstd.cmake
|
||||
index 7db4bb9..cabc2f8 100644
|
||||
--- a/cmake/FindZstd.cmake
|
||||
+++ b/cmake/FindZstd.cmake
|
||||
@@ -3,8 +3,12 @@ find_path(ZSTD_INCLUDE_DIR zstd.h)
|
||||
find_library(ZSTD_LIBRARY NAMES zstd)
|
||||
|
||||
if (ZSTD_INCLUDE_DIR AND ZSTD_LIBRARY)
|
||||
- set(ZSTD_FOUND TRUE)
|
||||
+ set(Zstd_FOUND TRUE)
|
||||
+ add_library(Zstd::Zstd UNKNOWN IMPORTED)
|
||||
+ set_target_properties(Zstd::Zstd PROPERTIES
|
||||
+ IMPORTED_LOCATION ${ZSTD_LIBRARY}
|
||||
+ INTERFACE_INCLUDE_DIRECTORIES ${ZSTD_INCLUDE_DIR})
|
||||
message(STATUS "Found Zstd library: ${ZSTD_LIBRARY}")
|
||||
else ()
|
||||
message(STATUS "No Zstd library found. Using internal sources.")
|
||||
-endif ()
|
||||
+endif ()
|
||||
\ No newline at end of file
|
||||
diff --git a/cmake_config.cmake.in b/cmake_config.cmake.in
|
||||
new file mode 100644
|
||||
index 0000000..0f6af24
|
||||
--- /dev/null
|
||||
+++ b/cmake_config.cmake.in
|
||||
@@ -0,0 +1,24 @@
|
||||
+include(CMakeFindDependencyMacro)
|
||||
+
|
||||
+include("${CMAKE_CURRENT_LIST_DIR}/BloscTargets.cmake")
|
||||
+
|
||||
+function(_blosc_remap_configs from_Cfg to_Cfg)
|
||||
+ string(TOUPPER ${from_Cfg} from_CFG)
|
||||
+ string(TOLOWER ${from_Cfg} from_cfg)
|
||||
+
|
||||
+ if(NOT EXISTS ${CMAKE_CURRENT_LIST_DIR}/BloscTargets-${from_cfg}.cmake)
|
||||
+ foreach(tgt IN ITEMS blosc_static blosc_shared blosc)
|
||||
+ if(TARGET Blosc::${tgt})
|
||||
+ set_target_properties(Blosc::${tgt} PROPERTIES
|
||||
+ MAP_IMPORTED_CONFIG_${from_CFG} ${to_Cfg})
|
||||
+ endif()
|
||||
+ endforeach()
|
||||
+ endif()
|
||||
+endfunction()
|
||||
+
|
||||
+# MSVC will try to link RelWithDebInfo or MinSizeRel target with debug config
|
||||
+# if no matching installation is present which would result in link errors.
|
||||
+if(MSVC)
|
||||
+ _blosc_remap_configs(RelWithDebInfo Release)
|
||||
+ _blosc_remap_configs(MinSizeRel Release)
|
||||
+endif()
|
||||
diff --git a/internal-complibs/CMakeLists.txt b/internal-complibs/CMakeLists.txt
|
||||
new file mode 100644
|
||||
index 0000000..4586efa
|
||||
--- /dev/null
|
||||
+++ b/internal-complibs/CMakeLists.txt
|
||||
@@ -0,0 +1,35 @@
|
||||
+macro(add_lib_target pkg tgt incdir files)
|
||||
+ string(TOUPPER ${pkg} TGT)
|
||||
+ if(NOT DEACTIVATE_${TGT} AND NOT ${pkg}_FOUND)
|
||||
+ add_library(${tgt}_objs OBJECT ${files})
|
||||
+ add_library(${tgt} INTERFACE)
|
||||
+ target_include_directories(${tgt}_objs PRIVATE $<BUILD_INTERFACE:${incdir}>)
|
||||
+ target_include_directories(${tgt} INTERFACE $<BUILD_INTERFACE:${incdir}>)
|
||||
+ #set_target_properties(${tgt} PROPERTIES INTERFACE_SOURCES "$<TARGET_OBJECTS:${tgt}_objs>")
|
||||
+ set_target_properties(${tgt}_objs PROPERTIES POSITION_INDEPENDENT_CODE ON)
|
||||
+ target_sources(${tgt} INTERFACE "$<BUILD_INTERFACE:$<TARGET_OBJECTS:${tgt}_objs>>")
|
||||
+ add_library(${pkg}::${tgt} ALIAS ${tgt})
|
||||
+
|
||||
+ # This creates dummy (empty) interface targets in the exported config.
|
||||
+ install(TARGETS ${tgt} EXPORT BloscTargets INCLUDES DESTINATION include)
|
||||
+ endif()
|
||||
+ unset(TGT)
|
||||
+endmacro()
|
||||
+
|
||||
+set(ZLIB_DIR ${CMAKE_CURRENT_SOURCE_DIR}/zlib-1.2.8)
|
||||
+file(GLOB ZLIB_FILES ${ZLIB_DIR}/*.c)
|
||||
+add_lib_target(ZLIB ZLIB ${ZLIB_DIR} "${ZLIB_FILES}")
|
||||
+
|
||||
+set(SNAPPY_DIR ${CMAKE_CURRENT_SOURCE_DIR}/snappy-1.1.1)
|
||||
+file(GLOB SNAPPY_FILES ${SNAPPY_DIR}/*.cc)
|
||||
+add_lib_target(Snappy snappy ${SNAPPY_DIR} "${SNAPPY_FILES}")
|
||||
+
|
||||
+set(LZ4_DIR ${CMAKE_CURRENT_SOURCE_DIR}/lz4-1.9.1)
|
||||
+file(GLOB LZ4_FILES ${LZ4_DIR}/*.c)
|
||||
+add_lib_target(LZ4 LZ4 ${LZ4_DIR} "${LZ4_FILES}")
|
||||
+
|
||||
+set(ZSTD_DIR ${CMAKE_CURRENT_SOURCE_DIR}/zstd-1.4.1)
|
||||
+file(GLOB ZSTD_FILES ${ZSTD_DIR}/common/*.c ${ZSTD_DIR}/compress/*.c ${ZSTD_DIR}/decompress/*.c)
|
||||
+add_lib_target(Zstd Zstd ${ZSTD_DIR} "${ZSTD_FILES}")
|
||||
+target_include_directories(Zstd INTERFACE $<BUILD_INTERFACE:${ZSTD_DIR}/common>)
|
||||
+target_include_directories(Zstd_objs PRIVATE $<BUILD_INTERFACE:${ZSTD_DIR}/common>)
|
||||
\ No newline at end of file
|
||||
--
|
||||
2.16.2.windows.1

deps/deps-linux.cmake (vendored, 8 changes)

@ -5,11 +5,11 @@ include("deps-unix-common.cmake")

ExternalProject_Add(dep_boost
EXCLUDE_FROM_ALL 1
URL "https://dl.bintray.com/boostorg/release/1.66.0/source/boost_1_66_0.tar.gz"
URL_HASH SHA256=bd0df411efd9a585e5a2212275f8762079fed8842264954675a4fddc46cfcf60
URL "https://dl.bintray.com/boostorg/release/1.70.0/source/boost_1_70_0.tar.gz"
URL_HASH SHA256=882b48708d211a5f48e60b0124cf5863c1534cd544ecd0664bb534a4b5d506e9
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND ./bootstrap.sh
--with-libraries=system,filesystem,thread,log,locale,regex
--with-libraries=system,iostreams,filesystem,thread,log,locale,regex
"--prefix=${DESTDIR}/usr/local"
BUILD_COMMAND ./b2
-j ${NPROC}

@ -123,3 +123,5 @@ ExternalProject_Add(dep_wxwidgets
BUILD_COMMAND make "-j${NPROC}" && make -C locale allmo
INSTALL_COMMAND make install
)

add_dependencies(dep_openvdb dep_boost)
deps/deps-macos.cmake (vendored, 23 changes)

@ -6,7 +6,7 @@ set(DEP_WERRORS_SDK "-Werror=partial-availability -Werror=unguarded-availability
set(DEP_CMAKE_OPTS
"-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
"-DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}"
"-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}"
"-DCMAKE_OSX_DEPLOYMENT_TARGET=${DEP_OSX_TARGET}"
"-DCMAKE_CXX_FLAGS=${DEP_WERRORS_SDK}"
"-DCMAKE_C_FLAGS=${DEP_WERRORS_SDK}"
)

@ -14,28 +14,27 @@ set(DEP_CMAKE_OPTS
include("deps-unix-common.cmake")


set(DEP_BOOST_OSX_TARGET "")
if (CMAKE_OSX_DEPLOYMENT_TARGET)
set(DEP_BOOST_OSX_TARGET "-mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif ()

ExternalProject_Add(dep_boost
EXCLUDE_FROM_ALL 1
URL "https://dl.bintray.com/boostorg/release/1.66.0/source/boost_1_66_0.tar.gz"
URL_HASH SHA256=bd0df411efd9a585e5a2212275f8762079fed8842264954675a4fddc46cfcf60
URL "https://dl.bintray.com/boostorg/release/1.71.0/source/boost_1_71_0.tar.gz"
URL_HASH SHA256=96b34f7468f26a141f6020efb813f1a2f3dfb9797ecf76a7d7cbd843cc95f5bd
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND ./bootstrap.sh
--with-libraries=system,filesystem,thread,log,locale,regex
--with-toolset=clang
--with-libraries=system,iostreams,filesystem,thread,log,locale,regex
"--prefix=${DESTDIR}/usr/local"
BUILD_COMMAND ./b2
-j ${NPROC}
--reconfigure
toolset=clang
link=static
variant=release
threading=multi
boost.locale.icu=off
"cflags=-fPIC ${DEP_BOOST_OSX_TARGET}"
"cxxflags=-fPIC ${DEP_BOOST_OSX_TARGET}"
"cflags=-fPIC -mmacosx-version-min=${DEP_OSX_TARGET}"
"cxxflags=-fPIC -mmacosx-version-min=${DEP_OSX_TARGET}"
"mflags=-fPIC -mmacosx-version-min=${DEP_OSX_TARGET}"
"mmflags=-fPIC -mmacosx-version-min=${DEP_OSX_TARGET}"
install
INSTALL_COMMAND "" # b2 does that already
)

@ -114,3 +113,5 @@ ExternalProject_Add(dep_wxwidgets
BUILD_COMMAND make "-j${NPROC}" && PATH=/usr/local/opt/gettext/bin/:$ENV{PATH} make -C locale allmo
INSTALL_COMMAND make install
)

add_dependencies(dep_openvdb dep_boost)
deps/deps-unix-common.cmake (vendored, 81 changes)

@ -7,6 +7,8 @@ else ()
set(TBB_MINGW_WORKAROUND "")
endif ()

find_package(ZLIB REQUIRED)

ExternalProject_Add(dep_tbb
EXCLUDE_FROM_ALL 1
URL "https://github.com/wjakob/tbb/archive/a0dc9bf76d0120f917b641ed095360448cabc85b.tar.gz"

@ -53,40 +55,67 @@ find_package(Git REQUIRED)

ExternalProject_Add(dep_qhull
EXCLUDE_FROM_ALL 1
URL "https://github.com/qhull/qhull/archive/v7.2.1.tar.gz"
URL_HASH SHA256=6fc251e0b75467e00943bfb7191e986fce0e1f8f6f0251f9c6ce5a843821ea78
URL "https://github.com/qhull/qhull/archive/v7.3.2.tar.gz"
URL_HASH SHA256=619c8a954880d545194bc03359404ef36a1abd2dde03678089459757fd790cb0
CMAKE_ARGS
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
${DEP_CMAKE_OPTS}
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-space-change --ignore-whitespace ${CMAKE_CURRENT_SOURCE_DIR}/qhull-mods.patch
UPDATE_COMMAND ""
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/qhull-mods.patch
)

ExternalProject_Add(dep_libigl
ExternalProject_Add(dep_blosc
EXCLUDE_FROM_ALL 1
URL "https://github.com/libigl/libigl/archive/v2.0.0.tar.gz"
URL_HASH SHA256=42518e6b106c7209c73435fd260ed5d34edeb254852495b4c95dce2d95401328
GIT_REPOSITORY https://github.com/Blosc/c-blosc.git
GIT_TAG e63775855294b50820ef44d1b157f4de1cc38d3e #v1.17.0
DEPENDS
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
-DLIBIGL_BUILD_PYTHON=OFF
-DLIBIGL_BUILD_TESTS=OFF
-DLIBIGL_BUILD_TUTORIALS=OFF
-DLIBIGL_USE_STATIC_LIBRARY=OFF #${DEP_BUILD_IGL_STATIC}
-DLIBIGL_WITHOUT_COPYLEFT=OFF
-DLIBIGL_WITH_CGAL=OFF
-DLIBIGL_WITH_COMISO=OFF
-DLIBIGL_WITH_CORK=OFF
-DLIBIGL_WITH_EMBREE=OFF
-DLIBIGL_WITH_MATLAB=OFF
-DLIBIGL_WITH_MOSEK=OFF
-DLIBIGL_WITH_OPENGL=OFF
-DLIBIGL_WITH_OPENGL_GLFW=OFF
-DLIBIGL_WITH_OPENGL_GLFW_IMGUI=OFF
-DLIBIGL_WITH_PNG=OFF
-DLIBIGL_WITH_PYTHON=OFF
-DLIBIGL_WITH_TETGEN=OFF
-DLIBIGL_WITH_TRIANGLE=OFF
-DLIBIGL_WITH_XML=OFF
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-space-change --ignore-whitespace ${CMAKE_CURRENT_SOURCE_DIR}/igl-fixes.patch
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCMAKE_DEBUG_POSTFIX=d
-DBUILD_SHARED=OFF
-DBUILD_STATIC=ON
-DBUILD_TESTS=OFF
-DBUILD_BENCHMARKS=OFF
-DPREFER_EXTERNAL_ZLIB=ON
UPDATE_COMMAND ""
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/blosc-mods.patch
)

ExternalProject_Add(dep_openexr
EXCLUDE_FROM_ALL 1
GIT_REPOSITORY https://github.com/openexr/openexr.git
GIT_TAG eae0e337c9f5117e78114fd05f7a415819df413a #v2.4.0
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DBUILD_TESTING=OFF
-DPYILMBASE_ENABLE:BOOL=OFF
-DOPENEXR_VIEWERS_ENABLE:BOOL=OFF
-DOPENEXR_BUILD_UTILS:BOOL=OFF
UPDATE_COMMAND ""
)

ExternalProject_Add(dep_openvdb
EXCLUDE_FROM_ALL 1
GIT_REPOSITORY https://github.com/AcademySoftwareFoundation/openvdb.git
GIT_TAG aebaf8d95be5e57fd33949281ec357db4a576c2e #v6.2.1
DEPENDS dep_blosc dep_openexr dep_tbb
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
-DCMAKE_DEBUG_POSTFIX=d
-DCMAKE_PREFIX_PATH=${DESTDIR}/usr/local
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DOPENVDB_BUILD_PYTHON_MODULE=OFF
-DUSE_BLOSC=ON
-DOPENVDB_CORE_SHARED=OFF
-DOPENVDB_CORE_STATIC=ON
-DTBB_STATIC=ON
-DOPENVDB_BUILD_VDB_PRINT=ON
UPDATE_COMMAND ""
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/openvdb-mods.patch
)

deps/deps-windows.cmake (vendored, 236 changes)
|
@ -43,6 +43,18 @@ else ()
|
|||
set(DEP_BOOST_DEBUG "")
|
||||
endif ()
|
||||
|
||||
macro(add_debug_dep _dep)
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(${_dep} BINARY_DIR)
|
||||
ExternalProject_Add_Step(${_dep} build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
endmacro()
|
||||
|
||||
ExternalProject_Add(dep_boost
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://dl.bintray.com/boostorg/release/1.70.0/source/boost_1_70_0.tar.gz"
|
||||
|
@ -52,6 +64,7 @@ ExternalProject_Add(dep_boost
|
|||
BUILD_COMMAND b2.exe
|
||||
-j "${NPROC}"
|
||||
--with-system
|
||||
--with-iostreams
|
||||
--with-filesystem
|
||||
--with-thread
|
||||
--with-log
|
||||
|
@ -68,7 +81,6 @@ ExternalProject_Add(dep_boost
|
|||
INSTALL_COMMAND "" # b2 does that already
|
||||
)
|
||||
|
||||
|
||||
ExternalProject_Add(dep_tbb
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://github.com/wjakob/tbb/archive/a0dc9bf76d0120f917b641ed095360448cabc85b.tar.gz"
|
||||
|
@ -83,41 +95,25 @@ ExternalProject_Add(dep_tbb
|
|||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_tbb BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_tbb build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
add_debug_dep(dep_tbb)
|
||||
|
||||
ExternalProject_Add(dep_gtest
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://github.com/google/googletest/archive/release-1.8.1.tar.gz"
|
||||
URL_HASH SHA256=9bf1fe5182a604b4135edc1a425ae356c9ad15e9b23f9f12a02e80184c3a249c
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_GENERATOR_PLATFORM "${DEP_PLATFORM}"
|
||||
CMAKE_ARGS
|
||||
-DBUILD_GMOCK=OFF
|
||||
-Dgtest_force_shared_crt=ON
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
"-DCMAKE_INSTALL_PREFIX:PATH=${DESTDIR}\\usr\\local"
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_gtest BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_gtest build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
# ExternalProject_Add(dep_gtest
|
||||
# EXCLUDE_FROM_ALL 1
|
||||
# URL "https://github.com/google/googletest/archive/release-1.8.1.tar.gz"
|
||||
# URL_HASH SHA256=9bf1fe5182a604b4135edc1a425ae356c9ad15e9b23f9f12a02e80184c3a249c
|
||||
# CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
# CMAKE_GENERATOR_PLATFORM "${DEP_PLATFORM}"
|
||||
# CMAKE_ARGS
|
||||
# -DBUILD_GMOCK=OFF
|
||||
# -Dgtest_force_shared_crt=ON
|
||||
# -DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
# "-DCMAKE_INSTALL_PREFIX:PATH=${DESTDIR}\\usr\\local"
|
||||
# BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
# INSTALL_COMMAND ""
|
||||
# )
|
||||
|
||||
# add_debug_dep(dep_gtest)
|
||||
|
||||
ExternalProject_Add(dep_cereal
|
||||
EXCLUDE_FROM_ALL 1
|
||||
|
@ -132,7 +128,6 @@ ExternalProject_Add(dep_cereal
|
|||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
|
||||
ExternalProject_Add(dep_nlopt
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://github.com/stevengj/nlopt/archive/v2.5.0.tar.gz"
|
||||
|
@ -151,16 +146,8 @@ ExternalProject_Add(dep_nlopt
|
|||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_nlopt BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_nlopt build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
add_debug_dep(dep_nlopt)
|
||||
|
||||
ExternalProject_Add(dep_zlib
|
||||
EXCLUDE_FROM_ALL 1
|
||||
|
@ -176,15 +163,9 @@ ExternalProject_Add(dep_zlib
|
|||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_zlib BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_zlib build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
add_debug_dep(dep_zlib)
|
||||
|
||||
# The following steps are unfortunately needed to remove the _static suffix on libraries
|
||||
ExternalProject_Add_Step(dep_zlib fix_static
|
||||
DEPENDEES install
|
||||
|
@ -199,7 +180,6 @@ if (${DEP_DEBUG})
|
|||
)
|
||||
endif ()
|
||||
|
||||
|
||||
if (${DEPS_BITS} EQUAL 32)
|
||||
set(DEP_LIBCURL_TARGET "x86")
|
||||
else ()
|
||||
|
@ -238,29 +218,21 @@ find_package(Git REQUIRED)
|
|||
|
||||
ExternalProject_Add(dep_qhull
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://github.com/qhull/qhull/archive/v7.2.1.tar.gz"
|
||||
URL_HASH SHA256=6fc251e0b75467e00943bfb7191e986fce0e1f8f6f0251f9c6ce5a843821ea78
|
||||
URL "https://github.com/qhull/qhull/archive/v7.3.2.tar.gz"
|
||||
URL_HASH SHA256=619c8a954880d545194bc03359404ef36a1abd2dde03678089459757fd790cb0
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DCMAKE_DEBUG_POSTFIX=d
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-space-change --ignore-whitespace ${CMAKE_CURRENT_SOURCE_DIR}/qhull-mods.patch
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/qhull-mods.patch
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_qhull BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_qhull build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
add_debug_dep(dep_qhull)
|
||||
|
||||
if (${DEPS_BITS} EQUAL 32)
|
||||
set(DEP_WXWIDGETS_TARGET "")
|
||||
|
@ -272,49 +244,6 @@ endif ()
|
|||
|
||||
find_package(Git REQUIRED)
|
||||
|
||||
ExternalProject_Add(dep_libigl
|
||||
EXCLUDE_FROM_ALL 1
|
||||
URL "https://github.com/libigl/libigl/archive/v2.0.0.tar.gz"
|
||||
URL_HASH SHA256=42518e6b106c7209c73435fd260ed5d34edeb254852495b4c95dce2d95401328
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
|
||||
-DLIBIGL_BUILD_PYTHON=OFF
|
||||
-DLIBIGL_BUILD_TESTS=OFF
|
||||
-DLIBIGL_BUILD_TUTORIALS=OFF
|
||||
-DLIBIGL_USE_STATIC_LIBRARY=OFF #${DEP_BUILD_IGL_STATIC}
|
||||
-DLIBIGL_WITHOUT_COPYLEFT=OFF
|
||||
-DLIBIGL_WITH_CGAL=OFF
|
||||
-DLIBIGL_WITH_COMISO=OFF
|
||||
-DLIBIGL_WITH_CORK=OFF
|
||||
-DLIBIGL_WITH_EMBREE=OFF
|
||||
-DLIBIGL_WITH_MATLAB=OFF
|
||||
-DLIBIGL_WITH_MOSEK=OFF
|
||||
-DLIBIGL_WITH_OPENGL=OFF
|
||||
-DLIBIGL_WITH_OPENGL_GLFW=OFF
|
||||
-DLIBIGL_WITH_OPENGL_GLFW_IMGUI=OFF
|
||||
-DLIBIGL_WITH_PNG=OFF
|
||||
-DLIBIGL_WITH_PYTHON=OFF
|
||||
-DLIBIGL_WITH_TETGEN=OFF
|
||||
-DLIBIGL_WITH_TRIANGLE=OFF
|
||||
-DLIBIGL_WITH_XML=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DCMAKE_DEBUG_POSTFIX=d
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --ignore-space-change --ignore-whitespace ${CMAKE_CURRENT_SOURCE_DIR}/igl-fixes.patch
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_libigl BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_libigl build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add(dep_wxwidgets
|
||||
EXCLUDE_FROM_ALL 1
|
||||
GIT_REPOSITORY "https://github.com/prusa3d/wxWidgets"
|
||||
|
@ -337,3 +266,92 @@ if (${DEP_DEBUG})
|
|||
WORKING_DIRECTORY "${SOURCE_DIR}"
|
||||
)
|
||||
endif ()
|
||||
|
||||
ExternalProject_Add(dep_blosc
|
||||
EXCLUDE_FROM_ALL 1
|
||||
#URL https://github.com/Blosc/c-blosc/archive/v1.17.0.zip
|
||||
#URL_HASH SHA256=7463a1df566704f212263312717ab2c36b45d45cba6cd0dccebf91b2cc4b4da9
|
||||
GIT_REPOSITORY https://github.com/Blosc/c-blosc.git
|
||||
GIT_TAG e63775855294b50820ef44d1b157f4de1cc38d3e #v1.17.0
|
||||
DEPENDS dep_zlib
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_GENERATOR_PLATFORM "${DEP_PLATFORM}"
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DCMAKE_DEBUG_POSTFIX=d
|
||||
-DBUILD_SHARED=OFF
|
||||
-DBUILD_STATIC=ON
|
||||
-DBUILD_TESTS=OFF
|
||||
-DBUILD_BENCHMARKS=OFF
|
||||
-DPREFER_EXTERNAL_ZLIB=ON
|
||||
-DBLOSC_IS_SUBPROJECT:BOOL=ON
|
||||
-DBLOSC_INSTALL:BOOL=ON
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/blosc-mods.patch
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
add_debug_dep(dep_blosc)
|
||||
|
||||
ExternalProject_Add(dep_openexr
|
||||
EXCLUDE_FROM_ALL 1
|
||||
GIT_REPOSITORY https://github.com/openexr/openexr.git
|
||||
GIT_TAG eae0e337c9f5117e78114fd05f7a415819df413a #v2.4.0
|
||||
DEPENDS dep_zlib
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_GENERATOR_PLATFORM "${DEP_PLATFORM}"
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DBUILD_TESTING=OFF
|
||||
-DPYILMBASE_ENABLE:BOOL=OFF
|
||||
-DOPENEXR_VIEWERS_ENABLE:BOOL=OFF
|
||||
-DOPENEXR_BUILD_UTILS:BOOL=OFF
|
||||
UPDATE_COMMAND ""
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
add_debug_dep(dep_openexr)
|
||||
|
||||
ExternalProject_Add(dep_openvdb
|
||||
EXCLUDE_FROM_ALL 1
|
||||
#URL https://github.com/AcademySoftwareFoundation/openvdb/archive/v6.2.1.zip
|
||||
#URL_HASH SHA256=dc337399dce8e1c9f21f20e97b1ce7e4933cb0a63bb3b8b734d8fcc464aa0c48
|
||||
GIT_REPOSITORY https://github.com/AcademySoftwareFoundation/openvdb.git
|
||||
GIT_TAG aebaf8d95be5e57fd33949281ec357db4a576c2e #v6.2.1
|
||||
DEPENDS dep_blosc dep_openexr #dep_tbb dep_boost
|
||||
CMAKE_GENERATOR "${DEP_MSVC_GEN}"
|
||||
CMAKE_GENERATOR_PLATFORM "${DEP_PLATFORM}"
|
||||
CMAKE_ARGS
|
||||
-DCMAKE_INSTALL_PREFIX=${DESTDIR}/usr/local
|
||||
-DCMAKE_DEBUG_POSTFIX=d
|
||||
-DCMAKE_PREFIX_PATH=${DESTDIR}/usr/local
|
||||
-DBUILD_SHARED_LIBS=OFF
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DOPENVDB_BUILD_PYTHON_MODULE=OFF
|
||||
-DUSE_BLOSC=ON
|
||||
-DOPENVDB_CORE_SHARED=OFF
|
||||
-DOPENVDB_CORE_STATIC=ON
|
||||
-DTBB_STATIC=ON
|
||||
-DOPENVDB_BUILD_VDB_PRINT=ON
|
||||
BUILD_COMMAND msbuild /m /P:Configuration=Release INSTALL.vcxproj
|
||||
UPDATE_COMMAND ""
|
||||
PATCH_COMMAND ${GIT_EXECUTABLE} apply --whitespace=fix ${CMAKE_CURRENT_SOURCE_DIR}/openvdb-mods.patch
|
||||
INSTALL_COMMAND ""
|
||||
)
|
||||
|
||||
if (${DEP_DEBUG})
|
||||
ExternalProject_Get_Property(dep_openvdb BINARY_DIR)
|
||||
ExternalProject_Add_Step(dep_openvdb build_debug
|
||||
DEPENDEES build
|
||||
DEPENDERS install
|
||||
COMMAND ${CMAKE_COMMAND} ../dep_openvdb -DOPENVDB_BUILD_VDB_PRINT=OFF
|
||||
COMMAND msbuild /m /P:Configuration=Debug INSTALL.vcxproj
|
||||
WORKING_DIRECTORY "${BINARY_DIR}"
|
||||
)
|
||||
endif ()
|
deps/igl-fixes.patch (vendored, file removed, 128 lines)
|
@ -1,128 +0,0 @@
|
|||
diff --git a/cmake/libigl-config.cmake.in b/cmake/libigl-config.cmake.in
|
||||
index 317c745c..f9808e1e 100644
|
||||
--- a/cmake/libigl-config.cmake.in
|
||||
+++ b/cmake/libigl-config.cmake.in
|
||||
@@ -2,28 +2,28 @@
|
||||
|
||||
include(${CMAKE_CURRENT_LIST_DIR}/libigl-export.cmake)
|
||||
|
||||
-if (TARGET igl::core)
|
||||
- if (NOT TARGET Eigen3::Eigen)
|
||||
- find_package(Eigen3 QUIET)
|
||||
- if (NOT Eigen3_FOUND)
|
||||
- # try with PkgCOnfig
|
||||
- find_package(PkgConfig REQUIRED)
|
||||
- pkg_check_modules(Eigen3 QUIET IMPORTED_TARGET eigen3)
|
||||
- endif()
|
||||
-
|
||||
- if (NOT Eigen3_FOUND)
|
||||
- message(FATAL_ERROR "Could not find required dependency Eigen3")
|
||||
- set(libigl_core_FOUND FALSE)
|
||||
- else()
|
||||
- target_link_libraries(igl::core INTERFACE PkgConfig::Eigen3)
|
||||
- set(libigl_core_FOUND TRUE)
|
||||
- endif()
|
||||
- else()
|
||||
- target_link_libraries(igl::core INTERFACE Eigen3::Eigen)
|
||||
- set(libigl_core_FOUND TRUE)
|
||||
- endif()
|
||||
-
|
||||
-endif()
|
||||
+# if (TARGET igl::core)
|
||||
+# if (NOT TARGET Eigen3::Eigen)
|
||||
+# find_package(Eigen3 QUIET)
|
||||
+# if (NOT Eigen3_FOUND)
|
||||
+# # try with PkgCOnfig
|
||||
+# find_package(PkgConfig REQUIRED)
|
||||
+# pkg_check_modules(Eigen3 QUIET IMPORTED_TARGET eigen3)
|
||||
+# endif()
|
||||
+#
|
||||
+# if (NOT Eigen3_FOUND)
|
||||
+# message(FATAL_ERROR "Could not find required dependency Eigen3")
|
||||
+# set(libigl_core_FOUND FALSE)
|
||||
+# else()
|
||||
+# target_link_libraries(igl::core INTERFACE PkgConfig::Eigen3)
|
||||
+# set(libigl_core_FOUND TRUE)
|
||||
+# endif()
|
||||
+# else()
|
||||
+# target_link_libraries(igl::core INTERFACE Eigen3::Eigen)
|
||||
+# set(libigl_core_FOUND TRUE)
|
||||
+# endif()
|
||||
+#
|
||||
+# endif()
|
||||
|
||||
check_required_components(libigl)
|
||||
|
||||
diff --git a/cmake/libigl.cmake b/cmake/libigl.cmake
|
||||
index 4b11007a..47e6c395 100644
|
||||
--- a/cmake/libigl.cmake
|
||||
+++ b/cmake/libigl.cmake
|
||||
@@ -445,6 +445,7 @@ function(install_dir_files dir_name)
|
||||
if(NOT LIBIGL_USE_STATIC_LIBRARY)
|
||||
file(GLOB public_sources
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/include/igl${subpath}/*.cpp
|
||||
+ ${CMAKE_CURRENT_SOURCE_DIR}/include/igl${subpath}/*.c
|
||||
)
|
||||
endif()
|
||||
list(APPEND files_to_install ${public_sources})
|
||||
diff --git a/include/igl/AABB.cpp b/include/igl/AABB.cpp
|
||||
index 09537335..92e90cb7 100644
|
||||
--- a/include/igl/AABB.cpp
|
||||
+++ b/include/igl/AABB.cpp
|
||||
@@ -1071,5 +1071,11 @@ template void igl::AABB<Eigen::Matrix<double, -1, -1, 0, -1, -1>, 3>::init<Eigen
|
||||
// generated by autoexplicit.sh
|
||||
template void igl::AABB<Eigen::Matrix<double, -1, -1, 0, -1, -1>, 2>::init<Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&);
|
||||
template double igl::AABB<Eigen::Matrix<double, -1, -1, 0, -1, -1>, 3>::squared_distance<Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<double, 1, 3, 1, 1, 3> const&, double, int&, Eigen::PlainObjectBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> >&) const;
|
||||
+template float igl::AABB<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, 3>::squared_distance<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > >(Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::Matrix<float, 1, 3, 1, 1, 3> const&, int&, Eigen::PlainObjectBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> >&) const;
|
||||
template bool igl::AABB<Eigen::Matrix<double, -1, -1, 0, -1, -1>, 3>::intersect_ray<Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<double, 1, 3, 1, 1, 3> const&, Eigen::Matrix<double, 1, 3, 1, 1, 3> const&, igl::Hit&) const;
|
||||
+template bool igl::AABB<Eigen::Matrix<double, -1, -1, 0, -1, -1>, 3>::intersect_ray<Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<double, 1, 3, 1, 1, 3> const&, Eigen::Matrix<double, 1, 3, 1, 1, 3> const&, std::vector<igl::Hit>&) const;
|
||||
+
|
||||
+template void igl::AABB<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, 3>::init<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > >(Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&);
|
||||
+
|
||||
+template bool igl::AABB<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, 3>::intersect_ray<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > >(Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::Matrix<float, 1, 3, 1, 1, 3> const&, Eigen::Matrix<float, 1, 3, 1, 1, 3> const&, std::vector<igl::Hit, std::allocator<igl::Hit> >&) const;
|
||||
#endif
|
||||
diff --git a/include/igl/barycenter.cpp b/include/igl/barycenter.cpp
|
||||
index 065f82aa..ec2d96cd 100644
|
||||
--- a/include/igl/barycenter.cpp
|
||||
+++ b/include/igl/barycenter.cpp
|
||||
@@ -54,4 +54,6 @@ template void igl::barycenter<Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::M
|
||||
template void igl::barycenter<Eigen::Matrix<double, -1, 3, 0, -1, 3>, Eigen::Matrix<int, -1, 3, 0, -1, 3>, Eigen::Matrix<double, -1, 3, 0, -1, 3> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, 3, 0, -1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, 3, 0, -1, 3> > const&, Eigen::PlainObjectBase<Eigen::Matrix<double, -1, 3, 0, -1, 3> >&);
|
||||
template void igl::barycenter<Eigen::Matrix<double, -1, 3, 0, -1, 3>, Eigen::Matrix<int, -1, 3, 0, -1, 3>, Eigen::Matrix<double, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, 3, 0, -1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, 3, 0, -1, 3> > const&, Eigen::PlainObjectBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> >&);
|
||||
template void igl::barycenter<Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1>, Eigen::Matrix<double, -1, 2, 0, -1, 2> >(Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::PlainObjectBase<Eigen::Matrix<double, -1, 2, 0, -1, 2> >&);
|
||||
+
|
||||
+template void igl::barycenter<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, Eigen::Matrix<float, -1, 3, 0, -1, 3> >(Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::PlainObjectBase<Eigen::Matrix<float, -1, 3, 0, -1, 3> >&);
|
||||
#endif
|
||||
diff --git a/include/igl/point_simplex_squared_distance.cpp b/include/igl/point_simplex_squared_distance.cpp
|
||||
index 2b98bd28..c66d9ae1 100644
|
||||
--- a/include/igl/point_simplex_squared_distance.cpp
|
||||
+++ b/include/igl/point_simplex_squared_distance.cpp
|
||||
@@ -178,4 +178,6 @@ template void igl::point_simplex_squared_distance<3, Eigen::Matrix<double, 1, 3,
|
||||
template void igl::point_simplex_squared_distance<3, Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1>, double, Eigen::Matrix<double, 1, 3, 1, 1, 3> >(Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<int, -1, -1, 0, -1, -1>::Index, double&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> >&, Eigen::PlainObjectBase<Eigen::Matrix<double, 3, 1, 1, 1, 3> >&);
|
||||
template void igl::point_simplex_squared_distance<2, Eigen::Matrix<double, 1, 2, 1, 1, 2>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1>, double, Eigen::Matrix<double, 1, 2, 1, 1, 2> >(Eigen::MatrixBase<Eigen::Matrix<double, 1, 2, 1, 1, 2> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<int, -1, -1, 0, -1, -1>::Index, double&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 2, 1, 1, 2> >&, Eigen::PlainObjectBase<Eigen::Matrix<double, 1, 2, 1, 1, 2> >&);
|
||||
template void igl::point_simplex_squared_distance<2, Eigen::Matrix<double, 1, 2, 1, 1, 2>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1>, double, Eigen::Matrix<double, 1, 2, 1, 1, 2> >(Eigen::MatrixBase<Eigen::Matrix<double, 1, 2, 1, 1, 2> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, Eigen::Matrix<int, -1, -1, 0, -1, -1>::Index, double&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 2, 1, 1, 2> >&, Eigen::PlainObjectBase<Eigen::Matrix<double, 2, 1, 1, 1, 2> >&);
|
||||
+
|
||||
+template void igl::point_simplex_squared_distance<3, Eigen::Matrix<float, 1, 3, 1, 1, 3>, Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, float, Eigen::Matrix<float, 1, 3, 1, 1, 3> >(Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >::Index, float&, Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> >&);
|
||||
#endif
|
||||
diff --git a/include/igl/ray_box_intersect.cpp b/include/igl/ray_box_intersect.cpp
|
||||
index 4a88b89e..b547f8f8 100644
|
||||
--- a/include/igl/ray_box_intersect.cpp
|
||||
+++ b/include/igl/ray_box_intersect.cpp
|
||||
@@ -147,4 +147,6 @@ IGL_INLINE bool igl::ray_box_intersect(
|
||||
#ifdef IGL_STATIC_LIBRARY
|
||||
// Explicit template instantiation
|
||||
template bool igl::ray_box_intersect<Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, 1, 3, 1, 1, 3>, double>(Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::AlignedBox<double, 3> const&, double const&, double const&, double&, double&);
|
||||
+
|
||||
+template bool igl::ray_box_intersect<Eigen::Matrix<float, 1, 3, 1, 1, 3>, Eigen::Matrix<float, 1, 3, 1, 1, 3>, float>(Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> > const&, Eigen::AlignedBox<float, 3> const&, float const&, float const&, float&, float&);
|
||||
#endif
|
||||
diff --git a/include/igl/ray_mesh_intersect.cpp b/include/igl/ray_mesh_intersect.cpp
|
||||
index 9a70a22b..4233e722 100644
|
||||
--- a/include/igl/ray_mesh_intersect.cpp
|
||||
+++ b/include/igl/ray_mesh_intersect.cpp
|
||||
@@ -83,4 +83,7 @@ IGL_INLINE bool igl::ray_mesh_intersect(
|
||||
template bool igl::ray_mesh_intersect<Eigen::Matrix<float, 3, 1, 0, 3, 1>, Eigen::Matrix<float, 3, 1, 0, 3, 1>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<float, 3, 1, 0, 3, 1> > const&, Eigen::MatrixBase<Eigen::Matrix<float, 3, 1, 0, 3, 1> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, std::vector<igl::Hit, std::allocator<igl::Hit> >&);
|
||||
template bool igl::ray_mesh_intersect<Eigen::Matrix<float, 3, 1, 0, 3, 1>, Eigen::Matrix<float, 3, 1, 0, 3, 1>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Matrix<int, -1, -1, 0, -1, -1> >(Eigen::MatrixBase<Eigen::Matrix<float, 3, 1, 0, 3, 1> > const&, Eigen::MatrixBase<Eigen::Matrix<float, 3, 1, 0, 3, 1> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Matrix<int, -1, -1, 0, -1, -1> > const&, igl::Hit&);
|
||||
template bool igl::ray_mesh_intersect<Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Block<Eigen::Matrix<int, -1, -1, 0, -1, -1> const, 1, -1, false> >(Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Block<Eigen::Matrix<int, -1, -1, 0, -1, -1> const, 1, -1, false> > const&, igl::Hit&);
|
||||
+template bool igl::ray_mesh_intersect<Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, 1, 3, 1, 1, 3>, Eigen::Matrix<double, -1, -1, 0, -1, -1>, Eigen::Block<Eigen::Matrix<int, -1, -1, 0, -1, -1> const, 1, -1, false> >(Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<double, -1, -1, 0, -1, -1> > const&, Eigen::MatrixBase<Eigen::Block<Eigen::Matrix<int, -1, -1, 0, -1, -1> const, 1, -1, false> > const&, std::vector<igl::Hit, std::allocator<igl::Hit> >&);
|
||||
+
|
||||
+template bool igl::ray_mesh_intersect<Eigen::Matrix<float, 1, 3, 1, 1, 3>, Eigen::Matrix<float, 1, 3, 1, 1, 3>, Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> >, Eigen::Block<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > const, 1, -1, true> >(Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Matrix<float, 1, 3, 1, 1, 3> > const&, Eigen::MatrixBase<Eigen::Map<Eigen::Matrix<float, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > > const&, Eigen::MatrixBase<Eigen::Block<Eigen::Map<Eigen::Matrix<int, -1, -1, 3, -1, -1> const, 0, Eigen::Stride<0, 0> > const, 1, -1, true> > const&, std::vector<igl::Hit, std::allocator<igl::Hit> >&);
|
||||
#endif
|
deps/openvdb-mods.patch (vendored, new file, 1782 lines)
File diff suppressed because it is too large
deps/qhull-mods.patch (vendored, 144 changes)
|
@ -1,121 +1,49 @@
|
|||
From a31ae4781a4afa60e21c70e5b4ae784bcd447c8a Mon Sep 17 00:00:00 2001
|
||||
From 7f55a56b3d112f4dffbf21b1722f400c64bf03b1 Mon Sep 17 00:00:00 2001
|
||||
From: tamasmeszaros <meszaros.q@gmail.com>
|
||||
Date: Thu, 6 Jun 2019 15:41:43 +0200
|
||||
Subject: [PATCH] prusa-slicer changes
|
||||
Date: Mon, 21 Oct 2019 16:52:04 +0200
|
||||
Subject: [PATCH] Fix the build on macOS
|
||||
|
||||
---
|
||||
CMakeLists.txt | 44 +++++++++++++++++++++++++++++++++++---
|
||||
Config.cmake.in | 2 ++
|
||||
src/libqhull_r/qhull_r-exports.def | 2 ++
|
||||
src/libqhull_r/user_r.h | 2 +-
|
||||
4 files changed, 46 insertions(+), 4 deletions(-)
|
||||
create mode 100644 Config.cmake.in
|
||||
CMakeLists.txt | 10 +++++-----
|
||||
1 file changed, 5 insertions(+), 5 deletions(-)
|
||||
|
||||
diff --git a/CMakeLists.txt b/CMakeLists.txt
|
||||
index 59dff41..20c2ec5 100644
|
||||
index 07d3da2..14df8e9 100644
|
||||
--- a/CMakeLists.txt
|
||||
+++ b/CMakeLists.txt
|
||||
@@ -61,7 +61,7 @@
|
||||
# $DateTime: 2016/01/18 19:29:17 $$Author: bbarber $
|
||||
@@ -626,18 +626,18 @@ install(TARGETS ${qhull_TARGETS_INSTALL} EXPORT QhullTargets
|
||||
include(CMakePackageConfigHelpers)
|
||||
|
||||
project(qhull)
|
||||
-cmake_minimum_required(VERSION 2.6)
|
||||
+cmake_minimum_required(VERSION 3.0)
|
||||
write_basic_package_version_file(
|
||||
- "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfigVersion.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/QhullExport/QhullConfigVersion.cmake"
|
||||
VERSION ${qhull_VERSION}
|
||||
COMPATIBILITY AnyNewerVersion
|
||||
)
|
||||
|
||||
# Define qhull_VERSION in CMakeLists.txt, Makefile, qhull-exports.def, qhull_p-exports.def, qhull_r-exports.def, qhull-warn.pri
|
||||
set(qhull_VERSION2 "2015.2 2016/01/18") # not used, See global.c, global_r.c, rbox.c, rbox_r.c
|
||||
@@ -610,10 +610,48 @@ add_test(NAME user_eg3
|
||||
# Define install
|
||||
# ---------------------------------------
|
||||
export(EXPORT QhullTargets
|
||||
- FILE "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullTargets.cmake"
|
||||
+ FILE "${CMAKE_CURRENT_BINARY_DIR}/QhullExport/QhullTargets.cmake"
|
||||
NAMESPACE Qhull::
|
||||
)
|
||||
|
||||
-install(TARGETS ${qhull_TARGETS_INSTALL}
|
||||
+install(TARGETS ${qhull_TARGETS_INSTALL} EXPORT QhullTargets
|
||||
RUNTIME DESTINATION ${BIN_INSTALL_DIR}
|
||||
LIBRARY DESTINATION ${LIB_INSTALL_DIR}
|
||||
- ARCHIVE DESTINATION ${LIB_INSTALL_DIR})
|
||||
+ ARCHIVE DESTINATION ${LIB_INSTALL_DIR}
|
||||
+ INCLUDES DESTINATION include)
|
||||
+
|
||||
+include(CMakePackageConfigHelpers)
|
||||
+
|
||||
+write_basic_package_version_file(
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfigVersion.cmake"
|
||||
+ VERSION ${qhull_VERSION}
|
||||
+ COMPATIBILITY AnyNewerVersion
|
||||
+)
|
||||
+
|
||||
+export(EXPORT QhullTargets
|
||||
+ FILE "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullTargets.cmake"
|
||||
+ NAMESPACE Qhull::
|
||||
+)
|
||||
+
|
||||
+configure_file(Config.cmake.in
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfig.cmake"
|
||||
+ @ONLY
|
||||
+)
|
||||
+
|
||||
+set(ConfigPackageLocation lib/cmake/Qhull)
|
||||
+install(EXPORT QhullTargets
|
||||
+ FILE
|
||||
+ QhullTargets.cmake
|
||||
+ NAMESPACE
|
||||
+ Qhull::
|
||||
+ DESTINATION
|
||||
+ ${ConfigPackageLocation}
|
||||
+)
|
||||
+install(
|
||||
+ FILES
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfig.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfigVersion.cmake"
|
||||
+ DESTINATION
|
||||
+ ${ConfigPackageLocation}
|
||||
+ COMPONENT
|
||||
+ Devel
|
||||
+)
|
||||
configure_file(${PROJECT_SOURCE_DIR}/build/config.cmake.in
|
||||
- "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfig.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/QhullExport/QhullConfig.cmake"
|
||||
@ONLY
|
||||
)
|
||||
|
||||
install(FILES ${libqhull_HEADERS} DESTINATION ${INCLUDE_INSTALL_DIR}/libqhull)
|
||||
install(FILES ${libqhull_DOC} DESTINATION ${INCLUDE_INSTALL_DIR}/libqhull)
|
||||
diff --git a/Config.cmake.in b/Config.cmake.in
|
||||
new file mode 100644
|
||||
index 0000000..bc92bfe
|
||||
--- /dev/null
|
||||
+++ b/Config.cmake.in
|
||||
@@ -0,0 +1,2 @@
|
||||
+include("${CMAKE_CURRENT_LIST_DIR}/QhullTargets.cmake")
|
||||
+
|
||||
diff --git a/src/libqhull_r/qhull_r-exports.def b/src/libqhull_r/qhull_r-exports.def
|
||||
index 325d57c..72f6ad0 100644
|
||||
--- a/src/libqhull_r/qhull_r-exports.def
|
||||
+++ b/src/libqhull_r/qhull_r-exports.def
|
||||
@@ -185,6 +185,7 @@ qh_memsetup
|
||||
qh_memsize
|
||||
qh_memstatistics
|
||||
qh_memtotal
|
||||
+qh_memcheck
|
||||
qh_merge_degenredundant
|
||||
qh_merge_nonconvex
|
||||
qh_mergecycle
|
||||
@@ -372,6 +373,7 @@ qh_settruncate
|
||||
qh_setunique
|
||||
qh_setvoronoi_all
|
||||
qh_setzero
|
||||
+qh_setendpointer
|
||||
qh_sharpnewfacets
|
||||
qh_skipfacet
|
||||
qh_skipfilename
|
||||
diff --git a/src/libqhull_r/user_r.h b/src/libqhull_r/user_r.h
|
||||
index fc105b9..7cca65a 100644
|
||||
--- a/src/libqhull_r/user_r.h
|
||||
+++ b/src/libqhull_r/user_r.h
|
||||
@@ -139,7 +139,7 @@ Code flags --
|
||||
REALfloat = 1 all numbers are 'float' type
|
||||
= 0 all numbers are 'double' type
|
||||
*/
|
||||
-#define REALfloat 0
|
||||
+#define REALfloat 1
|
||||
|
||||
#if (REALfloat == 1)
|
||||
#define realT float
|
||||
@@ -652,8 +652,8 @@ install(EXPORT QhullTargets
|
||||
)
|
||||
install(
|
||||
FILES
|
||||
- "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfig.cmake"
|
||||
- "${CMAKE_CURRENT_BINARY_DIR}/Qhull/QhullConfigVersion.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/QhullExport/QhullConfig.cmake"
|
||||
+ "${CMAKE_CURRENT_BINARY_DIR}/QhullExport/QhullConfigVersion.cmake"
|
||||
DESTINATION
|
||||
${ConfigPackageLocation}
|
||||
COMPONENT
|
||||
--
|
||||
2.16.2.windows.1
|
||||
2.17.1
|
||||
|
||||
|
|
|
@@ -1,6 +1,6 @@
 # Dependency report for PrusaSlicer
 ## Possible dynamic linking on Linux
-* zlib: This should not be even mentioned in our cmake scripts but due to a bug in the system libraries of gtk it has to be linked to PrusaSlicer.
+* zlib: Strict dependency required from the system, linked dynamically. Many other libs depend on zlib.
 * wxWidgets: searches for wx-3.1 by default, but with cmake option `SLIC3R_WX_STABLE=ON` it will use wx-3.0 bundled with most distros.
 * libcurl
 * tbb
@@ -10,13 +10,13 @@
 * expat
 * openssl
 * nlopt
 * gtest
 * openvdb: This library depends on other libs, namely boost, zlib, openexr, blosc (not strictly), etc.

 ## External libraries in source tree
 * ad-mesh: Lots of customization; has to be bundled in the source tree.
 * avrdude: Like ad-mesh, many customizations; needs to be in the source tree.
 * clipper: An important library we have to have full control over. We also carry some slicer-specific modifications.
-* glu-libtess: This is an extract of the mesa/glu library not oficially available as a package.
+* glu-libtess: This is an extract of the mesa/glu library not officially available as a package.
 * imgui: no packages for debian, author suggests using in the source tree
 * miniz: No packages, author suggests using in the source tree
 * qhull: libqhull-dev does not contain libqhullcpp => link errors. Until it is fixed, we will use the builtin version. https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=925540
@@ -29,5 +29,6 @@
 * igl
 * nanosvg
 * agg
+* catch2: Only Arch has packages for catch2, other distros at most catch (v1.x). Being strictly header-only, we bundle this in the source tree. Used for the unit-test suites.
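Since catch2 is bundled as a header-only library, a test translation unit only needs the single header. The sketch below is a minimal, hypothetical usage example (the include path, test name and checked function are assumptions for illustration), not one of PrusaSlicer's actual tests.

// Minimal Catch2 (v2.x, single-header) usage sketch.
#define CATCH_CONFIG_MAIN   // let Catch2 provide main()
#include <catch2/catch.hpp> // the bundled copy may be included under a different path

static int triple(int x) { return 3 * x; }

TEST_CASE("triple multiplies by three", "[example]") {
    REQUIRE(triple(2) == 6);
    REQUIRE(triple(-1) == -3);
}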
|
||||
|
||||
|
||||
|
|
|
@@ -2,7 +2,7 @@
 # Building PrusaSlicer on UNIX/Linux

 PrusaSlicer uses the CMake build system and requires several dependencies.
-The dependencies can be listed in `deps/deps-linux.cmake`, although they don't necessarily need to be as recent
+The dependencies can be listed in `deps/deps-linux.cmake` and `deps/deps-unix-common.cmake`, although they don't necessarily need to be as recent
 as the versions listed - generally versions available on conservative Linux distros such as Debian stable or CentOS should suffice.

 Perl is not required any more.
|
||||
|
|
|
@@ -45,6 +45,7 @@ src/slic3r/GUI/WipeTowerDialog.cpp
src/slic3r/GUI/wxExtensions.cpp
src/slic3r/Utils/Duet.cpp
src/slic3r/Utils/OctoPrint.cpp
src/slic3r/Utils/FlashAir.cpp
src/slic3r/Utils/PresetUpdater.cpp
src/slic3r/Utils/FixModelByWin10.cpp
src/libslic3r/Zipper.cpp
|
||||
|
|
|
@ -82,6 +82,29 @@ variants = default
|
|||
technology = SLA
|
||||
family = SL1
|
||||
|
||||
[default_filaments]
|
||||
Generic PLA = 1
|
||||
Generic PLA MMU2 = 1
|
||||
Prusa PLA = 1
|
||||
Prusa PLA MMU2 = 1
|
||||
Prusament PLA = 1
|
||||
Prusament PLA MMU2 = 1
|
||||
|
||||
[default_sla_materials]
|
||||
Prusa Azure Blue Tough 0.05 = 1
|
||||
Prusa Black Tough 0.05 = 1
|
||||
Prusa Green Casting 0.05 = 1
|
||||
Prusa Grey Tough 0.05 = 1
|
||||
Prusa Maroon Tough 0.05 = 1
|
||||
Prusa Orange Tough 0.025 = 1
|
||||
Prusa Orange Tough 0.035 = 1
|
||||
Prusa Orange Tough 0.05 = 1
|
||||
Prusa Orange Tough 0.1 = 1
|
||||
Prusa Pink Tough 0.05 = 1
|
||||
Prusa Skin Tough 0.05 = 1
|
||||
Prusa Transparent Red Tough 0.05 = 1
|
||||
Prusa White Tough 0.05 = 1
|
||||
|
||||
# All presets starting with asterisk, for example *common*, are intermediate and they will
|
||||
# not make it into the user interface.
|
||||
|
||||
|
@ -1128,6 +1151,7 @@ filament_density = 3.9
|
|||
filament_colour = #804040
|
||||
filament_max_volumetric_speed = 9
|
||||
filament_notes = "List of materials tested with standard print settings:\n\nColorFabb bronzeFill\nColorFabb brassFill\nColorFabb steelFill\nColorFabb copperFill"
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb HT]
|
||||
inherits = *PET*
|
||||
|
@ -1145,11 +1169,13 @@ max_fan_speed = 20
|
|||
min_fan_speed = 10
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}45{endif}; Filament gcode"
|
||||
temperature = 270
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb PLA-PHA]
|
||||
inherits = *PLA*
|
||||
filament_cost = 55.5
|
||||
filament_density = 1.24
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb woodFill]
|
||||
inherits = *PLA*
|
||||
|
@ -1163,6 +1189,7 @@ filament_max_volumetric_speed = 10
|
|||
first_layer_temperature = 200
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 200
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb corkFill]
|
||||
inherits = *PLA*
|
||||
|
@ -1175,6 +1202,7 @@ filament_max_volumetric_speed = 6
|
|||
first_layer_temperature = 220
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 220
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb XT]
|
||||
inherits = *PET*
|
||||
|
@ -1184,6 +1212,7 @@ filament_density = 1.27
|
|||
first_layer_bed_temperature = 90
|
||||
first_layer_temperature = 260
|
||||
temperature = 270
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb XT-CF20]
|
||||
inherits = *PET*
|
||||
|
@ -1199,6 +1228,7 @@ start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{el
|
|||
temperature = 260
|
||||
filament_retract_length = nil
|
||||
filament_retract_lift = 0.2
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb nGen]
|
||||
inherits = *PET*
|
||||
|
@ -1211,6 +1241,7 @@ filament_type = NGEN
|
|||
first_layer_temperature = 240
|
||||
max_fan_speed = 35
|
||||
min_fan_speed = 20
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:ColorFabb nGen flex]
|
||||
inherits = *FLEX*
|
||||
|
@ -1231,12 +1262,14 @@ temperature = 260
|
|||
filament_retract_length = nil
|
||||
filament_retract_lift = 0
|
||||
compatible_printers_condition = nozzle_diameter[0]>0.35 and num_extruders==1 && ! (printer_notes=~/.*PRINTER_VENDOR_PRUSA3D.*/ and printer_notes=~/.*PRINTER_MODEL_MK3.*/ and single_extruder_multi_material)
|
||||
filament_vendor = ColorFabb
|
||||
|
||||
[filament:E3D Edge]
|
||||
inherits = *PET*
|
||||
filament_cost = 56.9
|
||||
filament_density = 1.26
|
||||
filament_type = EDGE
|
||||
filament_vendor = E3D
|
||||
|
||||
[filament:E3D PC-ABS]
|
||||
inherits = *ABS*
|
||||
|
@ -1245,6 +1278,7 @@ filament_type = PC
|
|||
filament_density = 1.05
|
||||
first_layer_temperature = 270
|
||||
temperature = 270
|
||||
filament_vendor = E3D
|
||||
|
||||
[filament:Fillamentum ABS]
|
||||
inherits = *ABS*
|
||||
|
@ -1252,6 +1286,7 @@ filament_cost = 32.4
|
|||
filament_density = 1.04
|
||||
first_layer_temperature = 240
|
||||
temperature = 240
|
||||
filament_vendor = Fillamentum
|
||||
|
||||
[filament:Fillamentum ASA]
|
||||
inherits = *ABS*
|
||||
|
@ -1266,6 +1301,7 @@ slowdown_below_layer_time = 15
|
|||
first_layer_temperature = 265
|
||||
temperature = 265
|
||||
filament_type = ASA
|
||||
filament_vendor = Fillamentum
|
||||
|
||||
[filament:Prusament ASA]
|
||||
inherits = *ABS*
|
||||
|
@ -1296,6 +1332,7 @@ first_layer_temperature = 275
|
|||
max_fan_speed = 50
|
||||
min_fan_speed = 50
|
||||
temperature = 275
|
||||
filament_vendor = Fillamentum
|
||||
|
||||
[filament:Fillamentum Timberfill]
|
||||
inherits = *PLA*
|
||||
|
@ -1309,24 +1346,28 @@ filament_max_volumetric_speed = 10
|
|||
first_layer_temperature = 190
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 190
|
||||
filament_vendor = Fillamentum
|
||||
|
||||
[filament:Generic ABS]
|
||||
inherits = *ABS*
|
||||
filament_cost = 27.82
|
||||
filament_density = 1.04
|
||||
filament_notes = "List of materials tested with standard ABS print settings:\n\nEsun ABS\nFil-A-Gehr ABS\nHatchboxABS\nPlasty Mladec ABS"
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Generic PET]
|
||||
inherits = *PET*
|
||||
filament_cost = 27.82
|
||||
filament_density = 1.27
|
||||
filament_notes = "List of manufacturers tested with standard PET print settings:\n\nE3D Edge\nFillamentum CPE GH100\nPlasty Mladec PETG"
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Generic PLA]
|
||||
inherits = *PLA*
|
||||
filament_cost = 25.4
|
||||
filament_density = 1.24
|
||||
filament_notes = "List of materials tested with standard PLA print settings:\n\nDas Filament\nEsun PLA\nEUMAKERS PLA\nFiberlogy HD-PLA\nFillamentum PLA\nFloreon3D\nHatchbox PLA\nPlasty Mladec PLA\nPrimavalue PLA\nProto pasta Matte Fiber\nVerbatim PLA\nVerbatim BVOH"
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Generic FLEX]
|
||||
inherits = *FLEX*
|
||||
|
@ -1347,6 +1388,7 @@ filament_colour = #3A80CA
|
|||
first_layer_bed_temperature = 100
|
||||
first_layer_temperature = 270
|
||||
temperature = 270
|
||||
filament_vendor = Polymaker
|
||||
|
||||
[filament:PrimaSelect PVA+]
|
||||
inherits = *PLA*
|
||||
|
@ -1363,12 +1405,14 @@ filament_type = PVA
|
|||
first_layer_temperature = 195
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 195
|
||||
filament_vendor = PrimaSelect
|
||||
|
||||
[filament:Prusa ABS]
|
||||
inherits = *ABS*
|
||||
filament_cost = 27.82
|
||||
filament_density = 1.08
|
||||
filament_notes = "List of materials tested with standard ABS print settings:\n\nEsun ABS\nFil-A-Gehr ABS\nHatchboxABS\nPlasty Mladec ABS"
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:*ABS MMU2*]
|
||||
inherits = Prusa ABS
|
||||
|
@ -1385,6 +1429,7 @@ filament_unloading_speed = 20
|
|||
|
||||
[filament:Generic ABS MMU2]
|
||||
inherits = *ABS MMU2*
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Prusament ASA MMU2]
|
||||
inherits = *ABS MMU2*
|
||||
|
@ -1410,6 +1455,7 @@ start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{el
|
|||
|
||||
[filament:Prusa ABS MMU2]
|
||||
inherits = *ABS MMU2*
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusa HIPS]
|
||||
inherits = *ABS*
|
||||
|
@ -1428,6 +1474,7 @@ max_fan_speed = 20
|
|||
min_fan_speed = 20
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 220
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusa PET]
|
||||
inherits = *PET*
|
||||
|
@ -1435,6 +1482,7 @@ filament_cost = 27.82
|
|||
filament_density = 1.27
|
||||
filament_notes = "List of manufacturers tested with standard PET print settings:\n\nE3D Edge\nPlasty Mladec PETG"
|
||||
compatible_printers_condition = nozzle_diameter[0]!=0.6 and printer_model!="MK2SMM" and ! (printer_notes=~/.*PRINTER_VENDOR_PRUSA3D.*/ and printer_notes=~/.*PRINTER_MODEL_MK(2.5|3).*/ and single_extruder_multi_material)
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PETG]
|
||||
inherits = *PET*
|
||||
|
@ -1444,12 +1492,14 @@ filament_cost = 24.99
|
|||
filament_density = 1.27
|
||||
filament_type = PETG
|
||||
compatible_printers_condition = nozzle_diameter[0]!=0.6 and printer_model!="MK2SMM" and ! (printer_notes=~/.*PRINTER_VENDOR_PRUSA3D.*/ and printer_notes=~/.*PRINTER_MODEL_MK(2.5|3).*/ and single_extruder_multi_material)
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusa PET 0.6 nozzle]
|
||||
inherits = *PET06*
|
||||
filament_cost = 27.82
|
||||
filament_density = 1.27
|
||||
filament_notes = "List of manufacturers tested with standard PET print settings:\n\nE3D Edge\nPlasty Mladec PETG"
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PETG 0.6 nozzle]
|
||||
inherits = *PET06*
|
||||
|
@ -1458,6 +1508,7 @@ temperature = 250
|
|||
filament_cost = 24.99
|
||||
filament_density = 1.27
|
||||
filament_type = PETG
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:*PET MMU2*]
|
||||
inherits = Prusa PET
|
||||
|
@ -1485,9 +1536,11 @@ filament_max_volumetric_speed = 13
|
|||
|
||||
[filament:Generic PET MMU2]
|
||||
inherits = *PET MMU2*
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Prusa PET MMU2]
|
||||
inherits = *PET MMU2*
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PETG MMU2]
|
||||
inherits = *PET MMU2*
|
||||
|
@ -1498,16 +1551,19 @@ inherits = *PET MMU2 06*
|
|||
|
||||
[filament:Prusa PET MMU2 0.6 nozzle]
|
||||
inherits = *PET MMU2 06*
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PETG MMU2 0.6 nozzle]
|
||||
inherits = *PET MMU2 06*
|
||||
filament_type = PETG
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusa PLA]
|
||||
inherits = *PLA*
|
||||
filament_cost = 25.4
|
||||
filament_density = 1.24
|
||||
filament_notes = "List of materials tested with standard PLA print settings:\n\nDas Filament\nEsun PLA\nEUMAKERS PLA\nFiberlogy HD-PLA\nFiberlogy PLA\nFillamentum PLA\nFloreon3D\nHatchbox PLA\nPlasty Mladec PLA\nPrimavalue PLA\nProto pasta Matte Fiber\nVerbatim PLA\nAmazonBasics PLA"
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PLA]
|
||||
inherits = *PLA*
|
||||
|
@ -1515,6 +1571,7 @@ temperature = 215
|
|||
filament_cost = 24.99
|
||||
filament_density = 1.24
|
||||
filament_notes = "Affordable filament for everyday printing in premium quality manufactured in-house by Josef Prusa"
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:*PLA MMU2*]
|
||||
inherits = Prusa PLA
|
||||
|
@ -1534,18 +1591,22 @@ filament_unloading_speed_start = 100
|
|||
|
||||
[filament:Generic PLA MMU2]
|
||||
inherits = *PLA MMU2*
|
||||
filament_vendor = Generic
|
||||
|
||||
[filament:Prusa PLA MMU2]
|
||||
inherits = *PLA MMU2*
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:Prusament PLA MMU2]
|
||||
inherits = *PLA MMU2*
|
||||
filament_vendor = Prusa
|
||||
|
||||
[filament:SemiFlex or Flexfill 98A]
|
||||
inherits = *FLEX*
|
||||
filament_cost = 82
|
||||
filament_density = 1.22
|
||||
filament_max_volumetric_speed = 1.35
|
||||
filament_vendor = Flexfill
|
||||
|
||||
[filament:Taulman Bridge]
|
||||
inherits = *common*
|
||||
|
@ -1567,6 +1628,7 @@ max_fan_speed = 5
|
|||
min_fan_speed = 0
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 250
|
||||
filament_vendor = Taulman
|
||||
|
||||
[filament:Taulman T-Glase]
|
||||
inherits = *PET*
|
||||
|
@ -1580,6 +1642,7 @@ first_layer_temperature = 240
|
|||
max_fan_speed = 5
|
||||
min_fan_speed = 0
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}30{endif}; Filament gcode"
|
||||
filament_vendor = Taulman
|
||||
|
||||
[filament:Verbatim BVOH]
|
||||
inherits = *common*
|
||||
|
@ -1603,6 +1666,7 @@ max_fan_speed = 100
|
|||
min_fan_speed = 100
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 210
|
||||
filament_vendor = Verbatim
|
||||
|
||||
[filament:Verbatim BVOH MMU2]
|
||||
inherits = Verbatim BVOH
|
||||
|
@ -1622,6 +1686,7 @@ filament_unload_time = 12
|
|||
filament_unloading_speed = 20
|
||||
filament_unloading_speed_start = 100
|
||||
filament_loading_speed_start = 19
|
||||
filament_vendor = Verbatim
|
||||
|
||||
[filament:PrimaSelect PVA+ MMU2]
|
||||
inherits = *common*
|
||||
|
@ -1660,6 +1725,7 @@ min_print_speed = 15
|
|||
slowdown_below_layer_time = 20
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}30{endif}; Filament gcode"
|
||||
temperature = 195
|
||||
filament_vendor = PrimaSelect
|
||||
|
||||
[filament:Verbatim PP]
|
||||
inherits = *common*
|
||||
|
@ -1682,6 +1748,7 @@ max_fan_speed = 100
|
|||
min_fan_speed = 100
|
||||
start_filament_gcode = "M900 K{if printer_notes=~/.*PRINTER_HAS_BOWDEN.*/}200{else}10{endif}; Filament gcode"
|
||||
temperature = 220
|
||||
filament_vendor = Verbatim
|
||||
|
||||
## Filaments MMU1
|
||||
|
||||
|
@ -1899,9 +1966,11 @@ exposure_time = 6
|
|||
initial_exposure_time = 40
|
||||
|
||||
[sla_material:BlueCast Keramaster Dental 0.025]
|
||||
material_type = Dental
|
||||
inherits = *common 0.025*
|
||||
exposure_time = 6
|
||||
initial_exposure_time = 45
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast X10 0.025]
|
||||
inherits = *common 0.025*
|
||||
|
@ -1912,6 +1981,7 @@ initial_exposure_time = 100
|
|||
inherits = *common 0.025*
|
||||
exposure_time = 6
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Grey Tough 0.025]
|
||||
inherits = *common 0.025*
|
||||
|
@ -1964,31 +2034,38 @@ initial_exposure_time = 35
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 7
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast Keramaster 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 45
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast Keramaster Dental 0.05]
|
||||
material_type = Dental
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 7
|
||||
initial_exposure_time = 50
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast LCD-DLP Original 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 10
|
||||
initial_exposure_time = 60
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast Phrozen Wax 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 16
|
||||
initial_exposure_time = 50
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast S+ 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 9
|
||||
initial_exposure_time = 45
|
||||
material_vendor = Bluecast
|
||||
|
||||
[sla_material:BlueCast X10 0.05]
|
||||
inherits = *common 0.05*
|
||||
|
@ -1999,26 +2076,31 @@ initial_exposure_time = 100
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 6
|
||||
initial_exposure_time = 40
|
||||
material_vendor = Monocure
|
||||
|
||||
[sla_material:Monocure 3D Blue Rapid Resin 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 7
|
||||
initial_exposure_time = 40
|
||||
material_vendor = Monocure
|
||||
|
||||
[sla_material:Monocure 3D Clear Rapid Resin 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 40
|
||||
material_vendor = Monocure
|
||||
|
||||
[sla_material:Monocure 3D Grey Rapid Resin 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 10
|
||||
initial_exposure_time = 30
|
||||
material_vendor = Monocure
|
||||
|
||||
[sla_material:Monocure 3D White Rapid Resin 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 7
|
||||
initial_exposure_time = 40
|
||||
material_vendor = Monocure
|
||||
|
||||
[sla_material:3DM-HTR140 (high temperature) 0.05]
|
||||
inherits = *common 0.05*
|
||||
|
@ -2034,36 +2116,43 @@ initial_exposure_time = 25
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 20
|
||||
initial_exposure_time = 40
|
||||
material_vendor = 3DM
|
||||
|
||||
[sla_material:3DM-DENT 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 7
|
||||
initial_exposure_time = 45
|
||||
material_vendor = 3DM
|
||||
|
||||
[sla_material:3DM-HR Green 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 15
|
||||
initial_exposure_time = 40
|
||||
material_vendor = 3DM
|
||||
|
||||
[sla_material:3DM-HR Red Wine 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 9
|
||||
initial_exposure_time = 35
|
||||
material_vendor = 3DM
|
||||
|
||||
[sla_material:3DM-XPRO White 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 9
|
||||
initial_exposure_time = 35
|
||||
material_vendor = 3DM
|
||||
|
||||
[sla_material:FTD Ash Grey 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 9
|
||||
initial_exposure_time = 40
|
||||
material_vendor = FTD
|
||||
|
||||
[sla_material:Harz Labs Model Resin Cherry 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 45
|
||||
material_vendor = Harz Labs
|
||||
|
||||
[sla_material:Photocentric Hard Grey 0.05]
|
||||
inherits = *common 0.05*
|
||||
|
@ -2116,6 +2205,7 @@ initial_exposure_time = 35
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 13
|
||||
initial_exposure_time = 40
|
||||
material_vendor = Prusa
|
||||
|
||||
## [sla_material:Prusa Yellow Solid 0.05]
|
||||
## inherits = *common 0.05*
|
||||
|
@ -2126,6 +2216,7 @@ initial_exposure_time = 40
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 7.5
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
## [sla_material:Prusa Transparent Green Tough 0.05]
|
||||
## inherits = *common 0.05*
|
||||
|
@ -2136,21 +2227,25 @@ initial_exposure_time = 35
|
|||
inherits = *common 0.05*
|
||||
exposure_time = 6
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Maroon Tough 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 7.5
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Pink Tough 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Azure Blue Tough 0.05]
|
||||
inherits = *common 0.05*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Transparent Tough 0.05]
|
||||
inherits = *common 0.05*
|
||||
|
@ -2193,6 +2288,7 @@ initial_exposure_time = 15
|
|||
inherits = *common 0.035*
|
||||
exposure_time = 6
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
########### Materials 0.1
|
||||
|
||||
|
@ -2235,6 +2331,7 @@ initial_exposure_time = 55
|
|||
inherits = *common 0.1*
|
||||
exposure_time = 8
|
||||
initial_exposure_time = 35
|
||||
material_vendor = Prusa
|
||||
|
||||
[sla_material:Prusa Green Casting 0.1]
|
||||
inherits = *common 0.1*
|
||||
|
|
|
@@ -1,2 +1,2 @@
-add_subdirectory(slabasebed)
 add_subdirectory(slasupporttree)
+add_subdirectory(openvdb)
|
||||
|
|
2  sandboxes/openvdb/CMakeLists.txt (new file)
|
@@ -0,0 +1,2 @@
+add_executable(openvdb_example openvdb_example.cpp)
+target_link_libraries(openvdb_example libslic3r)
|
37  sandboxes/openvdb/openvdb_example.cpp (new file)
|
@ -0,0 +1,37 @@
|
|||
#include <openvdb/openvdb.h>
#include <iostream>

int main()
{
    // Initialize the OpenVDB library. This must be called at least
    // once per program and may safely be called multiple times.
    openvdb::initialize();
    // Create an empty floating-point grid with background value 0.
    openvdb::FloatGrid::Ptr grid = openvdb::FloatGrid::create();
    std::cout << "Testing random access:" << std::endl;
    // Get an accessor for coordinate-based access to voxels.
    openvdb::FloatGrid::Accessor accessor = grid->getAccessor();
    // Define a coordinate with large signed indices.
    openvdb::Coord xyz(1000, -200000000, 30000000);
    // Set the voxel value at (1000, -200000000, 30000000) to 1.
    accessor.setValue(xyz, 1.0);
    // Verify that the voxel value at (1000, -200000000, 30000000) is 1.
    std::cout << "Grid" << xyz << " = " << accessor.getValue(xyz) << std::endl;
    // Reset the coordinates to those of a different voxel.
    xyz.reset(1000, 200000000, -30000000);
    // Verify that the voxel value at (1000, 200000000, -30000000) is
    // the background value, 0.
    std::cout << "Grid" << xyz << " = " << accessor.getValue(xyz) << std::endl;
    // Set the voxel value at (1000, 200000000, -30000000) to 2.
    accessor.setValue(xyz, 2.0);
    // Set the voxels at the two extremes of the available coordinate space.
    // For 32-bit signed coordinates these are (-2147483648, -2147483648, -2147483648)
    // and (2147483647, 2147483647, 2147483647).
    accessor.setValue(openvdb::Coord::min(), 3.0f);
    accessor.setValue(openvdb::Coord::max(), 4.0f);
    std::cout << "Testing sequential access:" << std::endl;
    // Print all active ("on") voxels by means of an iterator.
    for (openvdb::FloatGrid::ValueOnCIter iter = grid->cbeginValueOn(); iter; ++iter) {
        std::cout << "Grid" << iter.getCoord() << " = " << *iter << std::endl;
    }
}
|
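The sandbox above only exercises voxel access. The reason OpenVDB is being pulled into the tree is its level-set machinery (signed distance fields that can be offset and meshed again, e.g. for hollowing). The sketch below is not part of the sandbox; it shows the kind of round trip involved, with an arbitrary sphere radius and voxel size chosen purely for illustration.

#include <openvdb/openvdb.h>
#include <openvdb/tools/LevelSetSphere.h>
#include <openvdb/tools/VolumeToMesh.h>
#include <iostream>
#include <vector>

int main()
{
    openvdb::initialize();

    // Build a narrow-band signed distance field for a sphere of radius 10
    // with a voxel size of 0.5 (example values only).
    openvdb::FloatGrid::Ptr sdf =
        openvdb::tools::createLevelSetSphere<openvdb::FloatGrid>(
            10.0f, openvdb::Vec3f(0.0f), 0.5f);

    // Extract a polygon mesh from the zero isosurface.
    std::vector<openvdb::Vec3s> points;
    std::vector<openvdb::Vec4I> quads;
    openvdb::tools::volumeToMesh(*sdf, points, quads);

    std::cout << points.size() << " vertices, " << quads.size() << " quads\n";
}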
|
@ -1,2 +0,0 @@
|
|||
add_executable(slabasebed EXCLUDE_FROM_ALL slabasebed.cpp)
|
||||
target_link_libraries(slabasebed libslic3r ${Boost_LIBRARIES} ${TBB_LIBRARIES} ${Boost_LIBRARIES} ${CMAKE_DL_LIBS})
|
|
@ -1,85 +0,0 @@
|
|||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <string>
|
||||
|
||||
#include <libslic3r/libslic3r.h>
|
||||
#include <libslic3r/TriangleMesh.hpp>
|
||||
#include <libslic3r/Tesselate.hpp>
|
||||
#include <libslic3r/ClipperUtils.hpp>
|
||||
#include <libslic3r/SLA/SLABasePool.hpp>
|
||||
#include <libslic3r/SLA/SLABoilerPlate.hpp>
|
||||
#include <libnest2d/tools/benchmark.h>
|
||||
|
||||
const std::string USAGE_STR = {
|
||||
"Usage: slabasebed stlfilename.stl"
|
||||
};
|
||||
|
||||
namespace Slic3r { namespace sla {
|
||||
|
||||
Contour3D create_base_pool(const Polygons &ground_layer,
|
||||
const ExPolygons &holes = {},
|
||||
const PoolConfig& cfg = PoolConfig());
|
||||
|
||||
Contour3D walls(const Polygon& floor_plate, const Polygon& ceiling,
|
||||
double floor_z_mm, double ceiling_z_mm,
|
||||
double offset_difference_mm, ThrowOnCancel thr);
|
||||
|
||||
void offset(ExPolygon& sh, coord_t distance);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
int main(const int argc, const char *argv[]) {
|
||||
using namespace Slic3r;
|
||||
using std::cout; using std::endl;
|
||||
|
||||
if(argc < 2) {
|
||||
cout << USAGE_STR << endl;
|
||||
return EXIT_SUCCESS;
|
||||
}
|
||||
|
||||
TriangleMesh model;
|
||||
Benchmark bench;
|
||||
|
||||
model.ReadSTLFile(argv[1]);
|
||||
model.align_to_origin();
|
||||
|
||||
ExPolygons ground_slice;
|
||||
sla::base_plate(model, ground_slice, 0.1f);
|
||||
if(ground_slice.empty()) return EXIT_FAILURE;
|
||||
|
||||
ground_slice = offset_ex(ground_slice, 0.5);
|
||||
ExPolygon gndfirst; gndfirst = ground_slice.front();
|
||||
sla::breakstick_holes(gndfirst, 0.5, 10, 0.3);
|
||||
|
||||
sla::Contour3D mesh;
|
||||
|
||||
bench.start();
|
||||
|
||||
sla::PoolConfig cfg;
|
||||
cfg.min_wall_height_mm = 0;
|
||||
cfg.edge_radius_mm = 0;
|
||||
mesh = sla::create_base_pool(to_polygons(ground_slice), {}, cfg);
|
||||
|
||||
bench.stop();
|
||||
|
||||
cout << "Base pool creation time: " << std::setprecision(10)
|
||||
<< bench.getElapsedSec() << " seconds." << endl;
|
||||
|
||||
for(auto& trind : mesh.indices) {
|
||||
Vec3d p0 = mesh.points[size_t(trind[0])];
|
||||
Vec3d p1 = mesh.points[size_t(trind[1])];
|
||||
Vec3d p2 = mesh.points[size_t(trind[2])];
|
||||
Vec3d p01 = p1 - p0;
|
||||
Vec3d p02 = p2 - p0;
|
||||
auto a = p01.cross(p02).norm() / 2.0;
|
||||
if(std::abs(a) < 1e-6) std::cout << "degenerate triangle" << std::endl;
|
||||
}
|
||||
|
||||
// basepool.write_ascii("out.stl");
|
||||
|
||||
std::fstream outstream("out.obj", std::fstream::out);
|
||||
mesh.to_obj(outstream);
|
||||
|
||||
return EXIT_SUCCESS;
|
||||
}
|
|
@@ -16,7 +16,6 @@ add_subdirectory(semver)
 add_subdirectory(libigl)

 # Adding libnest2d project for bin packing...
-set(LIBNEST2D_UNITTESTS ON CACHE BOOL "Force generating unittests for libnest2d")
 add_subdirectory(libnest2d)

 add_subdirectory(libslic3r)
|
||||
|
|
|
@@ -167,6 +167,7 @@ int CLI::run(int argc, char **argv)
     // sla_print_config.apply(m_print_config, true);

     // Loop through transform options.
+    bool user_center_specified = false;
     for (auto const &opt_key : m_transforms) {
         if (opt_key == "merge") {
             Model m;
|
||||
|
@@ -209,6 +210,7 @@ int CLI::run(int argc, char **argv)
             for (auto &model : m_models)
                 model.duplicate_objects_grid(x, y, (distance > 0) ? distance : 6); // TODO: this is not the right place for setting a default
         } else if (opt_key == "center") {
+            user_center_specified = true;
             for (auto &model : m_models) {
                 model.add_default_instances();
                 // this affects instances:
|
||||
|
@@ -403,7 +405,9 @@ int CLI::run(int argc, char **argv)
         if (! m_config.opt_bool("dont_arrange")) {
             //FIXME make the min_object_distance configurable.
             model.arrange_objects(fff_print.config().min_object_distance());
-            model.center_instances_around_point(m_config.option<ConfigOptionPoint>("center")->value);
+            model.center_instances_around_point((! user_center_specified && m_print_config.has("bed_shape")) ?
+                BoundingBoxf(m_print_config.opt<ConfigOptionPoints>("bed_shape")->values).center() :
+                m_config.option<ConfigOptionPoint>("center")->value);
         }
         if (printer_technology == ptFFF) {
             for (auto* mo : model.objects)
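The new fallback centres the instances on the bed's bounding box whenever the user did not pass --center explicitly. A self-contained sketch of the same decision, using plain Eigen types instead of the slicer's ConfigOption and BoundingBoxf classes (the function name and signature are invented for illustration):

#include <Eigen/Dense>
#include <vector>

// If the user did not specify a center and a bed outline is known,
// fall back to the centre of the bed's bounding box.
Eigen::Vector2d choose_center(bool user_center_specified,
                              const std::vector<Eigen::Vector2d> &bed_shape,
                              const Eigen::Vector2d &user_center)
{
    if (user_center_specified || bed_shape.empty())
        return user_center;
    Eigen::Vector2d lo = bed_shape.front(), hi = bed_shape.front();
    for (const Eigen::Vector2d &p : bed_shape) {
        lo = lo.cwiseMin(p);
        hi = hi.cwiseMax(p);
    }
    return 0.5 * (lo + hi);
}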
|
||||
|
|
|
@@ -90,7 +90,7 @@ struct stl_neighbors {

 struct stl_stats {
     stl_stats() { memset(&header, 0, 81); }
-    char header[81];// = "";
+    char header[81];
     stl_type type = (stl_type)0;
     uint32_t number_of_facets = 0;
     stl_vertex max = stl_vertex::Zero();
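The stale initializer comment could go away because the constructor already zero-fills all 81 bytes; the buffer is sized one past the 80-byte binary STL header so it always stays NUL-terminated after a read. A small stand-alone sketch of that pattern (stl_stats_like and read_header are invented names, not admesh code):

#include <cstdio>
#include <cstring>

struct stl_stats_like {
    stl_stats_like() { std::memset(header, 0, sizeof(header)); }
    char header[81]; // 80-byte binary STL header plus a terminating NUL
};

// Reading exactly 80 bytes leaves header NUL-terminated, because the
// constructor zeroed the full 81 bytes beforehand.
bool read_header(std::FILE *fp, stl_stats_like &stats)
{
    return std::fread(stats.header, 1, 80, fp) == 80;
}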
|
||||
|
|
|
@@ -156,7 +156,7 @@ namespace agg

        //-------------------------------------------------------------------
        template<class VertexSource>
-       void add_path(VertexSource& vs, unsigned path_id=0)
+       void add_path(VertexSource &&vs, unsigned path_id=0)
        {
            double x;
            double y;
|
||||
|
|
|
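Changing the parameter to VertexSource&& (a forwarding reference on a deduced template parameter) lets callers hand a temporary adaptor straight to add_path instead of first naming it; with VertexSource& that call would not compile. A generic illustration, deliberately independent of the agg types (Adaptor and the function names are invented for this sketch):

#include <iostream>

struct Adaptor {                 // stands in for an agg vertex-source adaptor
    int value = 42;
};

// Before: only lvalues bind here.
void add_path_lvalue(Adaptor &a) { std::cout << a.value << '\n'; }

// After: a forwarding reference accepts lvalues and temporaries alike.
template <class VertexSource>
void add_path_forwarding(VertexSource &&vs) { std::cout << vs.value << '\n'; }

int main()
{
    Adaptor named;
    add_path_lvalue(named);          // fine
    // add_path_lvalue(Adaptor{});   // error: cannot bind a temporary to Adaptor&
    add_path_forwarding(named);      // fine (VertexSource deduced as Adaptor&)
    add_path_forwarding(Adaptor{});  // also fine (temporary binds to the && parameter)
}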
@ -1,134 +1,31 @@
|
|||
cmake_minimum_required(VERSION 3.0)
|
||||
|
||||
project(Libnest2D)
|
||||
|
||||
if(CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
|
||||
# Update if necessary
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wno-long-long ")
|
||||
endif()
|
||||
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED)
|
||||
|
||||
# Add our own cmake module path.
|
||||
list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules/)
|
||||
|
||||
option(LIBNEST2D_UNITTESTS "If enabled, googletest framework will be downloaded
|
||||
and the provided unit tests will be included in the build." OFF)
|
||||
|
||||
option(LIBNEST2D_BUILD_EXAMPLES "If enabled, examples will be built." OFF)
|
||||
|
||||
option(LIBNEST2D_HEADER_ONLY "If enabled static library will not be built." ON)
|
||||
|
||||
set(GEOMETRY_BACKENDS clipper boost eigen)
|
||||
set(LIBNEST2D_GEOMETRIES clipper CACHE STRING "Geometry backend")
|
||||
set_property(CACHE LIBNEST2D_GEOMETRIES PROPERTY STRINGS ${GEOMETRY_BACKENDS})
|
||||
list(FIND GEOMETRY_BACKENDS ${LIBNEST2D_GEOMETRIES} GEOMETRY_TYPE)
|
||||
if(${GEOMETRY_TYPE} EQUAL -1)
|
||||
message(FATAL_ERROR "Option ${LIBNEST2D_GEOMETRIES} not supported, valid entries are ${GEOMETRY_BACKENDS}")
|
||||
endif()
|
||||
|
||||
set(OPTIMIZERS nlopt optimlib)
|
||||
set(LIBNEST2D_OPTIMIZER nlopt CACHE STRING "Optimization backend")
|
||||
set_property(CACHE LIBNEST2D_OPTIMIZER PROPERTY STRINGS ${OPTIMIZERS})
|
||||
list(FIND OPTIMIZERS ${LIBNEST2D_OPTIMIZER} OPTIMIZER_TYPE)
|
||||
if(${OPTIMIZER_TYPE} EQUAL -1)
|
||||
message(FATAL_ERROR "Option ${LIBNEST2D_OPTIMIZER} not supported, valid entries are ${OPTIMIZERS}")
|
||||
endif()
|
||||
|
||||
add_library(libnest2d INTERFACE)
|
||||
|
||||
set(SRC_DIR ${PROJECT_SOURCE_DIR}/include)
|
||||
|
||||
set(LIBNEST2D_SRCFILES
|
||||
${SRC_DIR}/libnest2d/libnest2d.hpp # Templates only
|
||||
${SRC_DIR}/libnest2d/geometry_traits.hpp
|
||||
${SRC_DIR}/libnest2d/geometry_traits_nfp.hpp
|
||||
${SRC_DIR}/libnest2d/common.hpp
|
||||
${SRC_DIR}/libnest2d/optimizer.hpp
|
||||
${SRC_DIR}/libnest2d/utils/metaloop.hpp
|
||||
${SRC_DIR}/libnest2d/utils/rotfinder.hpp
|
||||
${SRC_DIR}/libnest2d/utils/rotcalipers.hpp
|
||||
${SRC_DIR}/libnest2d/utils/bigint.hpp
|
||||
${SRC_DIR}/libnest2d/utils/rational.hpp
|
||||
${SRC_DIR}/libnest2d/placers/placer_boilerplate.hpp
|
||||
${SRC_DIR}/libnest2d/placers/bottomleftplacer.hpp
|
||||
${SRC_DIR}/libnest2d/placers/nfpplacer.hpp
|
||||
${SRC_DIR}/libnest2d/selections/selection_boilerplate.hpp
|
||||
${SRC_DIR}/libnest2d/selections/filler.hpp
|
||||
${SRC_DIR}/libnest2d/selections/firstfit.hpp
|
||||
${SRC_DIR}/libnest2d/selections/djd_heuristic.hpp
|
||||
include/libnest2d/libnest2d.hpp
|
||||
include/libnest2d/nester.hpp
|
||||
include/libnest2d/geometry_traits.hpp
|
||||
include/libnest2d/geometry_traits_nfp.hpp
|
||||
include/libnest2d/common.hpp
|
||||
include/libnest2d/optimizer.hpp
|
||||
include/libnest2d/utils/metaloop.hpp
|
||||
include/libnest2d/utils/rotfinder.hpp
|
||||
include/libnest2d/utils/rotcalipers.hpp
|
||||
include/libnest2d/placers/placer_boilerplate.hpp
|
||||
include/libnest2d/placers/bottomleftplacer.hpp
|
||||
include/libnest2d/placers/nfpplacer.hpp
|
||||
include/libnest2d/selections/selection_boilerplate.hpp
|
||||
#include/libnest2d/selections/filler.hpp
|
||||
include/libnest2d/selections/firstfit.hpp
|
||||
#include/libnest2d/selections/djd_heuristic.hpp
|
||||
include/libnest2d/backends/clipper/geometries.hpp
|
||||
include/libnest2d/backends/clipper/clipper_polygon.hpp
|
||||
include/libnest2d/optimizers/nlopt/nlopt_boilerplate.hpp
|
||||
include/libnest2d/optimizers/nlopt/simplex.hpp
|
||||
include/libnest2d/optimizers/nlopt/subplex.hpp
|
||||
include/libnest2d/optimizers/nlopt/genetic.hpp
|
||||
src/libnest2d.cpp
|
||||
)
|
||||
|
||||
set(TBB_STATIC ON)
|
||||
find_package(TBB QUIET)
|
||||
if(TBB_FOUND)
|
||||
message(STATUS "Parallelization with Intel TBB")
|
||||
target_include_directories(libnest2d INTERFACE ${TBB_INCLUDE_DIRS})
|
||||
target_compile_definitions(libnest2d INTERFACE ${TBB_DEFINITIONS} -DUSE_TBB)
|
||||
if(MSVC)
|
||||
# Suppress implicit linking of the TBB libraries by the Visual Studio compiler.
|
||||
target_compile_definitions(libnest2d INTERFACE -D__TBB_NO_IMPLICIT_LINKAGE)
|
||||
endif()
|
||||
# The Intel TBB library will use the std::exception_ptr feature of C++11.
|
||||
target_compile_definitions(libnest2d INTERFACE -DTBB_USE_CAPTURED_EXCEPTION=0)
|
||||
add_library(libnest2d ${LIBNEST2D_SRCFILES})
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
target_link_libraries(libnest2d INTERFACE
|
||||
tbb # VS debug mode needs linking this way:
|
||||
# ${TBB_LIBRARIES}
|
||||
${CMAKE_DL_LIBS}
|
||||
Threads::Threads
|
||||
)
|
||||
else()
|
||||
find_package(OpenMP QUIET)
|
||||
|
||||
if(OpenMP_CXX_FOUND)
|
||||
message(STATUS "Parallelization with OpenMP")
|
||||
target_include_directories(libnest2d INTERFACE OpenMP::OpenMP_CXX)
|
||||
target_link_libraries(libnest2d INTERFACE OpenMP::OpenMP_CXX)
|
||||
else()
|
||||
message("Parallelization with C++11 threads")
|
||||
find_package(Threads REQUIRED)
|
||||
target_link_libraries(libnest2d INTERFACE Threads::Threads)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
add_subdirectory(${SRC_DIR}/libnest2d/backends/${LIBNEST2D_GEOMETRIES})
|
||||
target_link_libraries(libnest2d INTERFACE ${LIBNEST2D_GEOMETRIES}Backend)
|
||||
|
||||
add_subdirectory(${SRC_DIR}/libnest2d/optimizers/${LIBNEST2D_OPTIMIZER})
|
||||
target_link_libraries(libnest2d INTERFACE ${LIBNEST2D_OPTIMIZER}Optimizer)
|
||||
|
||||
# target_sources(libnest2d INTERFACE ${LIBNEST2D_SRCFILES})
|
||||
target_include_directories(libnest2d INTERFACE ${SRC_DIR})
|
||||
|
||||
if(NOT LIBNEST2D_HEADER_ONLY)
|
||||
set(LIBNAME libnest2d_${LIBNEST2D_GEOMETRIES}_${LIBNEST2D_OPTIMIZER})
|
||||
add_library(${LIBNAME} ${PROJECT_SOURCE_DIR}/src/libnest2d.cpp)
|
||||
target_link_libraries(${LIBNAME} PUBLIC libnest2d)
|
||||
target_compile_definitions(${LIBNAME} PUBLIC LIBNEST2D_STATIC)
|
||||
endif()
|
||||
|
||||
if(LIBNEST2D_BUILD_EXAMPLES)
|
||||
|
||||
add_executable(example examples/main.cpp
|
||||
# tools/libnfpglue.hpp
|
||||
# tools/libnfpglue.cpp
|
||||
tools/nfp_svgnest.hpp
|
||||
tools/nfp_svgnest_glue.hpp
|
||||
tools/svgtools.hpp
|
||||
tests/printer_parts.cpp
|
||||
tests/printer_parts.h
|
||||
)
|
||||
|
||||
if(NOT LIBNEST2D_HEADER_ONLY)
|
||||
target_link_libraries(example ${LIBNAME})
|
||||
else()
|
||||
target_link_libraries(example libnest2d)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(LIBNEST2D_UNITTESTS)
|
||||
add_subdirectory(${PROJECT_SOURCE_DIR}/tests)
|
||||
endif()
|
||||
target_include_directories(libnest2d PUBLIC ${CMAKE_CURRENT_LIST_DIR}/include)
|
||||
target_link_libraries(libnest2d PUBLIC clipper NLopt::nlopt TBB::tbb Boost::boost)
|
||||
target_compile_definitions(libnest2d PUBLIC USE_TBB LIBNEST2D_STATIC LIBNEST2D_OPTIMIZER_nlopt LIBNEST2D_GEOMETRIES_clipper)
|
||||
|
|
|
@ -1,35 +0,0 @@
|
|||
include(DownloadProject)
|
||||
|
||||
if (CMAKE_VERSION VERSION_LESS 3.2)
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "")
|
||||
else()
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "UPDATE_DISCONNECTED 1")
|
||||
endif()
|
||||
|
||||
set(URL_NLOPT "https://github.com/stevengj/nlopt.git"
|
||||
CACHE STRING "Location of the nlopt git repository")
|
||||
|
||||
# set(NLopt_DIR ${CMAKE_BINARY_DIR}/nlopt)
|
||||
include(DownloadProject)
|
||||
download_project( PROJ nlopt
|
||||
GIT_REPOSITORY ${URL_NLOPT}
|
||||
GIT_TAG v2.5.0
|
||||
# CMAKE_CACHE_ARGS -DBUILD_SHARED_LIBS:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${NLopt_DIR}
|
||||
${UPDATE_DISCONNECTED_IF_AVAILABLE}
|
||||
)
|
||||
|
||||
set(SHARED_LIBS_STATE BUILD_SHARED_LIBS)
|
||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_PYTHON OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_OCTAVE OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_MATLAB OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_GUILE OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_SWIG OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_LINK_PYTHON OFF CACHE BOOL "" FORCE)
|
||||
|
||||
add_subdirectory(${nlopt_SOURCE_DIR} ${nlopt_BINARY_DIR})
|
||||
|
||||
set(NLopt_LIBS nlopt)
|
||||
set(NLopt_INCLUDE_DIR ${nlopt_BINARY_DIR}
|
||||
${nlopt_BINARY_DIR}/src/api)
|
||||
set(SHARED_LIBS_STATE ${SHARED_STATE})
|
|
@ -1,17 +0,0 @@
|
|||
# Distributed under the OSI-approved MIT License. See accompanying
|
||||
# file LICENSE or https://github.com/Crascit/DownloadProject for details.
|
||||
|
||||
cmake_minimum_required(VERSION 2.8.2)
|
||||
|
||||
project(${DL_ARGS_PROJ}-download NONE)
|
||||
|
||||
include(ExternalProject)
|
||||
ExternalProject_Add(${DL_ARGS_PROJ}-download
|
||||
${DL_ARGS_UNPARSED_ARGUMENTS}
|
||||
SOURCE_DIR "${DL_ARGS_SOURCE_DIR}"
|
||||
BINARY_DIR "${DL_ARGS_BINARY_DIR}"
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
TEST_COMMAND ""
|
||||
)
|
|
@ -1,182 +0,0 @@
|
|||
# Distributed under the OSI-approved MIT License. See accompanying
|
||||
# file LICENSE or https://github.com/Crascit/DownloadProject for details.
|
||||
#
|
||||
# MODULE: DownloadProject
|
||||
#
|
||||
# PROVIDES:
|
||||
# download_project( PROJ projectName
|
||||
# [PREFIX prefixDir]
|
||||
# [DOWNLOAD_DIR downloadDir]
|
||||
# [SOURCE_DIR srcDir]
|
||||
# [BINARY_DIR binDir]
|
||||
# [QUIET]
|
||||
# ...
|
||||
# )
|
||||
#
|
||||
# Provides the ability to download and unpack a tarball, zip file, git repository,
|
||||
# etc. at configure time (i.e. when the cmake command is run). How the downloaded
|
||||
# and unpacked contents are used is up to the caller, but the motivating case is
|
||||
# to download source code which can then be included directly in the build with
|
||||
# add_subdirectory() after the call to download_project(). Source and build
|
||||
# directories are set up with this in mind.
|
||||
#
|
||||
# The PROJ argument is required. The projectName value will be used to construct
|
||||
# the following variables upon exit (obviously replace projectName with its actual
|
||||
# value):
|
||||
#
|
||||
# projectName_SOURCE_DIR
|
||||
# projectName_BINARY_DIR
|
||||
#
|
||||
# The SOURCE_DIR and BINARY_DIR arguments are optional and would not typically
|
||||
# need to be provided. They can be specified if you want the downloaded source
|
||||
# and build directories to be located in a specific place. The contents of
|
||||
# projectName_SOURCE_DIR and projectName_BINARY_DIR will be populated with the
|
||||
# locations used whether you provide SOURCE_DIR/BINARY_DIR or not.
|
||||
#
|
||||
# The DOWNLOAD_DIR argument does not normally need to be set. It controls the
|
||||
# location of the temporary CMake build used to perform the download.
|
||||
#
|
||||
# The PREFIX argument can be provided to change the base location of the default
|
||||
# values of DOWNLOAD_DIR, SOURCE_DIR and BINARY_DIR. If all of those three arguments
|
||||
# are provided, then PREFIX will have no effect. The default value for PREFIX is
|
||||
# CMAKE_BINARY_DIR.
|
||||
#
|
||||
# The QUIET option can be given if you do not want to show the output associated
|
||||
# with downloading the specified project.
|
||||
#
|
||||
# In addition to the above, any other options are passed through unmodified to
|
||||
# ExternalProject_Add() to perform the actual download, patch and update steps.
|
||||
# The following ExternalProject_Add() options are explicitly prohibited (they
|
||||
# are reserved for use by the download_project() command):
|
||||
#
|
||||
# CONFIGURE_COMMAND
|
||||
# BUILD_COMMAND
|
||||
# INSTALL_COMMAND
|
||||
# TEST_COMMAND
|
||||
#
|
||||
# Only those ExternalProject_Add() arguments which relate to downloading, patching
|
||||
# and updating of the project sources are intended to be used. Also note that at
|
||||
# least one set of download-related arguments are required.
|
||||
#
|
||||
# If using CMake 3.2 or later, the UPDATE_DISCONNECTED option can be used to
|
||||
# prevent a check at the remote end for changes every time CMake is run
|
||||
# after the first successful download. See the documentation of the ExternalProject
|
||||
# module for more information. It is likely you will want to use this option if it
|
||||
# is available to you. Note, however, that the ExternalProject implementation contains
|
||||
# bugs which result in incorrect handling of the UPDATE_DISCONNECTED option when
|
||||
# using the URL download method or when specifying a SOURCE_DIR with no download
|
||||
# method. Fixes for these have been created, the last of which is scheduled for
|
||||
# inclusion in CMake 3.8.0. Details can be found here:
|
||||
#
|
||||
# https://gitlab.kitware.com/cmake/cmake/commit/bdca68388bd57f8302d3c1d83d691034b7ffa70c
|
||||
# https://gitlab.kitware.com/cmake/cmake/issues/16428
|
||||
#
|
||||
# If you experience build errors related to the update step, consider avoiding
|
||||
# the use of UPDATE_DISCONNECTED.
|
||||
#
|
||||
# EXAMPLE USAGE:
|
||||
#
|
||||
# include(DownloadProject)
|
||||
# download_project(PROJ googletest
|
||||
# GIT_REPOSITORY https://github.com/google/googletest.git
|
||||
# GIT_TAG master
|
||||
# UPDATE_DISCONNECTED 1
|
||||
# QUIET
|
||||
# )
|
||||
#
|
||||
# add_subdirectory(${googletest_SOURCE_DIR} ${googletest_BINARY_DIR})
|
||||
#
|
||||
#========================================================================================
|
||||
|
||||
|
||||
set(_DownloadProjectDir "${CMAKE_CURRENT_LIST_DIR}")
|
||||
|
||||
include(CMakeParseArguments)
|
||||
|
||||
function(download_project)
|
||||
|
||||
set(options QUIET)
|
||||
set(oneValueArgs
|
||||
PROJ
|
||||
PREFIX
|
||||
DOWNLOAD_DIR
|
||||
SOURCE_DIR
|
||||
BINARY_DIR
|
||||
# Prevent the following from being passed through
|
||||
CONFIGURE_COMMAND
|
||||
BUILD_COMMAND
|
||||
INSTALL_COMMAND
|
||||
TEST_COMMAND
|
||||
)
|
||||
set(multiValueArgs "")
|
||||
|
||||
cmake_parse_arguments(DL_ARGS "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
|
||||
|
||||
# Hide output if requested
|
||||
if (DL_ARGS_QUIET)
|
||||
set(OUTPUT_QUIET "OUTPUT_QUIET")
|
||||
else()
|
||||
unset(OUTPUT_QUIET)
|
||||
message(STATUS "Downloading/updating ${DL_ARGS_PROJ}")
|
||||
endif()
|
||||
|
||||
# Set up where we will put our temporary CMakeLists.txt file and also
|
||||
# the base point below which the default source and binary dirs will be.
|
||||
# The prefix must always be an absolute path.
|
||||
if (NOT DL_ARGS_PREFIX)
|
||||
set(DL_ARGS_PREFIX "${CMAKE_BINARY_DIR}")
|
||||
else()
|
||||
get_filename_component(DL_ARGS_PREFIX "${DL_ARGS_PREFIX}" ABSOLUTE
|
||||
BASE_DIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
endif()
|
||||
if (NOT DL_ARGS_DOWNLOAD_DIR)
|
||||
set(DL_ARGS_DOWNLOAD_DIR "${DL_ARGS_PREFIX}/${DL_ARGS_PROJ}-download")
|
||||
endif()
|
||||
|
||||
# Ensure the caller can know where to find the source and build directories
|
||||
if (NOT DL_ARGS_SOURCE_DIR)
|
||||
set(DL_ARGS_SOURCE_DIR "${DL_ARGS_PREFIX}/${DL_ARGS_PROJ}-src")
|
||||
endif()
|
||||
if (NOT DL_ARGS_BINARY_DIR)
|
||||
set(DL_ARGS_BINARY_DIR "${DL_ARGS_PREFIX}/${DL_ARGS_PROJ}-build")
|
||||
endif()
|
||||
set(${DL_ARGS_PROJ}_SOURCE_DIR "${DL_ARGS_SOURCE_DIR}" PARENT_SCOPE)
|
||||
set(${DL_ARGS_PROJ}_BINARY_DIR "${DL_ARGS_BINARY_DIR}" PARENT_SCOPE)
|
||||
|
||||
# The way that CLion manages multiple configurations, it causes a copy of
|
||||
# the CMakeCache.txt to be copied across due to it not expecting there to
|
||||
# be a project within a project. This causes the hard-coded paths in the
|
||||
# cache to be copied and builds to fail. To mitigate this, we simply
|
||||
# remove the cache if it exists before we configure the new project. It
|
||||
# is safe to do so because it will be re-generated. Since this is only
|
||||
# executed at the configure step, it should not cause additional builds or
|
||||
# downloads.
|
||||
file(REMOVE "${DL_ARGS_DOWNLOAD_DIR}/CMakeCache.txt")
|
||||
|
||||
# Create and build a separate CMake project to carry out the download.
|
||||
# If we've already previously done these steps, they will not cause
|
||||
# anything to be updated, so extra rebuilds of the project won't occur.
|
||||
# Make sure to pass through CMAKE_MAKE_PROGRAM in case the main project
|
||||
# has this set to something not findable on the PATH.
|
||||
configure_file("${_DownloadProjectDir}/DownloadProject.CMakeLists.cmake.in"
|
||||
"${DL_ARGS_DOWNLOAD_DIR}/CMakeLists.txt")
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}"
|
||||
-D "CMAKE_MAKE_PROGRAM:FILE=${CMAKE_MAKE_PROGRAM}"
|
||||
.
|
||||
RESULT_VARIABLE result
|
||||
${OUTPUT_QUIET}
|
||||
WORKING_DIRECTORY "${DL_ARGS_DOWNLOAD_DIR}"
|
||||
)
|
||||
if(result)
|
||||
message(FATAL_ERROR "CMake step for ${DL_ARGS_PROJ} failed: ${result}")
|
||||
endif()
|
||||
execute_process(COMMAND ${CMAKE_COMMAND} --build .
|
||||
RESULT_VARIABLE result
|
||||
${OUTPUT_QUIET}
|
||||
WORKING_DIRECTORY "${DL_ARGS_DOWNLOAD_DIR}"
|
||||
)
|
||||
if(result)
|
||||
message(FATAL_ERROR "Build step for ${DL_ARGS_PROJ} failed: ${result}")
|
||||
endif()
|
||||
|
||||
endfunction()
|
|
@ -1,58 +0,0 @@
|
|||
# Find Clipper library (http://www.angusj.com/delphi/clipper.php).
|
||||
# The following variables are set
|
||||
#
|
||||
# CLIPPER_FOUND
|
||||
# CLIPPER_INCLUDE_DIRS
|
||||
# CLIPPER_LIBRARIES
|
||||
#
|
||||
# It searches the environment variable $CLIPPER_PATH automatically.
|
||||
|
||||
FIND_PATH(CLIPPER_INCLUDE_DIRS clipper.hpp
|
||||
$ENV{CLIPPER_PATH}
|
||||
$ENV{CLIPPER_PATH}/cpp/
|
||||
$ENV{CLIPPER_PATH}/include/
|
||||
$ENV{CLIPPER_PATH}/include/polyclipping/
|
||||
${PROJECT_SOURCE_DIR}/python/pymesh/third_party/include/
|
||||
${PROJECT_SOURCE_DIR}/python/pymesh/third_party/include/polyclipping/
|
||||
${CMAKE_PREFIX_PATH}/include/polyclipping
|
||||
${CMAKE_PREFIX_PATH}/include/
|
||||
/opt/local/include/
|
||||
/opt/local/include/polyclipping/
|
||||
/usr/local/include/
|
||||
/usr/local/include/polyclipping/
|
||||
/usr/include
|
||||
/usr/include/polyclipping/)
|
||||
|
||||
FIND_LIBRARY(CLIPPER_LIBRARIES polyclipping
|
||||
$ENV{CLIPPER_PATH}
|
||||
$ENV{CLIPPER_PATH}/cpp/
|
||||
$ENV{CLIPPER_PATH}/cpp/build/
|
||||
$ENV{CLIPPER_PATH}/lib/
|
||||
$ENV{CLIPPER_PATH}/lib/polyclipping/
|
||||
${PROJECT_SOURCE_DIR}/python/pymesh/third_party/lib/
|
||||
${PROJECT_SOURCE_DIR}/python/pymesh/third_party/lib/polyclipping/
|
||||
${CMAKE_PREFIX_PATH}/lib/
|
||||
${CMAKE_PREFIX_PATH}/lib/polyclipping/
|
||||
/opt/local/lib/
|
||||
/opt/local/lib/polyclipping/
|
||||
/usr/local/lib/
|
||||
/usr/local/lib/polyclipping/
|
||||
/usr/lib/polyclipping)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Clipper
|
||||
"Clipper library cannot be found. Consider set CLIPPER_PATH environment variable"
|
||||
CLIPPER_INCLUDE_DIRS
|
||||
CLIPPER_LIBRARIES)
|
||||
|
||||
MARK_AS_ADVANCED(
|
||||
CLIPPER_INCLUDE_DIRS
|
||||
CLIPPER_LIBRARIES)
|
||||
|
||||
if(CLIPPER_FOUND)
|
||||
add_library(Clipper::Clipper INTERFACE IMPORTED)
|
||||
set_target_properties(Clipper::Clipper PROPERTIES INTERFACE_LINK_LIBRARIES ${CLIPPER_LIBRARIES})
|
||||
set_target_properties(Clipper::Clipper PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${CLIPPER_INCLUDE_DIRS})
|
||||
#target_link_libraries(Clipper::Clipper INTERFACE ${CLIPPER_LIBRARIES})
|
||||
#target_include_directories(Clipper::Clipper INTERFACE ${CLIPPER_INCLUDE_DIRS})
|
||||
endif()
|
|
@ -1,141 +0,0 @@
|
|||
#ifndef LIBNEST2D_H
|
||||
#define LIBNEST2D_H
|
||||
|
||||
// The type of backend should be set conditionally by the cmake configuriation
|
||||
// for now we set it statically to clipper backend
|
||||
#ifdef LIBNEST2D_BACKEND_CLIPPER
|
||||
#include <libnest2d/backends/clipper/geometries.hpp>
|
||||
#endif
|
||||
|
||||
#ifdef LIBNEST2D_OPTIMIZER_NLOPT
|
||||
// We include the stock optimizers for local and global optimization
|
||||
#include <libnest2d/optimizers/nlopt/subplex.hpp> // Local subplex for NfpPlacer
|
||||
#include <libnest2d/optimizers/nlopt/genetic.hpp> // Genetic for min. bounding box
|
||||
#endif
|
||||
|
||||
#include <libnest2d/libnest2d.hpp>
|
||||
#include <libnest2d/placers/bottomleftplacer.hpp>
|
||||
#include <libnest2d/placers/nfpplacer.hpp>
|
||||
#include <libnest2d/selections/firstfit.hpp>
|
||||
#include <libnest2d/selections/filler.hpp>
|
||||
#include <libnest2d/selections/djd_heuristic.hpp>
|
||||
|
||||
namespace libnest2d {
|
||||
|
||||
using Point = PointImpl;
|
||||
using Coord = TCoord<PointImpl>;
|
||||
using Box = _Box<PointImpl>;
|
||||
using Segment = _Segment<PointImpl>;
|
||||
using Circle = _Circle<PointImpl>;
|
||||
|
||||
using Item = _Item<PolygonImpl>;
|
||||
using Rectangle = _Rectangle<PolygonImpl>;
|
||||
using PackGroup = _PackGroup<PolygonImpl>;
|
||||
|
||||
using FillerSelection = selections::_FillerSelection<PolygonImpl>;
|
||||
using FirstFitSelection = selections::_FirstFitSelection<PolygonImpl>;
|
||||
using DJDHeuristic = selections::_DJDHeuristic<PolygonImpl>;
|
||||
|
||||
template<class Bin> // Generic placer for arbitrary bin types
|
||||
using _NfpPlacer = placers::_NofitPolyPlacer<PolygonImpl, Bin>;
|
||||
|
||||
// NfpPlacer is with Box bin
|
||||
using NfpPlacer = _NfpPlacer<Box>;
|
||||
|
||||
// This supports only box shaped bins
|
||||
using BottomLeftPlacer = placers::_BottomLeftPlacer<PolygonImpl>;
|
||||
|
||||
#ifdef LIBNEST2D_STATIC
|
||||
|
||||
extern template class Nester<NfpPlacer, FirstFitSelection>;
|
||||
extern template class Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
extern template PackGroup Nester<NfpPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
extern template PackGroup Nester<BottomLeftPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
|
||||
#endif
|
||||
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Iterator = std::vector<Item>::iterator>
|
||||
void nest(Iterator from, Iterator to,
|
||||
const typename Placer::BinType& bin,
|
||||
Coord dist = 0,
|
||||
const typename Placer::Config& pconf = {},
|
||||
const typename Selector::Config& sconf = {})
|
||||
{
|
||||
_Nester<Placer, Selector> nester(bin, dist, pconf, sconf);
|
||||
nester.execute(from, to);
|
||||
}
|
||||
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Iterator = std::vector<Item>::iterator>
|
||||
void nest(Iterator from, Iterator to,
|
||||
const typename Placer::BinType& bin,
|
||||
ProgressFunction prg,
|
||||
StopCondition scond = []() { return false; },
|
||||
Coord dist = 0,
|
||||
const typename Placer::Config& pconf = {},
|
||||
const typename Selector::Config& sconf = {})
|
||||
{
|
||||
_Nester<Placer, Selector> nester(bin, dist, pconf, sconf);
|
||||
if(prg) nester.progressIndicator(prg);
|
||||
if(scond) nester.stopCondition(scond);
|
||||
nester.execute(from, to);
|
||||
}
|
||||
|
||||
#ifdef LIBNEST2D_STATIC
|
||||
|
||||
extern template class Nester<NfpPlacer, FirstFitSelection>;
|
||||
extern template class Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
|
||||
extern template void nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box& bin,
|
||||
Coord dist = 0,
|
||||
const NfpPlacer::Config& pconf,
|
||||
const FirstFitSelection::Config& sconf);
|
||||
|
||||
extern template void nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box& bin,
|
||||
ProgressFunction prg,
|
||||
StopCondition scond,
|
||||
Coord dist = 0,
|
||||
const NfpPlacer::Config& pconf,
|
||||
const FirstFitSelection::Config& sconf);
|
||||
|
||||
#endif
|
||||
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Container = std::vector<Item>>
|
||||
void nest(Container&& cont,
|
||||
const typename Placer::BinType& bin,
|
||||
Coord dist = 0,
|
||||
const typename Placer::Config& pconf = {},
|
||||
const typename Selector::Config& sconf = {})
|
||||
{
|
||||
nest<Placer, Selector>(cont.begin(), cont.end(), bin, dist, pconf, sconf);
|
||||
}
|
||||
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Container = std::vector<Item>>
|
||||
void nest(Container&& cont,
|
||||
const typename Placer::BinType& bin,
|
||||
ProgressFunction prg,
|
||||
StopCondition scond = []() { return false; },
|
||||
Coord dist = 0,
|
||||
const typename Placer::Config& pconf = {},
|
||||
const typename Selector::Config& sconf = {})
|
||||
{
|
||||
nest<Placer, Selector>(cont.begin(), cont.end(), bin, prg, scond, dist,
|
||||
pconf, sconf);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif // LIBNEST2D_H
|
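For reference, a minimal sketch of how the nest() overloads in this removed umbrella header were called. The include path and the Box width/height constructor are assumptions based on libnest2d's usual API rather than something shown in this hunk, and the coordinates are arbitrary.

// Sketch only: exercises the free-function nest() declared in the removed header.
#include <vector>
#include <libnest2d.h>   // assumed include path of the removed umbrella header

int main() {
    using namespace libnest2d;

    std::vector<Item> items;
    items.emplace_back(Rectangle(10000000, 5000000));  // arbitrary integer coordinates
    items.emplace_back(Rectangle(8000000, 8000000));

    Box bin(210000000, 250000000);   // assumed width/height constructor
    Coord spacing = 1000000;         // minimum distance between items

    // Defaults: Placer = NfpPlacer (Box bin), Selector = FirstFitSelection.
    nest(items.begin(), items.end(), bin, spacing);

    for (Item &itm : items) {
        const auto &placed = itm.transformedShape();   // transformed polygon after nesting
        (void)placed;
    }
    return 0;
}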
|
@ -1,73 +0,0 @@
|
|||
if(NOT TARGET clipper) # If there is a clipper target in the parent project we are good to go.
|
||||
|
||||
find_package(Clipper 6.1)
|
||||
|
||||
if(NOT CLIPPER_FOUND)
|
||||
find_package(Subversion QUIET)
|
||||
if(Subversion_FOUND)
|
||||
|
||||
set(URL_CLIPPER "https://svn.code.sf.net/p/polyclipping/code/trunk/cpp"
|
||||
CACHE STRING "Clipper source code repository location.")
|
||||
|
||||
message(STATUS "Clipper not found so it will be downloaded.")
|
||||
# Silently download and build the library in the build dir
|
||||
|
||||
if (CMAKE_VERSION VERSION_LESS 3.2)
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "")
|
||||
else()
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "UPDATE_DISCONNECTED 1")
|
||||
endif()
|
||||
|
||||
include(DownloadProject)
|
||||
download_project( PROJ clipper_library
|
||||
SVN_REPOSITORY ${URL_CLIPPER}
|
||||
SVN_REVISION -r540
|
||||
#SOURCE_SUBDIR cpp
|
||||
INSTALL_COMMAND ""
|
||||
CONFIGURE_COMMAND "" # Not working, I will just add the source files
|
||||
${UPDATE_DISCONNECTED_IF_AVAILABLE}
|
||||
)
|
||||
|
||||
# This is not working and I don't have time to fix it
|
||||
# add_subdirectory(${clipper_library_SOURCE_DIR}/cpp
|
||||
# ${clipper_library_BINARY_DIR}
|
||||
# )
|
||||
|
||||
add_library(clipperBackend STATIC
|
||||
${clipper_library_SOURCE_DIR}/clipper.cpp
|
||||
${clipper_library_SOURCE_DIR}/clipper.hpp)
|
||||
|
||||
target_include_directories(clipperBackend INTERFACE ${clipper_library_SOURCE_DIR})
|
||||
else()
|
||||
message(FATAL_ERROR "Can't find clipper library and no SVN client found to download.
|
||||
You can download the clipper sources and define a clipper target in your project, that will work for libnest2d.")
|
||||
endif()
|
||||
else()
|
||||
add_library(clipperBackend INTERFACE)
|
||||
target_link_libraries(clipperBackend INTERFACE Clipper::Clipper)
|
||||
endif()
|
||||
else()
|
||||
# set(CLIPPER_INCLUDE_DIRS "" PARENT_SCOPE)
|
||||
# set(CLIPPER_LIBRARIES clipper PARENT_SCOPE)
|
||||
add_library(clipperBackend INTERFACE)
|
||||
target_link_libraries(clipperBackend INTERFACE clipper)
|
||||
endif()
|
||||
|
||||
# Clipper backend is not enough on its own, it still needs some functions
|
||||
# from Boost geometry
|
||||
if(NOT Boost_FOUND)
|
||||
find_package(Boost 1.58 REQUIRED)
|
||||
# TODO automatic download of boost geometry headers
|
||||
endif()
|
||||
|
||||
target_link_libraries(clipperBackend INTERFACE Boost::boost )
|
||||
#target_sources(ClipperBackend INTERFACE
|
||||
# ${CMAKE_CURRENT_SOURCE_DIR}/geometries.hpp
|
||||
# ${CMAKE_CURRENT_SOURCE_DIR}/clipper_polygon.hpp
|
||||
# ${SRC_DIR}/libnest2d/utils/boost_alg.hpp )
|
||||
|
||||
target_compile_definitions(clipperBackend INTERFACE LIBNEST2D_BACKEND_CLIPPER)
|
||||
|
||||
# And finally plug the clipperBackend into libnest2d
|
||||
# target_link_libraries(libnest2d INTERFACE clipperBackend)
|
||||
|
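A sketch of the escape hatch this file checks for first: a parent project can define its own clipper target before adding libnest2d, in which case the find_package/SVN-download branch above is never taken. The third_party path is hypothetical.

# Hypothetical parent project providing its own clipper target.
add_library(clipper STATIC
    ${CMAKE_CURRENT_SOURCE_DIR}/third_party/clipper/clipper.cpp)
target_include_directories(clipper PUBLIC
    ${CMAKE_CURRENT_SOURCE_DIR}/third_party/clipper)

# libnest2d sees TARGET clipper and wires clipperBackend to it (last branch above).
add_subdirectory(libnest2d)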
|
@ -299,9 +299,456 @@ inline NfpResult<RawShape> nfpConvexOnly(const RawShape& sh,
|
|||
|
||||
template<class RawShape>
|
||||
NfpResult<RawShape> nfpSimpleSimple(const RawShape& cstationary,
|
||||
const RawShape& cother)
|
||||
{
|
||||
return {};
|
||||
|
||||
// Algorithms are from the original algorithm proposed in paper:
|
||||
// https://eprints.soton.ac.uk/36850/1/CORMSIS-05-05.pdf
|
||||
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
// Algorithm 1: Obtaining the minkowski sum
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// I guess this is not a full minkowski sum of the two input polygons by
|
||||
// definition. This yields a subset that is compatible with the next 2
|
||||
// algorithms.
|
||||
|
||||
using Result = NfpResult<RawShape>;
|
||||
using Vertex = TPoint<RawShape>;
|
||||
using Coord = TCoord<Vertex>;
|
||||
using Edge = _Segment<Vertex>;
|
||||
namespace sl = shapelike;
|
||||
using std::signbit;
|
||||
using std::sort;
|
||||
using std::vector;
|
||||
using std::ref;
|
||||
using std::reference_wrapper;
|
||||
|
||||
// TODO The original algorithms expects the stationary polygon in
|
||||
// counter clockwise and the orbiter in clockwise order.
|
||||
// So for preventing any further complication, I will make the input
|
||||
// the way it should be, then work my way around the orientations.
|
||||
|
||||
// Reverse the stationary contour to counter clockwise
|
||||
auto stcont = sl::contour(cstationary);
|
||||
{
|
||||
std::reverse(sl::begin(stcont), sl::end(stcont));
|
||||
stcont.pop_back();
|
||||
auto it = std::min_element(sl::begin(stcont), sl::end(stcont),
|
||||
[](const Vertex& v1, const Vertex& v2) {
|
||||
return getY(v1) < getY(v2);
|
||||
});
|
||||
std::rotate(sl::begin(stcont), it, sl::end(stcont));
|
||||
sl::addVertex(stcont, sl::front(stcont));
|
||||
}
|
||||
RawShape stationary;
|
||||
sl::contour(stationary) = stcont;
|
||||
|
||||
// Reverse the orbiter contour to counter clockwise
|
||||
auto orbcont = sl::contour(cother);
|
||||
{
|
||||
std::reverse(orbcont.begin(), orbcont.end());
|
||||
|
||||
// Step 1: Make the orbiter reverse oriented
|
||||
|
||||
orbcont.pop_back();
|
||||
auto it = std::min_element(orbcont.begin(), orbcont.end(),
|
||||
[](const Vertex& v1, const Vertex& v2) {
|
||||
return getY(v1) < getY(v2);
|
||||
});
|
||||
|
||||
std::rotate(orbcont.begin(), it, orbcont.end());
|
||||
orbcont.emplace_back(orbcont.front());
|
||||
|
||||
for(auto &v : orbcont) v = -v;
|
||||
|
||||
}
|
||||
|
||||
// Copy the orbiter (contour only), we will have to work on it
|
||||
RawShape orbiter;
|
||||
sl::contour(orbiter) = orbcont;
|
||||
|
||||
// An edge with additional data for marking it
|
||||
struct MarkedEdge {
|
||||
Edge e; Radians turn_angle = 0; bool is_turning_point = false;
|
||||
MarkedEdge() = default;
|
||||
MarkedEdge(const Edge& ed, Radians ta, bool tp):
|
||||
e(ed), turn_angle(ta), is_turning_point(tp) {}
|
||||
|
||||
// debug
|
||||
std::string label;
|
||||
};
|
||||
|
||||
// Container for marked edges
|
||||
using EdgeList = vector<MarkedEdge>;
|
||||
|
||||
EdgeList A, B;
|
||||
|
||||
// This is how an edge list is created from the polygons
|
||||
auto fillEdgeList = [](EdgeList& L, const RawShape& ppoly, int dir) {
|
||||
auto& poly = sl::contour(ppoly);
|
||||
|
||||
L.reserve(sl::contourVertexCount(poly));
|
||||
|
||||
if(dir > 0) {
|
||||
auto it = poly.begin();
|
||||
auto nextit = std::next(it);
|
||||
|
||||
double turn_angle = 0;
|
||||
bool is_turn_point = false;
|
||||
|
||||
while(nextit != poly.end()) {
|
||||
L.emplace_back(Edge(*it, *nextit), turn_angle, is_turn_point);
|
||||
it++; nextit++;
|
||||
}
|
||||
} else {
|
||||
auto it = sl::rbegin(poly);
|
||||
auto nextit = std::next(it);
|
||||
|
||||
double turn_angle = 0;
|
||||
bool is_turn_point = false;
|
||||
|
||||
while(nextit != sl::rend(poly)) {
|
||||
L.emplace_back(Edge(*it, *nextit), turn_angle, is_turn_point);
|
||||
it++; nextit++;
|
||||
}
|
||||
}
|
||||
|
||||
auto getTurnAngle = [](const Edge& e1, const Edge& e2) {
|
||||
auto phi = e1.angleToXaxis();
|
||||
auto phi_prev = e2.angleToXaxis();
|
||||
auto turn_angle = phi-phi_prev;
|
||||
if(turn_angle > Pi) turn_angle -= TwoPi;
|
||||
if(turn_angle < -Pi) turn_angle += TwoPi;
|
||||
return turn_angle;
|
||||
};
|
||||
|
||||
auto eit = L.begin();
|
||||
auto enext = std::next(eit);
|
||||
|
||||
eit->turn_angle = getTurnAngle(L.front().e, L.back().e);
|
||||
|
||||
while(enext != L.end()) {
|
||||
enext->turn_angle = getTurnAngle( enext->e, eit->e);
|
||||
eit->is_turning_point =
|
||||
signbit(enext->turn_angle) != signbit(eit->turn_angle);
|
||||
++eit; ++enext;
|
||||
}
|
||||
|
||||
L.back().is_turning_point = signbit(L.back().turn_angle) !=
|
||||
signbit(L.front().turn_angle);
|
||||
|
||||
};
|
||||
|
||||
// Step 2: Fill the edgelists
|
||||
fillEdgeList(A, stationary, 1);
|
||||
fillEdgeList(B, orbiter, 1);
|
||||
|
||||
int i = 1;
|
||||
for(MarkedEdge& me : A) {
|
||||
std::cout << "a" << i << ":\n\t"
|
||||
<< getX(me.e.first()) << " " << getY(me.e.first()) << "\n\t"
|
||||
<< getX(me.e.second()) << " " << getY(me.e.second()) << "\n\t"
|
||||
<< "Turning point: " << (me.is_turning_point ? "yes" : "no")
|
||||
<< std::endl;
|
||||
|
||||
me.label = "a"; me.label += std::to_string(i);
|
||||
i++;
|
||||
}
|
||||
|
||||
i = 1;
|
||||
for(MarkedEdge& me : B) {
|
||||
std::cout << "b" << i << ":\n\t"
|
||||
<< getX(me.e.first()) << " " << getY(me.e.first()) << "\n\t"
|
||||
<< getX(me.e.second()) << " " << getY(me.e.second()) << "\n\t"
|
||||
<< "Turning point: " << (me.is_turning_point ? "yes" : "no")
|
||||
<< std::endl;
|
||||
me.label = "b"; me.label += std::to_string(i);
|
||||
i++;
|
||||
}
|
||||
|
||||
// A reference to a marked edge that also knows its container
|
||||
struct MarkedEdgeRef {
|
||||
reference_wrapper<MarkedEdge> eref;
|
||||
reference_wrapper<vector<MarkedEdgeRef>> container;
|
||||
Coord dir = 1; // Direction modifier
|
||||
|
||||
inline Radians angleX() const { return eref.get().e.angleToXaxis(); }
|
||||
inline const Edge& edge() const { return eref.get().e; }
|
||||
inline Edge& edge() { return eref.get().e; }
|
||||
inline bool isTurningPoint() const {
|
||||
return eref.get().is_turning_point;
|
||||
}
|
||||
inline bool isFrom(const vector<MarkedEdgeRef>& cont ) {
|
||||
return &(container.get()) == &cont;
|
||||
}
|
||||
inline bool eq(const MarkedEdgeRef& mr) {
|
||||
return &(eref.get()) == &(mr.eref.get());
|
||||
}
|
||||
|
||||
MarkedEdgeRef(reference_wrapper<MarkedEdge> er,
|
||||
reference_wrapper<vector<MarkedEdgeRef>> ec):
|
||||
eref(er), container(ec), dir(1) {}
|
||||
|
||||
MarkedEdgeRef(reference_wrapper<MarkedEdge> er,
|
||||
reference_wrapper<vector<MarkedEdgeRef>> ec,
|
||||
Coord d):
|
||||
eref(er), container(ec), dir(d) {}
|
||||
};
|
||||
|
||||
using EdgeRefList = vector<MarkedEdgeRef>;
|
||||
|
||||
// Comparing two marked edges
|
||||
auto sortfn = [](const MarkedEdgeRef& e1, const MarkedEdgeRef& e2) {
|
||||
return e1.angleX() < e2.angleX();
|
||||
};
|
||||
|
||||
EdgeRefList Aref, Bref; // We create containers for the references
|
||||
Aref.reserve(A.size()); Bref.reserve(B.size());
|
||||
|
||||
// Fill reference container for the stationary polygon
|
||||
std::for_each(A.begin(), A.end(), [&Aref](MarkedEdge& me) {
|
||||
Aref.emplace_back( ref(me), ref(Aref) );
|
||||
});
|
||||
|
||||
// Fill reference container for the orbiting polygon
|
||||
std::for_each(B.begin(), B.end(), [&Bref](MarkedEdge& me) {
|
||||
Bref.emplace_back( ref(me), ref(Bref) );
|
||||
});
|
||||
|
||||
auto mink = [sortfn] // the Mink(Q, R, direction) sub-procedure
|
||||
(const EdgeRefList& Q, const EdgeRefList& R, bool positive)
|
||||
{
|
||||
|
||||
// Step 1 "merge sort_list(Q) and sort_list(R) to form merge_list(Q,R)"
|
||||
// Sort the containers of edge references and merge them.
|
||||
// Q could be sorted only once and be reused here but we would still
|
||||
// need to merge it with sorted(R).
|
||||
|
||||
EdgeRefList merged;
|
||||
EdgeRefList S, seq;
|
||||
merged.reserve(Q.size() + R.size());
|
||||
|
||||
merged.insert(merged.end(), R.begin(), R.end());
|
||||
std::stable_sort(merged.begin(), merged.end(), sortfn);
|
||||
merged.insert(merged.end(), Q.begin(), Q.end());
|
||||
std::stable_sort(merged.begin(), merged.end(), sortfn);
|
||||
|
||||
// Step 2 "set i = 1, k = 1, direction = 1, s1 = q1"
|
||||
// we don't use i, instead, q is an iterator into Q. k would be an index
|
||||
// into the merged sequence but we use "it" as an iterator for that
|
||||
|
||||
// here we obtain references for the containers for later comparisons
|
||||
const auto& Rcont = R.begin()->container.get();
|
||||
const auto& Qcont = Q.begin()->container.get();
|
||||
|
||||
// Set the initial direction
|
||||
Coord dir = 1;
|
||||
|
||||
// roughly i = 1 (so q = Q.begin()) and s1 = q1 so S[0] = q;
|
||||
if(positive) {
|
||||
auto q = Q.begin();
|
||||
S.emplace_back(*q);
|
||||
|
||||
// Roughly step 3
|
||||
|
||||
std::cout << "merged size: " << merged.size() << std::endl;
|
||||
auto mit = merged.begin();
|
||||
for(bool finish = false; !finish && q != Q.end();) {
|
||||
++q; // "Set i = i + 1"
|
||||
|
||||
while(!finish && mit != merged.end()) {
|
||||
if(mit->isFrom(Rcont)) {
|
||||
auto s = *mit;
|
||||
s.dir = dir;
|
||||
S.emplace_back(s);
|
||||
}
|
||||
|
||||
if(mit->eq(*q)) {
|
||||
S.emplace_back(*q);
|
||||
if(mit->isTurningPoint()) dir = -dir;
|
||||
if(q == Q.begin()) finish = true;
|
||||
break;
|
||||
}
|
||||
|
||||
mit += dir;
|
||||
// __nfp::advance(mit, merged, dir > 0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
auto q = Q.rbegin();
|
||||
S.emplace_back(*q);
|
||||
|
||||
// Roughly step 3
|
||||
|
||||
std::cout << "merged size: " << merged.size() << std::endl;
|
||||
auto mit = merged.begin();
|
||||
for(bool finish = false; !finish && q != Q.rend();) {
|
||||
++q; // "Set i = i + 1"
|
||||
|
||||
while(!finish && mit != merged.end()) {
|
||||
if(mit->isFrom(Rcont)) {
|
||||
auto s = *mit;
|
||||
s.dir = dir;
|
||||
S.emplace_back(s);
|
||||
}
|
||||
|
||||
if(mit->eq(*q)) {
|
||||
S.emplace_back(*q);
|
||||
S.back().dir = -1;
|
||||
if(mit->isTurningPoint()) dir = -dir;
|
||||
if(q == Q.rbegin()) finish = true;
|
||||
break;
|
||||
}
|
||||
|
||||
mit += dir;
|
||||
// __nfp::advance(mit, merged, dir > 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Step 4:
|
||||
|
||||
// "Let starting edge r1 be in position si in sequence"
|
||||
// whaaat? I guess this means the following:
|
||||
auto it = S.begin();
|
||||
while(!it->eq(*R.begin())) ++it;
|
||||
|
||||
// "Set j = 1, next = 2, direction = 1, seq1 = si"
|
||||
// we don't use j, seq is expanded dynamically.
|
||||
dir = 1;
|
||||
auto next = std::next(R.begin()); seq.emplace_back(*it);
|
||||
|
||||
// Step 5:
|
||||
// "If all si edges have been allocated to seqj" should mean that
|
||||
// we loop until seq has equal size with S
|
||||
auto send = it; //it == S.begin() ? it : std::prev(it);
|
||||
while(it != S.end()) {
|
||||
++it; if(it == S.end()) it = S.begin();
|
||||
if(it == send) break;
|
||||
|
||||
if(it->isFrom(Qcont)) {
|
||||
seq.emplace_back(*it); // "If si is from Q, j = j + 1, seqj = si"
|
||||
|
||||
// "If si is a turning point in Q,
|
||||
// direction = - direction, next = next + direction"
|
||||
if(it->isTurningPoint()) {
|
||||
dir = -dir;
|
||||
next += dir;
|
||||
// __nfp::advance(next, R, dir > 0);
|
||||
}
|
||||
}
|
||||
|
||||
if(it->eq(*next) /*&& dir == next->dir*/) { // "If si = direction.rnext"
|
||||
// "j = j + 1, seqj = si, next = next + direction"
|
||||
seq.emplace_back(*it);
|
||||
next += dir;
|
||||
// __nfp::advance(next, R, dir > 0);
|
||||
}
|
||||
}
|
||||
|
||||
return seq;
|
||||
};
|
||||
|
||||
std::vector<EdgeRefList> seqlist;
|
||||
seqlist.reserve(Bref.size());
|
||||
|
||||
EdgeRefList Bslope = Bref; // copy Bref, we will make a slope diagram
|
||||
|
||||
// make the slope diagram of B
|
||||
std::sort(Bslope.begin(), Bslope.end(), sortfn);
|
||||
|
||||
auto slopeit = Bslope.begin(); // search for the first turning point
|
||||
while(!slopeit->isTurningPoint() && slopeit != Bslope.end()) slopeit++;
|
||||
|
||||
if(slopeit == Bslope.end()) {
|
||||
// no turning point means convex polygon.
|
||||
seqlist.emplace_back(mink(Aref, Bref, true));
|
||||
} else {
|
||||
int dir = 1;
|
||||
|
||||
auto firstturn = Bref.begin();
|
||||
while(!firstturn->eq(*slopeit)) ++firstturn;
|
||||
|
||||
assert(firstturn != Bref.end());
|
||||
|
||||
EdgeRefList bgroup; bgroup.reserve(Bref.size());
|
||||
bgroup.emplace_back(*slopeit);
|
||||
|
||||
auto b_it = std::next(firstturn);
|
||||
while(b_it != firstturn) {
|
||||
if(b_it == Bref.end()) b_it = Bref.begin();
|
||||
|
||||
while(!slopeit->eq(*b_it)) {
|
||||
__nfp::advance(slopeit, Bslope, dir > 0);
|
||||
}
|
||||
|
||||
if(!slopeit->isTurningPoint()) {
|
||||
bgroup.emplace_back(*slopeit);
|
||||
} else {
|
||||
if(!bgroup.empty()) {
|
||||
if(dir > 0) bgroup.emplace_back(*slopeit);
|
||||
for(auto& me : bgroup) {
|
||||
std::cout << me.eref.get().label << ", ";
|
||||
}
|
||||
std::cout << std::endl;
|
||||
seqlist.emplace_back(mink(Aref, bgroup, dir == 1 ? true : false));
|
||||
bgroup.clear();
|
||||
if(dir < 0) bgroup.emplace_back(*slopeit);
|
||||
} else {
|
||||
bgroup.emplace_back(*slopeit);
|
||||
}
|
||||
|
||||
dir *= -1;
|
||||
}
|
||||
++b_it;
|
||||
}
|
||||
}
|
||||
|
||||
// while(it != Bref.end()) // This is step 3 and step 4 in one loop
|
||||
// if(it->isTurningPoint()) {
|
||||
// R = {R.last, it++};
|
||||
// auto seq = mink(Q, R, orientation);
|
||||
|
||||
// // TODO step 6 (should be 5 shouldn't it?): linking edges from A
|
||||
// // I don't get this step
|
||||
|
||||
// seqlist.insert(seqlist.end(), seq.begin(), seq.end());
|
||||
// orientation = !orientation;
|
||||
// } else ++it;
|
||||
|
||||
// if(seqlist.empty()) seqlist = mink(Q, {Bref.begin(), Bref.end()}, true);
|
||||
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
// Algorithm 2: breaking Minkowski sums into track line trips
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
// Algorithm 3: finding the boundary of the NFP from track line trips
|
||||
// /////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
for(auto& seq : seqlist) {
|
||||
std::cout << "seqlist size: " << seq.size() << std::endl;
|
||||
for(auto& s : seq) {
|
||||
std::cout << (s.dir > 0 ? "" : "-") << s.eref.get().label << ", ";
|
||||
}
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
auto& seq = seqlist.front();
|
||||
RawShape rsh;
|
||||
Vertex top_nfp;
|
||||
std::vector<Edge> edgelist; edgelist.reserve(seq.size());
|
||||
for(auto& s : seq) {
|
||||
edgelist.emplace_back(s.eref.get().e);
|
||||
}
|
||||
|
||||
__nfp::buildPolygon(edgelist, rsh, top_nfp);
|
||||
|
||||
return Result(rsh, top_nfp);
|
||||
}
|
||||
|
||||
// Specializable NFP implementation class. Specialize it if you have a faster
|
||||
|
|
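The getTurnAngle lambda in nfpSimpleSimple above normalizes the difference of two edge angles into (-Pi, Pi], and a sign change between consecutive turn angles is what marks an edge as a turning point. A standalone sketch of that normalization and test, using plain doubles instead of the Radians and _Segment types so it compiles in isolation; the angles are made up.

// Standalone illustration of the turn-angle normalization and turning-point test
// used above; plain doubles stand in for Radians.
#include <cmath>
#include <cstdio>

static double turnAngle(double phi, double phi_prev) {
    const double Pi = 3.141592653589793, TwoPi = 2.0 * Pi;
    double turn = phi - phi_prev;
    if (turn >  Pi) turn -= TwoPi;   // wrap into (-Pi, Pi]
    if (turn < -Pi) turn += TwoPi;
    return turn;
}

int main() {
    // Directions (angle to the X axis) of consecutive contour edges, in radians.
    const double dirs[] = {0.1, 0.6, 0.4, 1.2};
    const int n = 4;

    // Like the code above, the first turn angle wraps around to the last edge.
    double prev_turn = turnAngle(dirs[0], dirs[n - 1]);

    for (int i = 1; i < n; ++i) {
        double turn = turnAngle(dirs[i], dirs[i - 1]);
        // The previous edge is a turning point when the sign of the turn flips.
        bool prev_is_turning_point = std::signbit(turn) != std::signbit(prev_turn);
        std::printf("edge %d: turn=%+.2f, previous edge turning point: %s\n",
                    i, turn, prev_is_turning_point ? "yes" : "no");
        prev_turn = turn;
    }
    return 0;
}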
|
@ -1,862 +1,134 @@
|
|||
#ifndef LIBNEST2D_HPP
|
||||
#define LIBNEST2D_HPP
|
||||
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <array>
|
||||
#include <algorithm>
|
||||
#include <functional>
|
||||
// The type of backend should be set conditionally by the cmake configuration
|
||||
// for now we set it statically to clipper backend
|
||||
#ifdef LIBNEST2D_GEOMETRIES_clipper
|
||||
#include <libnest2d/backends/clipper/geometries.hpp>
|
||||
#endif
|
||||
|
||||
#include <libnest2d/geometry_traits.hpp>
|
||||
#ifdef LIBNEST2D_OPTIMIZER_nlopt
|
||||
// We include the stock optimizers for local and global optimization
|
||||
#include <libnest2d/optimizers/nlopt/subplex.hpp> // Local subplex for NfpPlacer
|
||||
#include <libnest2d/optimizers/nlopt/genetic.hpp> // Genetic for min. bounding box
|
||||
#endif
|
||||
|
||||
#include <libnest2d/nester.hpp>
|
||||
#include <libnest2d/placers/bottomleftplacer.hpp>
|
||||
#include <libnest2d/placers/nfpplacer.hpp>
|
||||
#include <libnest2d/selections/firstfit.hpp>
|
||||
#include <libnest2d/selections/filler.hpp>
|
||||
#include <libnest2d/selections/djd_heuristic.hpp>
|
||||
|
||||
namespace libnest2d {
|
||||
|
||||
static const constexpr int BIN_ID_UNSET = -1;
|
||||
using Point = PointImpl;
|
||||
using Coord = TCoord<PointImpl>;
|
||||
using Box = _Box<PointImpl>;
|
||||
using Segment = _Segment<PointImpl>;
|
||||
using Circle = _Circle<PointImpl>;
|
||||
|
||||
/**
|
||||
* \brief An item to be placed on a bin.
|
||||
*
|
||||
* It holds a copy of the original shape object but supports move construction
|
||||
* from the shape objects if it's an rvalue reference. This way we can construct
|
||||
* the items without the cost of copying a potentially large amount of input.
|
||||
*
|
||||
* The results of some calculations are cached for maintaining fast run times.
|
||||
* For this reason, memory demands are much higher but this should pay off.
|
||||
*/
|
||||
template<class RawShape>
|
||||
class _Item {
|
||||
using Coord = TCoord<TPoint<RawShape>>;
|
||||
using Vertex = TPoint<RawShape>;
|
||||
using Box = _Box<Vertex>;
|
||||
using Item = _Item<PolygonImpl>;
|
||||
using Rectangle = _Rectangle<PolygonImpl>;
|
||||
using PackGroup = _PackGroup<PolygonImpl>;
|
||||
|
||||
using VertexConstIterator = typename TContour<RawShape>::const_iterator;
|
||||
using FillerSelection = selections::_FillerSelection<PolygonImpl>;
|
||||
using FirstFitSelection = selections::_FirstFitSelection<PolygonImpl>;
|
||||
using DJDHeuristic = selections::_DJDHeuristic<PolygonImpl>;
|
||||
|
||||
// The original shape that gets encapsulated.
|
||||
RawShape sh_;
|
||||
template<class Bin> // Generic placer for arbitrary bin types
|
||||
using _NfpPlacer = placers::_NofitPolyPlacer<PolygonImpl, Bin>;
|
||||
|
||||
// Transformation data
|
||||
Vertex translation_{0, 0};
|
||||
Radians rotation_{0.0};
|
||||
Coord inflation_{0};
|
||||
// NfpPlacer is with Box bin
|
||||
using NfpPlacer = _NfpPlacer<Box>;
|
||||
|
||||
// Info about whether the transformations will have to take place
|
||||
// This is needed because if floating point is used, it is hard to say
|
||||
// that a zero angle is not a rotation because of testing for equality.
|
||||
bool has_rotation_ = false, has_translation_ = false, has_inflation_ = false;
|
||||
// This supports only box shaped bins
|
||||
using BottomLeftPlacer = placers::_BottomLeftPlacer<PolygonImpl>;
|
||||
|
||||
// For caching the calculations as they can get pretty expensive.
|
||||
mutable RawShape tr_cache_;
|
||||
mutable bool tr_cache_valid_ = false;
|
||||
mutable double area_cache_ = 0;
|
||||
mutable bool area_cache_valid_ = false;
|
||||
mutable RawShape inflate_cache_;
|
||||
mutable bool inflate_cache_valid_ = false;
|
||||
#ifdef LIBNEST2D_STATIC
|
||||
|
||||
enum class Convexity: char {
|
||||
UNCHECKED,
|
||||
C_TRUE,
|
||||
C_FALSE
|
||||
};
|
||||
extern template class _Nester<NfpPlacer, FirstFitSelection>;
|
||||
extern template class _Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
extern template std::size_t _Nester<NfpPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
extern template std::size_t _Nester<BottomLeftPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
|
||||
mutable Convexity convexity_ = Convexity::UNCHECKED;
|
||||
mutable VertexConstIterator rmt_; // rightmost top vertex
|
||||
mutable VertexConstIterator lmb_; // leftmost bottom vertex
|
||||
mutable bool rmt_valid_ = false, lmb_valid_ = false;
|
||||
mutable struct BBCache {
|
||||
Box bb; bool valid;
|
||||
BBCache(): valid(false) {}
|
||||
} bb_cache_;
|
||||
#endif
|
||||
|
||||
template<class Placer = NfpPlacer, class Selector = FirstFitSelection>
|
||||
struct NestConfig {
|
||||
typename Placer::Config placer_config;
|
||||
typename Selector::Config selector_config;
|
||||
using Placement = typename Placer::Config;
|
||||
using Selection = typename Selector::Config;
|
||||
|
||||
int binid_{BIN_ID_UNSET}, priority_{0};
|
||||
bool fixed_{false};
|
||||
|
||||
public:
|
||||
|
||||
/// The type of the shape which was handed over as the template argument.
|
||||
using ShapeType = RawShape;
|
||||
|
||||
/**
|
||||
* \brief Iterator type for the outer vertices.
|
||||
*
|
||||
* Only const iterators can be used. The _Item type is not intended to
|
||||
* modify the carried shapes from the outside. The main purpose of this type
|
||||
* is to cache the calculation results from the various operators it
|
||||
* supports. Giving out a non const iterator would make it impossible to
|
||||
* perform correct cache invalidation.
|
||||
*/
|
||||
using Iterator = VertexConstIterator;
|
||||
|
||||
/**
|
||||
* @brief Get the orientation of the polygon.
|
||||
*
|
||||
* The orientation has to be specified as a specialization of the
|
||||
* OrientationType struct which has a Value constant.
|
||||
*
|
||||
* @return The orientation type identifier for the _Item type.
|
||||
*/
|
||||
static BP2D_CONSTEXPR Orientation orientation() {
|
||||
return OrientationType<RawShape>::Value;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Constructing an _Item from an existing raw shape. The shape will
|
||||
* be copied into the _Item object.
|
||||
* @param sh The original shape object.
|
||||
*/
|
||||
explicit inline _Item(const RawShape& sh): sh_(sh) {}
|
||||
|
||||
/**
|
||||
* @brief Construction of an item by moving the content of the raw shape,
|
||||
* assuming that it supports move semantics.
|
||||
* @param sh The original shape object.
|
||||
*/
|
||||
explicit inline _Item(RawShape&& sh): sh_(std::move(sh)) {}
|
||||
|
||||
/**
|
||||
* @brief Create an item from an initializer list.
|
||||
* @param il The initializer list of vertices.
|
||||
*/
|
||||
inline _Item(const std::initializer_list< Vertex >& il):
|
||||
sh_(sl::create<RawShape>(il)) {}
|
||||
|
||||
inline _Item(const TContour<RawShape>& contour,
|
||||
const THolesContainer<RawShape>& holes = {}):
|
||||
sh_(sl::create<RawShape>(contour, holes)) {}
|
||||
|
||||
inline _Item(TContour<RawShape>&& contour,
|
||||
THolesContainer<RawShape>&& holes):
|
||||
sh_(sl::create<RawShape>(std::move(contour), std::move(holes))) {}
|
||||
|
||||
inline bool isFixed() const noexcept { return fixed_; }
|
||||
inline void markAsFixed(bool fixed = true) { fixed_ = fixed; }
|
||||
|
||||
inline void binId(int idx) { binid_ = idx; }
|
||||
inline int binId() const noexcept { return binid_; }
|
||||
|
||||
inline void priority(int p) { priority_ = p; }
|
||||
inline int priority() const noexcept { return priority_; }
|
||||
|
||||
/**
|
||||
* @brief Convert the polygon to string representation. The format depends
|
||||
* on the implementation of the polygon.
|
||||
* @return
|
||||
*/
|
||||
inline std::string toString() const
|
||||
{
|
||||
return sl::toString(sh_);
|
||||
}
|
||||
|
||||
/// Iterator to the first contour vertex in the polygon.
|
||||
inline Iterator begin() const
|
||||
{
|
||||
return sl::cbegin(sh_);
|
||||
}
|
||||
|
||||
/// Alias to begin()
|
||||
inline Iterator cbegin() const
|
||||
{
|
||||
return sl::cbegin(sh_);
|
||||
}
|
||||
|
||||
/// Iterator to one past the last contour vertex.
|
||||
inline Iterator end() const
|
||||
{
|
||||
return sl::cend(sh_);
|
||||
}
|
||||
|
||||
/// Alias to end()
|
||||
inline Iterator cend() const
|
||||
{
|
||||
return sl::cend(sh_);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get a copy of an outer vertex within the carried shape.
|
||||
*
|
||||
* Note that the vertex considered here is taken from the original shape
|
||||
* that this item is constructed from. This means that no transformation is
|
||||
* applied to the shape in this call.
|
||||
*
|
||||
* @param idx The index of the requested vertex.
|
||||
* @return A copy of the requested vertex.
|
||||
*/
|
||||
inline Vertex vertex(unsigned long idx) const
|
||||
{
|
||||
return sl::vertex(sh_, idx);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Modify a vertex.
|
||||
*
|
||||
* Note that this method will invalidate every cached calculation result
|
||||
* including polygon offset and transformations.
|
||||
*
|
||||
* @param idx The index of the requested vertex.
|
||||
* @param v The new vertex data.
|
||||
*/
|
||||
inline void setVertex(unsigned long idx, const Vertex& v )
|
||||
{
|
||||
invalidateCache();
|
||||
sl::vertex(sh_, idx) = v;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Calculate the shape area.
|
||||
*
|
||||
* The method returns the absolute value and does not reflect polygon
|
||||
* orientation. The result is cached, subsequent calls will have very little
|
||||
* cost.
|
||||
* @return The shape area in floating point double precision.
|
||||
*/
|
||||
inline double area() const {
|
||||
double ret ;
|
||||
if(area_cache_valid_) ret = area_cache_;
|
||||
else {
|
||||
ret = sl::area(infaltedShape());
|
||||
area_cache_ = ret;
|
||||
area_cache_valid_ = true;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
inline bool isContourConvex() const {
|
||||
bool ret = false;
|
||||
|
||||
switch(convexity_) {
|
||||
case Convexity::UNCHECKED:
|
||||
ret = sl::isConvex(sl::contour(transformedShape()));
|
||||
convexity_ = ret? Convexity::C_TRUE : Convexity::C_FALSE;
|
||||
break;
|
||||
case Convexity::C_TRUE: ret = true; break;
|
||||
case Convexity::C_FALSE:;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
inline bool isHoleConvex(unsigned /*holeidx*/) const {
|
||||
return false;
|
||||
}
|
||||
|
||||
inline bool areHolesConvex() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
/// The number of the outer ring vertices.
|
||||
inline size_t vertexCount() const {
|
||||
return sl::contourVertexCount(sh_);
|
||||
}
|
||||
|
||||
inline size_t holeCount() const {
|
||||
return sl::holeCount(sh_);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief isPointInside
|
||||
* @param p
|
||||
* @return
|
||||
*/
|
||||
inline bool isInside(const Vertex& p) const
|
||||
{
|
||||
return sl::isInside(p, transformedShape());
|
||||
}
|
||||
|
||||
inline bool isInside(const _Item& sh) const
|
||||
{
|
||||
return sl::isInside(transformedShape(), sh.transformedShape());
|
||||
}
|
||||
|
||||
inline bool isInside(const RawShape& sh) const
|
||||
{
|
||||
return sl::isInside(transformedShape(), sh);
|
||||
}
|
||||
|
||||
inline bool isInside(const _Box<TPoint<RawShape>>& box) const;
|
||||
inline bool isInside(const _Circle<TPoint<RawShape>>& box) const;
|
||||
|
||||
inline void translate(const Vertex& d) BP2D_NOEXCEPT
|
||||
{
|
||||
translation(translation() + d);
|
||||
}
|
||||
|
||||
inline void rotate(const Radians& rads) BP2D_NOEXCEPT
|
||||
{
|
||||
rotation(rotation() + rads);
|
||||
}
|
||||
|
||||
inline void inflation(Coord distance) BP2D_NOEXCEPT
|
||||
{
|
||||
inflation_ = distance;
|
||||
has_inflation_ = true;
|
||||
invalidateCache();
|
||||
}
|
||||
|
||||
inline Coord inflation() const BP2D_NOEXCEPT {
|
||||
return inflation_;
|
||||
}
|
||||
|
||||
inline void inflate(Coord distance) BP2D_NOEXCEPT
|
||||
{
|
||||
inflation(inflation() + distance);
|
||||
}
|
||||
|
||||
inline Radians rotation() const BP2D_NOEXCEPT
|
||||
{
|
||||
return rotation_;
|
||||
}
|
||||
|
||||
inline TPoint<RawShape> translation() const BP2D_NOEXCEPT
|
||||
{
|
||||
return translation_;
|
||||
}
|
||||
|
||||
inline void rotation(Radians rot) BP2D_NOEXCEPT
|
||||
{
|
||||
if(rotation_ != rot) {
|
||||
rotation_ = rot; has_rotation_ = true; tr_cache_valid_ = false;
|
||||
rmt_valid_ = false; lmb_valid_ = false;
|
||||
bb_cache_.valid = false;
|
||||
}
|
||||
}
|
||||
|
||||
inline void translation(const TPoint<RawShape>& tr) BP2D_NOEXCEPT
|
||||
{
|
||||
if(translation_ != tr) {
|
||||
translation_ = tr; has_translation_ = true; tr_cache_valid_ = false;
|
||||
//bb_cache_.valid = false;
|
||||
}
|
||||
}
|
||||
|
||||
inline const RawShape& transformedShape() const
|
||||
{
|
||||
if(tr_cache_valid_) return tr_cache_;
|
||||
|
||||
RawShape cpy = infaltedShape();
|
||||
if(has_rotation_) sl::rotate(cpy, rotation_);
|
||||
if(has_translation_) sl::translate(cpy, translation_);
|
||||
tr_cache_ = cpy; tr_cache_valid_ = true;
|
||||
rmt_valid_ = false; lmb_valid_ = false;
|
||||
|
||||
return tr_cache_;
|
||||
}
|
||||
|
||||
inline operator RawShape() const
|
||||
{
|
||||
return transformedShape();
|
||||
}
|
||||
|
||||
inline const RawShape& rawShape() const BP2D_NOEXCEPT
|
||||
{
|
||||
return sh_;
|
||||
}
|
||||
|
||||
inline void resetTransformation() BP2D_NOEXCEPT
|
||||
{
|
||||
has_translation_ = false; has_rotation_ = false; has_inflation_ = false;
|
||||
invalidateCache();
|
||||
}
|
||||
|
||||
inline Box boundingBox() const {
|
||||
if(!bb_cache_.valid) {
|
||||
if(!has_rotation_)
|
||||
bb_cache_.bb = sl::boundingBox(infaltedShape());
|
||||
else {
|
||||
// TODO make sure this works
|
||||
auto rotsh = infaltedShape();
|
||||
sl::rotate(rotsh, rotation_);
|
||||
bb_cache_.bb = sl::boundingBox(rotsh);
|
||||
}
|
||||
bb_cache_.valid = true;
|
||||
}
|
||||
|
||||
auto &bb = bb_cache_.bb; auto &tr = translation_;
|
||||
return {bb.minCorner() + tr, bb.maxCorner() + tr };
|
||||
}
|
||||
|
||||
inline Vertex referenceVertex() const {
|
||||
return rightmostTopVertex();
|
||||
}
|
||||
|
||||
inline Vertex rightmostTopVertex() const {
|
||||
if(!rmt_valid_ || !tr_cache_valid_) { // find max x and max y vertex
|
||||
auto& tsh = transformedShape();
|
||||
rmt_ = std::max_element(sl::cbegin(tsh), sl::cend(tsh), vsort);
|
||||
rmt_valid_ = true;
|
||||
}
|
||||
return *rmt_;
|
||||
}
|
||||
|
||||
inline Vertex leftmostBottomVertex() const {
|
||||
if(!lmb_valid_ || !tr_cache_valid_) { // find min x and min y vertex
|
||||
auto& tsh = transformedShape();
|
||||
lmb_ = std::min_element(sl::cbegin(tsh), sl::cend(tsh), vsort);
|
||||
lmb_valid_ = true;
|
||||
}
|
||||
return *lmb_;
|
||||
}
|
||||
|
||||
//Static methods:
|
||||
|
||||
inline static bool intersects(const _Item& sh1, const _Item& sh2)
|
||||
{
|
||||
return sl::intersects(sh1.transformedShape(),
|
||||
sh2.transformedShape());
|
||||
}
|
||||
|
||||
inline static bool touches(const _Item& sh1, const _Item& sh2)
|
||||
{
|
||||
return sl::touches(sh1.transformedShape(),
|
||||
sh2.transformedShape());
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
inline const RawShape& infaltedShape() const {
|
||||
if(has_inflation_ ) {
|
||||
if(inflate_cache_valid_) return inflate_cache_;
|
||||
|
||||
inflate_cache_ = sh_;
|
||||
sl::offset(inflate_cache_, inflation_);
|
||||
inflate_cache_valid_ = true;
|
||||
return inflate_cache_;
|
||||
}
|
||||
return sh_;
|
||||
}
|
||||
|
||||
inline void invalidateCache() const BP2D_NOEXCEPT
|
||||
{
|
||||
tr_cache_valid_ = false;
|
||||
lmb_valid_ = false; rmt_valid_ = false;
|
||||
area_cache_valid_ = false;
|
||||
inflate_cache_valid_ = false;
|
||||
bb_cache_.valid = false;
|
||||
convexity_ = Convexity::UNCHECKED;
|
||||
}
|
||||
|
||||
static inline bool vsort(const Vertex& v1, const Vertex& v2)
|
||||
{
|
||||
TCompute<Vertex> x1 = getX(v1), x2 = getX(v2);
|
||||
TCompute<Vertex> y1 = getY(v1), y2 = getY(v2);
|
||||
return y1 == y2 ? x1 < x2 : y1 < y2;
|
||||
}
|
||||
NestConfig() = default;
|
||||
NestConfig(const typename Placer::Config &cfg) : placer_config{cfg} {}
|
||||
NestConfig(const typename Selector::Config &cfg) : selector_config{cfg} {}
|
||||
NestConfig(const typename Placer::Config & pcfg,
|
||||
const typename Selector::Config &scfg)
|
||||
: placer_config{pcfg}, selector_config{scfg} {}
|
||||
};
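A minimal sketch of building the NestConfig aggregate defined above, relying only on the constructors shown; the placer and selector Config types are assumed to be default-constructible, and their fields are left untouched because they are not part of this diff.

// Sketch: the constructors of NestConfig shown above.
#include <libnest2d/libnest2d.hpp>

void nestconfig_examples() {
    using namespace libnest2d;

    NestConfig<> defaults;                                   // NfpPlacer + FirstFitSelection
    NestConfig<NfpPlacer, FirstFitSelection> spelled_out;    // the same pair, explicit

    // Single-argument constructors fill the matching sub-config only.
    NestConfig<> from_placer(NfpPlacer::Config{});
    NestConfig<> from_both(NfpPlacer::Config{}, FirstFitSelection::Config{});

    (void)defaults; (void)spelled_out; (void)from_placer; (void)from_both;
}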
|
||||
|
||||
/**
|
||||
* \brief Subclass of _Item for regular rectangle items.
|
||||
*/
|
||||
template<class RawShape>
|
||||
class _Rectangle: public _Item<RawShape> {
|
||||
using _Item<RawShape>::vertex;
|
||||
using TO = Orientation;
|
||||
public:
|
||||
|
||||
using Unit = TCoord<TPoint<RawShape>>;
|
||||
|
||||
template<TO o = OrientationType<RawShape>::Value>
|
||||
inline _Rectangle(Unit width, Unit height,
|
||||
// disable this ctor if o != CLOCKWISE
|
||||
enable_if_t< o == TO::CLOCKWISE, int> = 0 ):
|
||||
_Item<RawShape>( sl::create<RawShape>( {
|
||||
{0, 0},
|
||||
{0, height},
|
||||
{width, height},
|
||||
{width, 0},
|
||||
{0, 0}
|
||||
} ))
|
||||
{
|
||||
}
|
||||
|
||||
template<TO o = OrientationType<RawShape>::Value>
|
||||
inline _Rectangle(Unit width, Unit height,
|
||||
// disable this ctor if o != COUNTER_CLOCKWISE
|
||||
enable_if_t< o == TO::COUNTER_CLOCKWISE, int> = 0 ):
|
||||
_Item<RawShape>( sl::create<RawShape>( {
|
||||
{0, 0},
|
||||
{width, 0},
|
||||
{width, height},
|
||||
{0, height},
|
||||
{0, 0}
|
||||
} ))
|
||||
{
|
||||
}
|
||||
|
||||
inline Unit width() const BP2D_NOEXCEPT {
|
||||
return getX(vertex(2));
|
||||
}
|
||||
|
||||
inline Unit height() const BP2D_NOEXCEPT {
|
||||
return getY(vertex(2));
|
||||
}
|
||||
struct NestControl {
|
||||
ProgressFunction progressfn;
|
||||
StopCondition stopcond = []{ return false; };
|
||||
|
||||
NestControl() = default;
|
||||
NestControl(ProgressFunction pr) : progressfn{std::move(pr)} {}
|
||||
NestControl(StopCondition sc) : stopcond{std::move(sc)} {}
|
||||
NestControl(ProgressFunction pr, StopCondition sc)
|
||||
: progressfn{std::move(pr)}, stopcond{std::move(sc)}
|
||||
{}
|
||||
};
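A sketch of wiring the NestControl callbacks declared above into nest(); the progress lambda just prints, and the stop condition polls an atomic flag that some other part of the hypothetical application would set.

// Sketch: progress and cancellation callbacks passed through NestControl.
#include <atomic>
#include <iostream>
#include <vector>
#include <libnest2d/libnest2d.hpp>

std::size_t nest_with_callbacks(std::vector<libnest2d::Item> &items,
                                const libnest2d::Box &bin)
{
    using namespace libnest2d;

    static std::atomic<bool> canceled{false};   // set elsewhere to abort nesting

    NestControl ctl{
        [](unsigned remaining) { std::cout << "items left: " << remaining << '\n'; },
        []() { return canceled.load(); }        // stop condition polled by the nester
    };

    return nest(items.begin(), items.end(), bin, /*dist=*/0, NestConfig<>{}, ctl);
}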
|
||||
|
||||
template<class RawShape>
|
||||
inline bool _Item<RawShape>::isInside(const _Box<TPoint<RawShape>>& box) const {
|
||||
return sl::isInside(boundingBox(), box);
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Iterator = std::vector<Item>::iterator>
|
||||
std::size_t nest(Iterator from, Iterator to,
|
||||
const typename Placer::BinType & bin,
|
||||
Coord dist = 0,
|
||||
const NestConfig<Placer, Selector> &cfg = {},
|
||||
NestControl ctl = {})
|
||||
{
|
||||
_Nester<Placer, Selector> nester{bin, dist, cfg.placer_config, cfg.selector_config};
|
||||
if(ctl.progressfn) nester.progressIndicator(ctl.progressfn);
|
||||
if(ctl.stopcond) nester.stopCondition(ctl.stopcond);
|
||||
return nester.execute(from, to);
|
||||
}
|
||||
|
||||
template<class RawShape> inline bool
|
||||
_Item<RawShape>::isInside(const _Circle<TPoint<RawShape>>& circ) const {
|
||||
return sl::isInside(transformedShape(), circ);
|
||||
#ifdef LIBNEST2D_STATIC
|
||||
|
||||
extern template class _Nester<NfpPlacer, FirstFitSelection>;
|
||||
extern template class _Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
extern template std::size_t nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box & bin,
|
||||
Coord dist,
|
||||
const NestConfig<NfpPlacer, FirstFitSelection> &cfg,
|
||||
NestControl ctl);
|
||||
extern template std::size_t nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box & bin,
|
||||
Coord dist,
|
||||
const NestConfig<BottomLeftPlacer, FirstFitSelection> &cfg,
|
||||
NestControl ctl);
|
||||
|
||||
#endif
|
||||
|
||||
template<class Placer = NfpPlacer,
|
||||
class Selector = FirstFitSelection,
|
||||
class Container = std::vector<Item>>
|
||||
std::size_t nest(Container&& cont,
|
||||
const typename Placer::BinType & bin,
|
||||
Coord dist = 0,
|
||||
const NestConfig<Placer, Selector> &cfg = {},
|
||||
NestControl ctl = {})
|
||||
{
|
||||
return nest<Placer, Selector>(cont.begin(), cont.end(), bin, dist, cfg, ctl);
|
||||
}
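The container overload above just forwards to the iterator version; a sketch of calling it and reading the per-item results back through binId() and translation() (getX/getY are the point accessors used throughout this header's geometry code).

// Sketch: container overload of nest() and reading back per-item placement data.
#include <iostream>
#include <vector>
#include <libnest2d/libnest2d.hpp>

void pack_and_report(std::vector<libnest2d::Item> &items, const libnest2d::Box &bin)
{
    using namespace libnest2d;

    std::size_t bins_used = nest(items, bin, /*dist=*/0);
    std::cout << "bins used: " << bins_used << '\n';

    for (const Item &itm : items)
        std::cout << "item -> bin " << itm.binId()                 // BIN_ID_UNSET if unplaced
                  << " translated to (" << getX(itm.translation())
                  << ", " << getY(itm.translation()) << ")\n";
}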
|
||||
|
||||
template<class RawShape> using _ItemRef = std::reference_wrapper<_Item<RawShape>>;
|
||||
template<class RawShape> using _ItemGroup = std::vector<_ItemRef<RawShape>>;
|
||||
|
||||
/**
|
||||
* \brief A list of packed item vectors. Each vector represents a bin.
|
||||
*/
|
||||
template<class RawShape>
|
||||
using _PackGroup = std::vector<std::vector<_ItemRef<RawShape>>>;
|
||||
|
||||
template<class Iterator>
|
||||
struct ConstItemRange {
|
||||
Iterator from;
|
||||
Iterator to;
|
||||
bool valid = false;
|
||||
|
||||
ConstItemRange() = default;
|
||||
ConstItemRange(Iterator f, Iterator t): from(f), to(t), valid(true) {}
|
||||
};
|
||||
|
||||
template<class Container>
|
||||
inline ConstItemRange<typename Container::const_iterator>
|
||||
rem(typename Container::const_iterator it, const Container& cont) {
|
||||
return {std::next(it), cont.end()};
|
||||
}
|
||||
|
||||
/**
|
||||
* \brief A wrapper interface (trait) class for any placement strategy provider.
|
||||
*
|
||||
* If a client wants to use its own placement algorithm, all it has to do is to
|
||||
* specialize this class template and define all ten methods it has. It can
|
||||
* use the strategies::PlacerBoilerplace class for creating a new placement
|
||||
* strategy where only the constructor and the trypack method have to be provided
|
||||
* and it will work out of the box.
|
||||
*/
|
||||
template<class PlacementStrategy>
|
||||
class PlacementStrategyLike {
|
||||
PlacementStrategy impl_;
|
||||
public:
|
||||
|
||||
using RawShape = typename PlacementStrategy::ShapeType;
|
||||
|
||||
/// The item type that the placer works with.
|
||||
using Item = _Item<RawShape>;
|
||||
|
||||
/// The placer's config type. Should be a simple struct but can be anything.
|
||||
using Config = typename PlacementStrategy::Config;
|
||||
|
||||
/**
|
||||
* \brief The type of the bin that the placer works with.
|
||||
*
|
||||
* Can be a box or an arbitrary shape or just a width or height without a
|
||||
* second dimension if an infinite bin is considered.
|
||||
*/
|
||||
using BinType = typename PlacementStrategy::BinType;
|
||||
|
||||
/**
|
||||
* \brief Pack result that can be used to accept or discard it. See trypack
|
||||
* method.
|
||||
*/
|
||||
using PackResult = typename PlacementStrategy::PackResult;
|
||||
|
||||
using ItemGroup = _ItemGroup<RawShape>;
|
||||
using DefaultIterator = typename ItemGroup::const_iterator;
|
||||
|
||||
/**
|
||||
* @brief Constructor taking the bin and an optional configuration.
|
||||
* @param bin The bin object whose type is defined by the placement strategy.
|
||||
* @param config The configuration for the particular placer.
|
||||
*/
|
||||
explicit PlacementStrategyLike(const BinType& bin,
|
||||
const Config& config = Config()):
|
||||
impl_(bin)
|
||||
{
|
||||
configure(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Provide a different configuration for the placer.
|
||||
*
|
||||
* Note that it depends on the particular placer implementation how it
|
||||
* reacts to config changes in the middle of a calculation.
|
||||
*
|
||||
* @param config The configuration object defined by the placement strategy.
|
||||
*/
|
||||
inline void configure(const Config& config) { impl_.configure(config); }
|
||||
|
||||
/**
|
||||
* Try to pack an item with a result object that contains the packing
|
||||
* information for later accepting it.
|
||||
*
|
||||
* \param item_store A container of items that are intended to be packed
|
||||
* later. Can be used by the placer to switch tactics. When it knows that
|
||||
* many items will come a greedy strategy may not be the best.
|
||||
* \param from The iterator to the item from which the packing should start,
|
||||
* including the pointed item
|
||||
* \param count How many items should be packed. If the value is 1, then
|
||||
* just the item pointed to by "from" argument should be packed.
|
||||
*/
|
||||
template<class Iter = DefaultIterator>
|
||||
inline PackResult trypack(
|
||||
Item& item,
|
||||
const ConstItemRange<Iter>& remaining = ConstItemRange<Iter>())
|
||||
{
|
||||
return impl_.trypack(item, remaining);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief A method to accept a previously tried item (or items).
|
||||
*
|
||||
* If the pack result is a failure the method should ignore it.
|
||||
* @param r The result of a previous trypack call.
|
||||
*/
|
||||
inline void accept(PackResult& r) { impl_.accept(r); }
|
||||
|
||||
/**
|
||||
* @brief pack Try to pack and immediately accept it on success.
|
||||
*
|
||||
* A default implementation would be to call
|
||||
* { auto&& r = trypack(...); accept(r); return r; } but we should let the
|
||||
* implementor of the placement strategy to harvest any optimizations from
|
||||
* the absence of an intermediate step. The above version can still be used
|
||||
* in the implementation.
|
||||
*
|
||||
* @param item The item to pack.
|
||||
* @return Returns true if the item was packed or false if it could not be
|
||||
* packed.
|
||||
*/
|
||||
template<class Range = ConstItemRange<DefaultIterator>>
|
||||
inline bool pack(
|
||||
Item& item,
|
||||
const Range& remaining = Range())
|
||||
{
|
||||
return impl_.pack(item, remaining);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method makes it possible to "preload" some items into the placer. It
|
||||
* will not move these items but will consider them as already packed.
|
||||
*/
|
||||
inline void preload(const ItemGroup& packeditems)
|
||||
{
|
||||
impl_.preload(packeditems);
|
||||
}
|
||||
|
||||
/// Unpack the last element (remove it from the list of packed items).
|
||||
inline void unpackLast() { impl_.unpackLast(); }
|
||||
|
||||
/// Get the bin object.
|
||||
inline const BinType& bin() const { return impl_.bin(); }
|
||||
|
||||
/// Set a new bin object.
|
||||
inline void bin(const BinType& bin) { impl_.bin(bin); }
|
||||
|
||||
/// Get the packed items.
|
||||
inline ItemGroup getItems() { return impl_.getItems(); }
|
||||
|
||||
/// Clear the packed items so a new session can be started.
|
||||
inline void clearItems() { impl_.clearItems(); }
|
||||
|
||||
inline double filledArea() const { return impl_.filledArea(); }
|
||||
|
||||
};
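PlacementStrategyLike above only forwards to the wrapped type, so a custom placer needs exactly the members being forwarded to. A compile-oriented skeleton under that assumption; the "accept everything" logic is a placeholder, not a real placement algorithm, and a real placer would also set the item's translation/rotation in trypack.

// Skeleton of a placer usable through PlacementStrategyLike (placeholder logic only).
#include <functional>
#include <vector>
#include <libnest2d/libnest2d.hpp>

template<class RawShape>
class DummyPlacer {
public:
    using ShapeType = RawShape;
    using Item      = libnest2d::_Item<RawShape>;
    using BinType   = libnest2d::_Box<libnest2d::TPoint<RawShape>>;
    using ItemGroup = std::vector<std::reference_wrapper<Item>>;

    struct Config {};                                // nothing to tune in this sketch
    struct PackResult { Item *item = nullptr; };     // success == non-null item

    explicit DummyPlacer(const BinType &bin): bin_(bin) {}

    void configure(const Config &) {}

    template<class Range>
    PackResult trypack(Item &item, const Range & /*remaining*/) {
        // A real placer would search for a collision-free position inside bin_
        // and set item.translation()/rotation() here; this sketch accepts as-is.
        return PackResult{&item};
    }

    void accept(PackResult &r) { if (r.item) packed_.emplace_back(*r.item); }

    template<class Range>
    bool pack(Item &item, const Range &remaining) {
        auto r = trypack(item, remaining);
        accept(r);
        return r.item != nullptr;
    }

    void preload(const ItemGroup &items) {
        packed_.insert(packed_.end(), items.begin(), items.end());
    }

    void unpackLast() { packed_.pop_back(); }

    const BinType &bin() const { return bin_; }
    void bin(const BinType &b) { bin_ = b; }

    ItemGroup getItems() { return packed_; }
    void clearItems()    { packed_.clear(); }
    double filledArea() const { return 0.0; }        // placeholder

private:
    BinType   bin_;
    ItemGroup packed_;
};

Such a type could then, assuming the selector is instantiated for the same shape type as the static_assert in _Nester requires, be plugged in as _Nester<DummyPlacer<PolygonImpl>, FirstFitSelection>.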
|
||||
|
||||
// The progress function will be called with the number of placed items
|
||||
using ProgressFunction = std::function<void(unsigned)>;
|
||||
using StopCondition = std::function<bool(void)>;
|
||||
|
||||
/**
|
||||
* A wrapper interface (trait) class for any selections strategy provider.
|
||||
*/
|
||||
template<class SelectionStrategy>
|
||||
class SelectionStrategyLike {
|
||||
SelectionStrategy impl_;
|
||||
public:
|
||||
using RawShape = typename SelectionStrategy::ShapeType;
|
||||
using Item = _Item<RawShape>;
|
||||
using PackGroup = _PackGroup<RawShape>;
|
||||
using Config = typename SelectionStrategy::Config;
|
||||
|
||||
|
||||
/**
|
||||
* @brief Provide a different configuration for the selection strategy.
|
||||
*
|
||||
* Note that it depends on the particular placer implementation how it
|
||||
* reacts to config changes in the middle of a calculation.
|
||||
*
|
||||
* @param config The configuration object defined by the selection strategy.
|
||||
*/
|
||||
inline void configure(const Config& config) {
|
||||
impl_.configure(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief A function callback which should be called whenever an item or
|
||||
* a group of items where successfully packed.
|
||||
* @param fn A function callback object taking one unsigned integer as the
|
||||
* number of the remaining items to pack.
|
||||
*/
|
||||
void progressIndicator(ProgressFunction fn) { impl_.progressIndicator(fn); }
|
||||
|
||||
void stopCondition(StopCondition cond) { impl_.stopCondition(cond); }
|
||||
|
||||
/**
|
||||
* \brief A method to start the calculation on the input sequence.
|
||||
*
|
||||
* \tparam TPlacer The only mandatory template parameter is the type of
|
||||
* placer compatible with the PlacementStrategyLike interface.
|
||||
*
|
||||
* \param first, last The first and last iterator of the input sequence. It
|
||||
* can be only an iterator of a type convertible to Item.
|
||||
* \param bin The shape of the bin. It has to be supported by the placement
|
||||
* strategy.
|
||||
* \param config An optional config object for the placer.
|
||||
*/
|
||||
template<class TPlacer, class TIterator,
|
||||
class TBin = typename PlacementStrategyLike<TPlacer>::BinType,
|
||||
class PConfig = typename PlacementStrategyLike<TPlacer>::Config>
|
||||
inline void packItems(
|
||||
TIterator first,
|
||||
TIterator last,
|
||||
TBin&& bin,
|
||||
PConfig&& config = PConfig() )
|
||||
{
|
||||
impl_.template packItems<TPlacer>(first, last,
|
||||
std::forward<TBin>(bin),
|
||||
std::forward<PConfig>(config));
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get the items for a particular bin.
|
||||
* @param binIndex The index of the requested bin.
|
||||
* @return Returns a list of all items packed into the requested bin.
|
||||
*/
|
||||
inline const PackGroup& getResult() const {
|
||||
return impl_.getResult();
|
||||
}
|
||||
|
||||
void clear() { impl_.clear(); }
|
||||
};
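Similarly, SelectionStrategyLike only forwards, so a selection strategy needs the members listed above. A deliberately naive skeleton that opens a single bin and packs items in input order; it is a sketch only, not a replacement for the shipped first-fit, filler or DJD strategies, and a real selector would also record bin ids on the items.

// Skeleton of a selection strategy usable through SelectionStrategyLike (sketch only).
#include <functional>
#include <iterator>
#include <vector>
#include <libnest2d/libnest2d.hpp>

template<class RawShape>
class NaiveSelection {
public:
    using ShapeType = RawShape;
    using Item      = libnest2d::_Item<RawShape>;
    struct Config {};

    void configure(const Config &) {}
    void progressIndicator(std::function<void(unsigned)> fn) { progress_ = std::move(fn); }
    void stopCondition(std::function<bool()> cond) { stop_ = std::move(cond); }

    template<class TPlacer, class It, class TBin, class PCfg>
    void packItems(It first, It last, TBin &&bin, PCfg &&pcfg) {
        libnest2d::PlacementStrategyLike<TPlacer> placer(bin, pcfg);
        result_.assign(1, {});                       // a single bin in this sketch
        unsigned left = static_cast<unsigned>(std::distance(first, last));
        for (auto it = first; it != last && !(stop_ && stop_()); ++it) {
            if (placer.pack(*it)) result_.front().emplace_back(*it);
            if (progress_) progress_(--left);
        }
    }

    const std::vector<std::vector<std::reference_wrapper<Item>>> &getResult() const {
        return result_;
    }
    void clear() { result_.clear(); }

private:
    std::function<void(unsigned)> progress_;
    std::function<bool()> stop_;
    std::vector<std::vector<std::reference_wrapper<Item>>> result_;
};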
|
||||
|
||||
/**
|
||||
* The _Nester is the front-end class for the libnest2d library. It takes the
|
||||
* input items and changes their transformations to be inside the provided bin.
|
||||
*/
|
||||
template<class PlacementStrategy, class SelectionStrategy >
|
||||
class _Nester {
|
||||
using TSel = SelectionStrategyLike<SelectionStrategy>;
|
||||
TSel selector_;
|
||||
public:
|
||||
using Item = typename PlacementStrategy::Item;
|
||||
using ShapeType = typename Item::ShapeType;
|
||||
using ItemRef = std::reference_wrapper<Item>;
|
||||
using TPlacer = PlacementStrategyLike<PlacementStrategy>;
|
||||
using BinType = typename TPlacer::BinType;
|
||||
using PlacementConfig = typename TPlacer::Config;
|
||||
using SelectionConfig = typename TSel::Config;
|
||||
using Coord = TCoord<TPoint<typename Item::ShapeType>>;
|
||||
using PackGroup = _PackGroup<typename Item::ShapeType>;
|
||||
using ResultType = PackGroup;
|
||||
|
||||
private:
|
||||
BinType bin_;
|
||||
PlacementConfig pconfig_;
|
||||
Coord min_obj_distance_;
|
||||
|
||||
using SItem = typename SelectionStrategy::Item;
|
||||
using TPItem = remove_cvref_t<Item>;
|
||||
using TSItem = remove_cvref_t<SItem>;
|
||||
|
||||
StopCondition stopfn_;
|
||||
|
||||
template<class It> using TVal = remove_ref_t<typename It::value_type>;
|
||||
|
||||
template<class It, class Out>
|
||||
using ItemIteratorOnly =
|
||||
enable_if_t<std::is_convertible<TVal<It>&, TPItem&>::value, Out>;
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* \brief Constructor taking the bin as the only mandatory parameter.
|
||||
*
|
||||
* \param bin The bin shape that will be used by the placers. The type
|
||||
* of the bin should be one that is supported by the placer type.
|
||||
*/
|
||||
template<class TBinType = BinType,
|
||||
class PConf = PlacementConfig,
|
||||
class SConf = SelectionConfig>
|
||||
_Nester(TBinType&& bin, Coord min_obj_distance = 0,
|
||||
const PConf& pconfig = PConf(), const SConf& sconfig = SConf()):
|
||||
bin_(std::forward<TBinType>(bin)),
|
||||
pconfig_(pconfig),
|
||||
min_obj_distance_(min_obj_distance)
|
||||
{
|
||||
static_assert( std::is_same<TPItem, TSItem>::value,
|
||||
"Incompatible placement and selection strategy!");
|
||||
|
||||
selector_.configure(sconfig);
|
||||
}
|
||||
|
||||
void configure(const PlacementConfig& pconf) { pconfig_ = pconf; }
|
||||
void configure(const SelectionConfig& sconf) { selector_.configure(sconf); }
|
||||
void configure(const PlacementConfig& pconf, const SelectionConfig& sconf)
|
||||
{
|
||||
pconfig_ = pconf;
|
||||
selector_.configure(sconf);
|
||||
}
|
||||
void configure(const SelectionConfig& sconf, const PlacementConfig& pconf)
|
||||
{
|
||||
pconfig_ = pconf;
|
||||
selector_.configure(sconf);
|
||||
}
|
||||
|
||||
/**
|
||||
* \brief Arrange an input sequence of _Item-s.
|
||||
*
|
||||
* To get the result, call the translation(), rotation() and binId()
|
||||
* methods of each item. If only the transformed polygon is needed, call
|
||||
* transformedShape() to get the properly transformed shapes.
|
||||
*
|
||||
* The number of groups in the pack group is the number of bins opened by
|
||||
* the selection algorithm.
|
||||
*/
|
||||
template<class It>
|
||||
inline ItemIteratorOnly<It, void> execute(It from, It to)
|
||||
{
|
||||
auto infl = static_cast<Coord>(std::ceil(min_obj_distance_/2.0));
|
||||
if(infl > 0) std::for_each(from, to, [this, infl](Item& item) {
|
||||
item.inflate(infl);
|
||||
});
|
||||
|
||||
selector_.template packItems<PlacementStrategy>(
|
||||
from, to, bin_, pconfig_);
|
||||
|
||||
if(min_obj_distance_ > 0) std::for_each(from, to, [infl](Item& item) {
|
||||
item.inflate(-infl);
|
||||
});
|
||||
}
|
||||
|
||||
/// Set a progress indicator function object for the selector.
|
||||
inline _Nester& progressIndicator(ProgressFunction func)
|
||||
{
|
||||
selector_.progressIndicator(func); return *this;
|
||||
}
|
||||
|
||||
/// Set a predicate to tell when to abort nesting.
|
||||
inline _Nester& stopCondition(StopCondition fn)
|
||||
{
|
||||
stopfn_ = fn; selector_.stopCondition(fn); return *this;
|
||||
}
|
||||
|
||||
inline const PackGroup& lastResult() const
|
||||
{
|
||||
return selector_.getResult();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // LIBNEST2D_HPP
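Putting the header together, a sketch of driving _Nester directly, which is what the nest() helpers above do internally; it uses only the _Nester interface shown above, the item sizes are arbitrary, and the Box width/height constructor is assumed from the library's usual API.

// Sketch: using _Nester directly with the default strategies.
#include <iostream>
#include <vector>
#include <libnest2d/libnest2d.hpp>

int main() {
    using namespace libnest2d;

    std::vector<Item> items;
    items.emplace_back(Rectangle(4000000, 6000000));   // arbitrary integer sizes
    items.emplace_back(Rectangle(5000000, 5000000));

    Box bin(20000000, 20000000);                        // assumed width/height constructor

    _Nester<NfpPlacer, FirstFitSelection> nester(bin, /*min_obj_distance=*/100000);
    nester.progressIndicator([](unsigned rem) { std::cout << rem << " items left\n"; });
    nester.stopCondition([]() { return false; });       // never abort in this sketch
    nester.execute(items.begin(), items.end());

    std::cout << "bins opened: " << nester.lastResult().size() << '\n';
    return 0;
}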
|
||||
|
|
src/libnest2d/include/libnest2d/nester.hpp (new file, 869 lines added)
|
@ -0,0 +1,869 @@
|
|||
#ifndef NESTER_HPP
|
||||
#define NESTER_HPP
|
||||
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <array>
|
||||
#include <algorithm>
|
||||
#include <functional>
|
||||
|
||||
#include <libnest2d/geometry_traits.hpp>
|
||||
|
||||
namespace libnest2d {
|
||||
|
||||
static const constexpr int BIN_ID_UNSET = -1;
|
||||
|
||||
/**
|
||||
* \brief An item to be placed on a bin.
|
||||
*
|
||||
* It holds a copy of the original shape object but supports move construction
|
||||
* from the shape objects if it's an rvalue reference. This way we can construct
|
||||
* the items without the cost of copying a potentially large amount of input.
|
||||
*
|
||||
* The results of some calculations are cached for maintaining fast run times.
|
||||
* For this reason, memory demands are much higher but this should pay off.
|
||||
*/
|
||||
template<class RawShape>
|
||||
class _Item {
|
||||
using Coord = TCoord<TPoint<RawShape>>;
|
||||
using Vertex = TPoint<RawShape>;
|
||||
using Box = _Box<Vertex>;
|
||||
|
||||
using VertexConstIterator = typename TContour<RawShape>::const_iterator;
|
||||
|
||||
// The original shape that gets encapsulated.
|
||||
RawShape sh_;
|
||||
|
||||
// Transformation data
|
||||
Vertex translation_{0, 0};
|
||||
Radians rotation_{0.0};
|
||||
Coord inflation_{0};
|
||||
|
||||
// Info about whether the transformations will have to take place
|
||||
// This is needed because if floating point is used, it is hard to say
|
||||
// that a zero angle is not a rotation because of testing for equality.
|
||||
bool has_rotation_ = false, has_translation_ = false, has_inflation_ = false;
|
||||
|
||||
// For caching the calculations as they can get pretty expensive.
|
||||
mutable RawShape tr_cache_;
|
||||
mutable bool tr_cache_valid_ = false;
|
||||
mutable double area_cache_ = 0;
|
||||
mutable bool area_cache_valid_ = false;
|
||||
mutable RawShape inflate_cache_;
|
||||
mutable bool inflate_cache_valid_ = false;
|
||||
|
||||
enum class Convexity: char {
|
||||
UNCHECKED,
|
||||
C_TRUE,
|
||||
C_FALSE
|
||||
};
|
||||
|
||||
mutable Convexity convexity_ = Convexity::UNCHECKED;
|
||||
mutable VertexConstIterator rmt_; // rightmost top vertex
|
||||
mutable VertexConstIterator lmb_; // leftmost bottom vertex
|
||||
mutable bool rmt_valid_ = false, lmb_valid_ = false;
|
||||
mutable struct BBCache {
|
||||
Box bb; bool valid;
|
||||
BBCache(): valid(false) {}
|
||||
} bb_cache_;
|
||||
|
||||
int binid_{BIN_ID_UNSET}, priority_{0};
|
||||
bool fixed_{false};
|
||||
|
||||
public:
|
||||
|
||||
/// The type of the shape which was handed over as the template argument.
|
||||
using ShapeType = RawShape;
|
||||
|
||||
/**
|
||||
* \brief Iterator type for the outer vertices.
|
||||
*
|
||||
* Only const iterators can be used. The _Item type is not intended to
|
||||
* modify the carried shapes from the outside. The main purpose of this type
|
||||
* is to cache the calculation results from the various operators it
|
||||
* supports. Giving out a non const iterator would make it impossible to
|
||||
* perform correct cache invalidation.
|
||||
*/
|
||||
using Iterator = VertexConstIterator;
|
||||
|
||||
/**
|
||||
* @brief Get the orientation of the polygon.
|
||||
*
|
||||
* The orientation has to be specified as a specialization of the
|
||||
* OrientationType struct which has a Value constant.
|
||||
*
|
||||
* @return The orientation type identifier for the _Item type.
|
||||
*/
|
||||
static BP2D_CONSTEXPR Orientation orientation() {
|
||||
return OrientationType<RawShape>::Value;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Construct an _Item from an existing raw shape. The shape will
|
||||
* be copied into the _Item object.
|
||||
* @param sh The original shape object.
|
||||
*/
|
||||
explicit inline _Item(const RawShape& sh): sh_(sh) {}
|
||||
|
||||
/**
|
||||
* @brief Construction of an item by moving the content of the raw shape,
|
||||
* assuming that it supports move semantics.
|
||||
* @param sh The original shape object.
|
||||
*/
|
||||
explicit inline _Item(RawShape&& sh): sh_(std::move(sh)) {}
|
||||
|
||||
/**
|
||||
* @brief Create an item from an initializer list.
|
||||
* @param il The initializer list of vertices.
|
||||
*/
|
||||
inline _Item(const std::initializer_list< Vertex >& il):
|
||||
sh_(sl::create<RawShape>(il)) {}
|
||||
|
||||
inline _Item(const TContour<RawShape>& contour,
|
||||
const THolesContainer<RawShape>& holes = {}):
|
||||
sh_(sl::create<RawShape>(contour, holes)) {}
|
||||
|
||||
inline _Item(TContour<RawShape>&& contour,
|
||||
THolesContainer<RawShape>&& holes):
|
||||
sh_(sl::create<RawShape>(std::move(contour), std::move(holes))) {}
|
||||
|
||||
inline bool isFixed() const noexcept { return fixed_; }
|
||||
inline void markAsFixedInBin(int binid)
|
||||
{
|
||||
fixed_ = binid >= 0;
|
||||
binid_ = binid;
|
||||
}
|
||||
|
||||
inline void binId(int idx) { binid_ = idx; }
|
||||
inline int binId() const noexcept { return binid_; }
|
||||
|
||||
inline void priority(int p) { priority_ = p; }
|
||||
inline int priority() const noexcept { return priority_; }
|
||||
|
||||
/**
|
||||
* @brief Convert the polygon to string representation. The format depends
|
||||
* on the implementation of the polygon.
|
||||
* @return
|
||||
*/
|
||||
inline std::string toString() const
|
||||
{
|
||||
return sl::toString(sh_);
|
||||
}
|
||||
|
||||
/// Iterator to the first contour vertex in the polygon.
|
||||
inline Iterator begin() const
|
||||
{
|
||||
return sl::cbegin(sh_);
|
||||
}
|
||||
|
||||
/// Alias to begin()
|
||||
inline Iterator cbegin() const
|
||||
{
|
||||
return sl::cbegin(sh_);
|
||||
}
|
||||
|
||||
/// Iterator past the last contour vertex.
|
||||
inline Iterator end() const
|
||||
{
|
||||
return sl::cend(sh_);
|
||||
}
|
||||
|
||||
/// Alias to end()
|
||||
inline Iterator cend() const
|
||||
{
|
||||
return sl::cend(sh_);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get a copy of an outer vertex within the carried shape.
|
||||
*
|
||||
* Note that the vertex considered here is taken from the original shape
|
||||
* that this item is constructed from. This means that no transformation is
|
||||
* applied to the shape in this call.
|
||||
*
|
||||
* @param idx The index of the requested vertex.
|
||||
* @return A copy of the requested vertex.
|
||||
*/
|
||||
inline Vertex vertex(unsigned long idx) const
|
||||
{
|
||||
return sl::vertex(sh_, idx);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Modify a vertex.
|
||||
*
|
||||
* Note that this method will invalidate every cached calculation result
|
||||
* including polygon offset and transformations.
|
||||
*
|
||||
* @param idx The index of the requested vertex.
|
||||
* @param v The new vertex data.
|
||||
*/
|
||||
inline void setVertex(unsigned long idx, const Vertex& v )
|
||||
{
|
||||
invalidateCache();
|
||||
sl::vertex(sh_, idx) = v;
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Calculate the shape area.
|
||||
*
|
||||
* The method returns the absolute value and does not reflect polygon
|
||||
* orientation. The result is cached, subsequent calls will have very little
|
||||
* cost.
|
||||
* @return The shape area in floating point double precision.
|
||||
*/
|
||||
inline double area() const {
|
||||
double ret;
|
||||
if(area_cache_valid_) ret = area_cache_;
|
||||
else {
|
||||
ret = sl::area(infaltedShape());
|
||||
area_cache_ = ret;
|
||||
area_cache_valid_ = true;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
inline bool isContourConvex() const {
|
||||
bool ret = false;
|
||||
|
||||
switch(convexity_) {
|
||||
case Convexity::UNCHECKED:
|
||||
ret = sl::isConvex(sl::contour(transformedShape()));
|
||||
convexity_ = ret? Convexity::C_TRUE : Convexity::C_FALSE;
|
||||
break;
|
||||
case Convexity::C_TRUE: ret = true; break;
|
||||
case Convexity::C_FALSE:;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
inline bool isHoleConvex(unsigned /*holeidx*/) const {
|
||||
return false;
|
||||
}
|
||||
|
||||
inline bool areHolesConvex() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
/// The number of the outer ring vertices.
|
||||
inline size_t vertexCount() const {
|
||||
return sl::contourVertexCount(sh_);
|
||||
}
|
||||
|
||||
inline size_t holeCount() const {
|
||||
return sl::holeCount(sh_);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief isPointInside
|
||||
* @param p
|
||||
* @return
|
||||
*/
|
||||
inline bool isInside(const Vertex& p) const
|
||||
{
|
||||
return sl::isInside(p, transformedShape());
|
||||
}
|
||||
|
||||
inline bool isInside(const _Item& sh) const
|
||||
{
|
||||
return sl::isInside(transformedShape(), sh.transformedShape());
|
||||
}
|
||||
|
||||
inline bool isInside(const RawShape& sh) const
|
||||
{
|
||||
return sl::isInside(transformedShape(), sh);
|
||||
}
|
||||
|
||||
inline bool isInside(const _Box<TPoint<RawShape>>& box) const;
|
||||
inline bool isInside(const _Circle<TPoint<RawShape>>& box) const;
|
||||
|
||||
inline void translate(const Vertex& d) BP2D_NOEXCEPT
|
||||
{
|
||||
translation(translation() + d);
|
||||
}
|
||||
|
||||
inline void rotate(const Radians& rads) BP2D_NOEXCEPT
|
||||
{
|
||||
rotation(rotation() + rads);
|
||||
}
|
||||
|
||||
inline void inflation(Coord distance) BP2D_NOEXCEPT
|
||||
{
|
||||
inflation_ = distance;
|
||||
has_inflation_ = true;
|
||||
invalidateCache();
|
||||
}
|
||||
|
||||
inline Coord inflation() const BP2D_NOEXCEPT {
|
||||
return inflation_;
|
||||
}
|
||||
|
||||
inline void inflate(Coord distance) BP2D_NOEXCEPT
|
||||
{
|
||||
inflation(inflation() + distance);
|
||||
}
|
||||
|
||||
inline Radians rotation() const BP2D_NOEXCEPT
|
||||
{
|
||||
return rotation_;
|
||||
}
|
||||
|
||||
inline TPoint<RawShape> translation() const BP2D_NOEXCEPT
|
||||
{
|
||||
return translation_;
|
||||
}
|
||||
|
||||
inline void rotation(Radians rot) BP2D_NOEXCEPT
|
||||
{
|
||||
if(rotation_ != rot) {
|
||||
rotation_ = rot; has_rotation_ = true; tr_cache_valid_ = false;
|
||||
rmt_valid_ = false; lmb_valid_ = false;
|
||||
bb_cache_.valid = false;
|
||||
}
|
||||
}
|
||||
|
||||
inline void translation(const TPoint<RawShape>& tr) BP2D_NOEXCEPT
|
||||
{
|
||||
if(translation_ != tr) {
|
||||
translation_ = tr; has_translation_ = true; tr_cache_valid_ = false;
|
||||
//bb_cache_.valid = false;
|
||||
}
|
||||
}
|
||||
|
||||
inline const RawShape& transformedShape() const
|
||||
{
|
||||
if(tr_cache_valid_) return tr_cache_;
|
||||
|
||||
RawShape cpy = infaltedShape();
|
||||
if(has_rotation_) sl::rotate(cpy, rotation_);
|
||||
if(has_translation_) sl::translate(cpy, translation_);
|
||||
tr_cache_ = cpy; tr_cache_valid_ = true;
|
||||
rmt_valid_ = false; lmb_valid_ = false;
|
||||
|
||||
return tr_cache_;
|
||||
}
|
||||
|
||||
inline operator RawShape() const
|
||||
{
|
||||
return transformedShape();
|
||||
}
|
||||
|
||||
inline const RawShape& rawShape() const BP2D_NOEXCEPT
|
||||
{
|
||||
return sh_;
|
||||
}
|
||||
|
||||
inline void resetTransformation() BP2D_NOEXCEPT
|
||||
{
|
||||
has_translation_ = false; has_rotation_ = false; has_inflation_ = false;
|
||||
invalidateCache();
|
||||
}
|
||||
|
||||
inline Box boundingBox() const {
|
||||
if(!bb_cache_.valid) {
|
||||
if(!has_rotation_)
|
||||
bb_cache_.bb = sl::boundingBox(infaltedShape());
|
||||
else {
|
||||
// TODO make sure this works
|
||||
auto rotsh = infaltedShape();
|
||||
sl::rotate(rotsh, rotation_);
|
||||
bb_cache_.bb = sl::boundingBox(rotsh);
|
||||
}
|
||||
bb_cache_.valid = true;
|
||||
}
|
||||
|
||||
auto &bb = bb_cache_.bb; auto &tr = translation_;
|
||||
return {bb.minCorner() + tr, bb.maxCorner() + tr };
|
||||
}
|
||||
|
||||
inline Vertex referenceVertex() const {
|
||||
return rightmostTopVertex();
|
||||
}
|
||||
|
||||
inline Vertex rightmostTopVertex() const {
|
||||
if(!rmt_valid_ || !tr_cache_valid_) { // find max x and max y vertex
|
||||
auto& tsh = transformedShape();
|
||||
rmt_ = std::max_element(sl::cbegin(tsh), sl::cend(tsh), vsort);
|
||||
rmt_valid_ = true;
|
||||
}
|
||||
return *rmt_;
|
||||
}
|
||||
|
||||
inline Vertex leftmostBottomVertex() const {
|
||||
if(!lmb_valid_ || !tr_cache_valid_) { // find min x and min y vertex
|
||||
auto& tsh = transformedShape();
|
||||
lmb_ = std::min_element(sl::cbegin(tsh), sl::cend(tsh), vsort);
|
||||
lmb_valid_ = true;
|
||||
}
|
||||
return *lmb_;
|
||||
}
|
||||
|
||||
//Static methods:
|
||||
|
||||
inline static bool intersects(const _Item& sh1, const _Item& sh2)
|
||||
{
|
||||
return sl::intersects(sh1.transformedShape(),
|
||||
sh2.transformedShape());
|
||||
}
|
||||
|
||||
inline static bool touches(const _Item& sh1, const _Item& sh2)
|
||||
{
|
||||
return sl::touches(sh1.transformedShape(),
|
||||
sh2.transformedShape());
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
inline const RawShape& infaltedShape() const {
|
||||
if(has_inflation_ ) {
|
||||
if(inflate_cache_valid_) return inflate_cache_;
|
||||
|
||||
inflate_cache_ = sh_;
|
||||
sl::offset(inflate_cache_, inflation_);
|
||||
inflate_cache_valid_ = true;
|
||||
return inflate_cache_;
|
||||
}
|
||||
return sh_;
|
||||
}
|
||||
|
||||
inline void invalidateCache() const BP2D_NOEXCEPT
|
||||
{
|
||||
tr_cache_valid_ = false;
|
||||
lmb_valid_ = false; rmt_valid_ = false;
|
||||
area_cache_valid_ = false;
|
||||
inflate_cache_valid_ = false;
|
||||
bb_cache_.valid = false;
|
||||
convexity_ = Convexity::UNCHECKED;
|
||||
}
|
||||
|
||||
static inline bool vsort(const Vertex& v1, const Vertex& v2)
|
||||
{
|
||||
TCompute<Vertex> x1 = getX(v1), x2 = getX(v2);
|
||||
TCompute<Vertex> y1 = getY(v1), y2 = getY(v2);
|
||||
return y1 == y2 ? x1 < x2 : y1 < y2;
|
||||
}
|
||||
};
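// Illustrative usage sketch, not part of the original header: assuming the item
// type is instantiated for a backend polygon type (e.g. _Item<PolygonImpl> with a
// Clipper-based backend), an item is built from a contour and queried after its
// transformations have been set:
//
//     _Item<PolygonImpl> itm({ {0, 0}, {10, 0}, {10, 10}, {0, 10}, {0, 0} });
//     itm.translate({5, 5});
//     itm.rotate(Pi);                        // Pi assumed to come from the library's common header
//     auto bb    = itm.boundingBox();        // cached until the next transformation
//     auto shape = itm.transformedShape();   // inflated, rotated, then translated copy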
|
||||
|
||||
/**
|
||||
* \brief Subclass of _Item for regular rectangle items.
|
||||
*/
|
||||
template<class RawShape>
|
||||
class _Rectangle: public _Item<RawShape> {
|
||||
using _Item<RawShape>::vertex;
|
||||
using TO = Orientation;
|
||||
public:
|
||||
|
||||
using Unit = TCoord<TPoint<RawShape>>;
|
||||
|
||||
template<TO o = OrientationType<RawShape>::Value>
|
||||
inline _Rectangle(Unit width, Unit height,
|
||||
// disable this ctor if o != CLOCKWISE
|
||||
enable_if_t< o == TO::CLOCKWISE, int> = 0 ):
|
||||
_Item<RawShape>( sl::create<RawShape>( {
|
||||
{0, 0},
|
||||
{0, height},
|
||||
{width, height},
|
||||
{width, 0},
|
||||
{0, 0}
|
||||
} ))
|
||||
{
|
||||
}
|
||||
|
||||
template<TO o = OrientationType<RawShape>::Value>
|
||||
inline _Rectangle(Unit width, Unit height,
|
||||
// disable this ctor if o != COUNTER_CLOCKWISE
|
||||
enable_if_t< o == TO::COUNTER_CLOCKWISE, int> = 0 ):
|
||||
_Item<RawShape>( sl::create<RawShape>( {
|
||||
{0, 0},
|
||||
{width, 0},
|
||||
{width, height},
|
||||
{0, height},
|
||||
{0, 0}
|
||||
} ))
|
||||
{
|
||||
}
|
||||
|
||||
inline Unit width() const BP2D_NOEXCEPT {
|
||||
return getX(vertex(2));
|
||||
}
|
||||
|
||||
inline Unit height() const BP2D_NOEXCEPT {
|
||||
return getY(vertex(2));
|
||||
}
|
||||
};
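// For example (illustrative only), a 20 x 10 rectangle item whose width()/height()
// accessors simply read back the construction parameters:
//
//     _Rectangle<PolygonImpl> rect(20, 10);  // vertex order is picked by OrientationType
//     auto w = rect.width();                 // 20
//     auto h = rect.height();                // 10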
|
||||
|
||||
template<class RawShape>
|
||||
inline bool _Item<RawShape>::isInside(const _Box<TPoint<RawShape>>& box) const {
|
||||
return sl::isInside(boundingBox(), box);
|
||||
}
|
||||
|
||||
template<class RawShape> inline bool
|
||||
_Item<RawShape>::isInside(const _Circle<TPoint<RawShape>>& circ) const {
|
||||
return sl::isInside(transformedShape(), circ);
|
||||
}
|
||||
|
||||
template<class RawShape> using _ItemRef = std::reference_wrapper<_Item<RawShape>>;
|
||||
template<class RawShape> using _ItemGroup = std::vector<_ItemRef<RawShape>>;
|
||||
|
||||
/**
|
||||
* \brief A list of packed item vectors. Each vector represents a bin.
|
||||
*/
|
||||
template<class RawShape>
|
||||
using _PackGroup = std::vector<std::vector<_ItemRef<RawShape>>>;
|
||||
|
||||
template<class Iterator>
|
||||
struct ConstItemRange {
|
||||
Iterator from;
|
||||
Iterator to;
|
||||
bool valid = false;
|
||||
|
||||
ConstItemRange() = default;
|
||||
ConstItemRange(Iterator f, Iterator t): from(f), to(t), valid(true) {}
|
||||
};
|
||||
|
||||
template<class Container>
|
||||
inline ConstItemRange<typename Container::const_iterator>
|
||||
rem(typename Container::const_iterator it, const Container& cont) {
|
||||
return {std::next(it), cont.end()};
|
||||
}
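// Sketch of how a selection strategy might use rem() (illustrative only): while
// walking its own item store it hands the not-yet-processed tail to the placer,
// so the placer can adapt its tactics to the remaining work:
//
//     for (auto it = store.begin(); it != store.end(); ++it)
//         placer.trypack(*it, rem(it, store));   // store: some container of _Item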
|
||||
|
||||
/**
|
||||
* \brief A wrapper interface (trait) class for any placement strategy provider.
|
||||
*
|
||||
* If a client wants to use its own placement algorithm, all it has to do is to
|
||||
* specialize this class template and define all ten methods it has. It can
|
||||
* use the strategies::PlacerBoilerplate class for creating a new placement
|
||||
* strategy where only the constructor and the trypack method have to be provided
|
||||
* and it will work out of the box.
|
||||
*/
|
||||
template<class PlacementStrategy>
|
||||
class PlacementStrategyLike {
|
||||
PlacementStrategy impl_;
|
||||
public:
|
||||
|
||||
using RawShape = typename PlacementStrategy::ShapeType;
|
||||
|
||||
/// The item type that the placer works with.
|
||||
using Item = _Item<RawShape>;
|
||||
|
||||
/// The placer's config type. Should be a simple struct but can be anything.
|
||||
using Config = typename PlacementStrategy::Config;
|
||||
|
||||
/**
|
||||
* \brief The type of the bin that the placer works with.
|
||||
*
|
||||
* Can be a box or an arbitrary shape or just a width or height without a
|
||||
* second dimension if an infinite bin is considered.
|
||||
*/
|
||||
using BinType = typename PlacementStrategy::BinType;
|
||||
|
||||
/**
|
||||
* \brief Pack result that can be used to accept or discard it. See trypack
|
||||
* method.
|
||||
*/
|
||||
using PackResult = typename PlacementStrategy::PackResult;
|
||||
|
||||
using ItemGroup = _ItemGroup<RawShape>;
|
||||
using DefaultIterator = typename ItemGroup::const_iterator;
|
||||
|
||||
/**
|
||||
* @brief Constructor taking the bin and an optional configuration.
|
||||
* @param bin The bin object whose type is defined by the placement strategy.
|
||||
* @param config The configuration for the particular placer.
|
||||
*/
|
||||
explicit PlacementStrategyLike(const BinType& bin,
|
||||
const Config& config = Config()):
|
||||
impl_(bin)
|
||||
{
|
||||
configure(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Provide a different configuration for the placer.
|
||||
*
|
||||
* Note that it depends on the particular placer implementation how it
|
||||
* reacts to config changes in the middle of a calculation.
|
||||
*
|
||||
* @param config The configuration object defined by the placement strategy.
|
||||
*/
|
||||
inline void configure(const Config& config) { impl_.configure(config); }
|
||||
|
||||
/**
|
||||
* Try to pack an item with a result object that contains the packing
|
||||
* information for later accepting it.
|
||||
*
|
||||
* \param item_store A container of items that are intended to be packed
|
||||
* later. Can be used by the placer to switch tactics. When it knows that
|
||||
* many items will come, a greedy strategy may not be the best.
|
||||
* \param from The iterator to the item from which the packing should start,
|
||||
* including the pointed item
|
||||
* \param count How many items should be packed. If the value is 1, then
|
||||
* just the item pointed to by "from" argument should be packed.
|
||||
*/
|
||||
template<class Iter = DefaultIterator>
|
||||
inline PackResult trypack(
|
||||
Item& item,
|
||||
const ConstItemRange<Iter>& remaining = ConstItemRange<Iter>())
|
||||
{
|
||||
return impl_.trypack(item, remaining);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief A method to accept a previously tried item (or items).
|
||||
*
|
||||
* If the pack result is a failure the method should ignore it.
|
||||
* @param r The result of a previous trypack call.
|
||||
*/
|
||||
inline void accept(PackResult& r) { impl_.accept(r); }
|
||||
|
||||
/**
|
||||
* @brief pack Try to pack and immediately accept it on success.
|
||||
*
|
||||
* A default implementation would be to call
|
||||
* { auto&& r = trypack(...); accept(r); return r; } but we should let the
|
||||
* implementor of the placement strategy harvest any optimizations from
|
||||
* the absence of an intermediate step. The above version can still be used
|
||||
* in the implementation.
|
||||
*
|
||||
* @param item The item to pack.
|
||||
* @return Returns true if the item was packed or false if it could not be
|
||||
* packed.
|
||||
*/
|
||||
template<class Range = ConstItemRange<DefaultIterator>>
|
||||
inline bool pack(
|
||||
Item& item,
|
||||
const Range& remaining = Range())
|
||||
{
|
||||
return impl_.pack(item, remaining);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method makes it possible to "preload" some items into the placer. It
|
||||
* will not move these items but will consider them as already packed.
|
||||
*/
|
||||
inline void preload(const ItemGroup& packeditems)
|
||||
{
|
||||
impl_.preload(packeditems);
|
||||
}
|
||||
|
||||
/// Unpack the last element (remove it from the list of packed items).
|
||||
inline void unpackLast() { impl_.unpackLast(); }
|
||||
|
||||
/// Get the bin object.
|
||||
inline const BinType& bin() const { return impl_.bin(); }
|
||||
|
||||
/// Set a new bin object.
|
||||
inline void bin(const BinType& bin) { impl_.bin(bin); }
|
||||
|
||||
/// Get the packed items.
|
||||
inline ItemGroup getItems() { return impl_.getItems(); }
|
||||
|
||||
/// Clear the packed items so a new session can be started.
|
||||
inline void clearItems() { impl_.clearItems(); }
|
||||
|
||||
inline double filledArea() const { return impl_.filledArea(); }
|
||||
|
||||
};
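// A minimal sketch of the two-phase packing protocol (illustrative; NfpPlacer and
// its Box bin type are assumed to come from a concrete placer backend):
//
//     PlacementStrategyLike<NfpPlacer> placer(Box(500, 400));
//     auto res = placer.trypack(item);   // does not alter the placer state yet
//     placer.accept(res);                // a failed result is simply ignored
//
// or, when no intermediate decision is needed:
//
//     bool packed = placer.pack(item);   // trypack + accept in one step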
|
||||
|
||||
// The progress function will be called with the number of placed items
|
||||
using ProgressFunction = std::function<void(unsigned)>;
|
||||
using StopCondition = std::function<bool(void)>;
|
||||
|
||||
/**
|
||||
* A wrapper interface (trait) class for any selections strategy provider.
|
||||
*/
|
||||
template<class SelectionStrategy>
|
||||
class SelectionStrategyLike {
|
||||
SelectionStrategy impl_;
|
||||
public:
|
||||
using RawShape = typename SelectionStrategy::ShapeType;
|
||||
using Item = _Item<RawShape>;
|
||||
using PackGroup = _PackGroup<RawShape>;
|
||||
using Config = typename SelectionStrategy::Config;
|
||||
|
||||
|
||||
/**
|
||||
* @brief Provide a different configuration for the selection strategy.
|
||||
*
|
||||
* Note that it depends on the particular selection strategy implementation how it
|
||||
* reacts to config changes in the middle of a calculation.
|
||||
*
|
||||
* @param config The configuration object defined by the selection strategy.
|
||||
*/
|
||||
inline void configure(const Config& config) {
|
||||
impl_.configure(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief A function callback which should be called whenever an item or
|
||||
* a group of items was successfully packed.
|
||||
* @param fn A function callback object taking one unsigned integer as the
|
||||
* number of the remaining items to pack.
|
||||
*/
|
||||
void progressIndicator(ProgressFunction fn) { impl_.progressIndicator(fn); }
|
||||
|
||||
void stopCondition(StopCondition cond) { impl_.stopCondition(cond); }
|
||||
|
||||
/**
|
||||
* \brief A method to start the calculation on the input sequence.
|
||||
*
|
||||
* \tparam TPlacer The only mandatory template parameter is the type of
|
||||
* placer compatible with the PlacementStrategyLike interface.
|
||||
*
|
||||
* \param first, last The first and last iterator of the input sequence. It
|
||||
* can only be an iterator of a type convertible to Item.
|
||||
* \param bin The shape of the bin. It has to be supported by the placement
|
||||
* strategy.
|
||||
* \param config An optional config object for the placer.
|
||||
*/
|
||||
template<class TPlacer, class TIterator,
|
||||
class TBin = typename PlacementStrategyLike<TPlacer>::BinType,
|
||||
class PConfig = typename PlacementStrategyLike<TPlacer>::Config>
|
||||
inline void packItems(
|
||||
TIterator first,
|
||||
TIterator last,
|
||||
TBin&& bin,
|
||||
PConfig&& config = PConfig() )
|
||||
{
|
||||
impl_.template packItems<TPlacer>(first, last,
|
||||
std::forward<TBin>(bin),
|
||||
std::forward<PConfig>(config));
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Get the items for a particular bin.
|
||||
* @param binIndex The index of the requested bin.
|
||||
* @return Returns a list of all items packed into the requested bin.
|
||||
*/
|
||||
inline const PackGroup& getResult() const {
|
||||
return impl_.getResult();
|
||||
}
|
||||
|
||||
void clear() { impl_.clear(); }
|
||||
};
|
||||
|
||||
/**
|
||||
* The _Nester is the front-end class for the libnest2d library. It takes the
|
||||
* input items and changes their transformations to be inside the provided bin.
|
||||
*/
|
||||
template<class PlacementStrategy, class SelectionStrategy >
|
||||
class _Nester {
|
||||
using TSel = SelectionStrategyLike<SelectionStrategy>;
|
||||
TSel selector_;
|
||||
|
||||
public:
|
||||
using Item = typename PlacementStrategy::Item;
|
||||
using ShapeType = typename Item::ShapeType;
|
||||
using ItemRef = std::reference_wrapper<Item>;
|
||||
using TPlacer = PlacementStrategyLike<PlacementStrategy>;
|
||||
using BinType = typename TPlacer::BinType;
|
||||
using PlacementConfig = typename TPlacer::Config;
|
||||
using SelectionConfig = typename TSel::Config;
|
||||
using Coord = TCoord<TPoint<typename Item::ShapeType>>;
|
||||
using PackGroup = _PackGroup<typename Item::ShapeType>;
|
||||
using ResultType = PackGroup;
|
||||
|
||||
private:
|
||||
BinType bin_;
|
||||
PlacementConfig pconfig_;
|
||||
Coord min_obj_distance_;
|
||||
|
||||
using SItem = typename SelectionStrategy::Item;
|
||||
using TPItem = remove_cvref_t<Item>;
|
||||
using TSItem = remove_cvref_t<SItem>;
|
||||
|
||||
StopCondition stopfn_;
|
||||
|
||||
template<class It> using TVal = remove_ref_t<typename It::value_type>;
|
||||
|
||||
template<class It, class Out>
|
||||
using ItemIteratorOnly =
|
||||
enable_if_t<std::is_convertible<TVal<It>&, TPItem&>::value, Out>;
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
* \brief Constructor taking the bin as the only mandatory parameter.
|
||||
*
|
||||
* \param bin The bin shape that will be used by the placers. The type
|
||||
* of the bin should be one that is supported by the placer type.
|
||||
*/
|
||||
template<class TBinType = BinType,
|
||||
class PConf = PlacementConfig,
|
||||
class SConf = SelectionConfig>
|
||||
_Nester(TBinType&& bin, Coord min_obj_distance = 0,
|
||||
const PConf& pconfig = PConf(), const SConf& sconfig = SConf()):
|
||||
bin_(std::forward<TBinType>(bin)),
|
||||
pconfig_(pconfig),
|
||||
min_obj_distance_(min_obj_distance)
|
||||
{
|
||||
static_assert( std::is_same<TPItem, TSItem>::value,
|
||||
"Incompatible placement and selection strategy!");
|
||||
|
||||
selector_.configure(sconfig);
|
||||
}
|
||||
|
||||
void configure(const PlacementConfig& pconf) { pconfig_ = pconf; }
|
||||
void configure(const SelectionConfig& sconf) { selector_.configure(sconf); }
|
||||
void configure(const PlacementConfig& pconf, const SelectionConfig& sconf)
|
||||
{
|
||||
pconfig_ = pconf;
|
||||
selector_.configure(sconf);
|
||||
}
|
||||
void configure(const SelectionConfig& sconf, const PlacementConfig& pconf)
|
||||
{
|
||||
pconfig_ = pconf;
|
||||
selector_.configure(sconf);
|
||||
}
|
||||
|
||||
/**
|
||||
* \brief Arrange an input sequence of _Item-s.
|
||||
*
|
||||
* To get the result, call the translation(), rotation() and binId()
|
||||
* methods of each item. If only the transformed polygon is needed, call
|
||||
* transformedShape() to get the properly transformed shapes.
|
||||
*
|
||||
* The number of groups in the pack group is the number of bins opened by
|
||||
* the selection algorithm.
|
||||
*/
|
||||
template<class It>
|
||||
inline ItemIteratorOnly<It, size_t> execute(It from, It to)
|
||||
{
|
||||
auto infl = static_cast<Coord>(std::ceil(min_obj_distance_/2.0));
|
||||
if(infl > 0) std::for_each(from, to, [this, infl](Item& item) {
|
||||
item.inflate(infl);
|
||||
});
|
||||
|
||||
selector_.template packItems<PlacementStrategy>(
|
||||
from, to, bin_, pconfig_);
|
||||
|
||||
if(min_obj_distance_ > 0) std::for_each(from, to, [infl](Item& item) {
|
||||
item.inflate(-infl);
|
||||
});
|
||||
|
||||
return selector_.getResult().size();
|
||||
}
|
||||
|
||||
/// Set a progress indicator function object for the selector.
|
||||
inline _Nester& progressIndicator(ProgressFunction func)
|
||||
{
|
||||
selector_.progressIndicator(func); return *this;
|
||||
}
|
||||
|
||||
/// Set a predicate to tell when to abort nesting.
|
||||
inline _Nester& stopCondition(StopCondition fn)
|
||||
{
|
||||
stopfn_ = fn; selector_.stopCondition(fn); return *this;
|
||||
}
|
||||
|
||||
inline const PackGroup& lastResult() const
|
||||
{
|
||||
return selector_.getResult();
|
||||
}
|
||||
};
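// End-to-end usage sketch (illustrative only; NfpPlacer, FirstFitSelection and Box
// are assumed to come from the placers/selections headers of the library):
//
//     std::vector<Item> items = /* shapes to arrange */;
//     _Nester<NfpPlacer, FirstFitSelection> nester(Box(500, 400), /*min_obj_distance=*/6);
//     size_t bin_count = nester.execute(items.begin(), items.end());
//     for (const Item &itm : items)
//         if (itm.binId() != BIN_ID_UNSET)
//             consume(itm.transformedShape(), itm.binId());   // consume() is hypothetical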
|
||||
|
||||
}
|
||||
|
||||
#endif // NESTER_HPP
|
|
@ -1,61 +0,0 @@
|
|||
find_package(NLopt 1.4)
|
||||
|
||||
if(NOT NLopt_FOUND)
|
||||
message(STATUS "NLopt not found so downloading "
|
||||
"and automatic build is performed...")
|
||||
|
||||
include(DownloadProject)
|
||||
|
||||
if (CMAKE_VERSION VERSION_LESS 3.2)
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "")
|
||||
else()
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "UPDATE_DISCONNECTED 1")
|
||||
endif()
|
||||
|
||||
set(URL_NLOPT "https://github.com/stevengj/nlopt.git"
|
||||
CACHE STRING "Location of the nlopt git repository")
|
||||
|
||||
# set(NLopt_DIR ${CMAKE_BINARY_DIR}/nlopt)
|
||||
include(DownloadProject)
|
||||
download_project( PROJ nlopt
|
||||
GIT_REPOSITORY ${URL_NLOPT}
|
||||
GIT_TAG v2.5.0
|
||||
# CMAKE_CACHE_ARGS -DBUILD_SHARED_LIBS:BOOL=OFF -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} -DCMAKE_INSTALL_PREFIX=${NLopt_DIR}
|
||||
${UPDATE_DISCONNECTED_IF_AVAILABLE}
|
||||
)
|
||||
|
||||
set(SHARED_LIBS_STATE BUILD_SHARED_LIBS)
|
||||
set(BUILD_SHARED_LIBS OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_PYTHON OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_OCTAVE OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_MATLAB OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_GUILE OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_SWIG OFF CACHE BOOL "" FORCE)
|
||||
set(NLOPT_LINK_PYTHON OFF CACHE BOOL "" FORCE)
|
||||
|
||||
add_subdirectory(${nlopt_SOURCE_DIR} ${nlopt_BINARY_DIR})
|
||||
|
||||
set(NLopt_LIBS nlopt)
|
||||
set(NLopt_INCLUDE_DIR ${nlopt_BINARY_DIR} ${nlopt_BINARY_DIR}/src/api)
|
||||
set(SHARED_LIBS_STATE ${SHARED_STATE})
|
||||
|
||||
add_library(nloptOptimizer INTERFACE)
|
||||
target_link_libraries(nloptOptimizer INTERFACE nlopt)
|
||||
target_include_directories(nloptOptimizer INTERFACE ${NLopt_INCLUDE_DIR})
|
||||
|
||||
else()
|
||||
add_library(nloptOptimizer INTERFACE)
|
||||
target_link_libraries(nloptOptimizer INTERFACE Nlopt::Nlopt)
|
||||
endif()
|
||||
|
||||
#target_sources( nloptOptimizer INTERFACE
|
||||
#${CMAKE_CURRENT_SOURCE_DIR}/simplex.hpp
|
||||
#${CMAKE_CURRENT_SOURCE_DIR}/subplex.hpp
|
||||
#${CMAKE_CURRENT_SOURCE_DIR}/genetic.hpp
|
||||
#${CMAKE_CURRENT_SOURCE_DIR}/nlopt_boilerplate.hpp
|
||||
#)
|
||||
|
||||
target_compile_definitions(nloptOptimizer INTERFACE LIBNEST2D_OPTIMIZER_NLOPT)
|
||||
|
||||
# And finally plug the nloptOptimizer into libnest2d
|
||||
#target_link_libraries(libnest2d INTERFACE nloptOptimizer)
|
|
@ -1122,8 +1122,6 @@ private:
|
|||
sl::rotate(sh, item.rotation());
|
||||
|
||||
Box bb = sl::boundingBox(sh);
|
||||
bb.minCorner() += item.translation();
|
||||
bb.maxCorner() += item.translation();
|
||||
|
||||
Vertex ci, cb;
|
||||
auto bbin = sl::boundingBox(bin_);
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#ifndef PLACER_BOILERPLATE_HPP
|
||||
#define PLACER_BOILERPLATE_HPP
|
||||
|
||||
#include <libnest2d/libnest2d.hpp>
|
||||
#include <libnest2d/nester.hpp>
|
||||
|
||||
namespace libnest2d { namespace placers {
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
#define SELECTION_BOILERPLATE_HPP
|
||||
|
||||
#include <atomic>
|
||||
#include <libnest2d/libnest2d.hpp>
|
||||
#include <libnest2d/nester.hpp>
|
||||
|
||||
namespace libnest2d { namespace selections {
|
||||
|
||||
|
@ -25,7 +25,7 @@ public:
|
|||
inline void clear() { packed_bins_.clear(); }
|
||||
|
||||
protected:
|
||||
|
||||
|
||||
template<class Placer, class Container, class Bin, class PCfg>
|
||||
void remove_unpackable_items(Container &c, const Bin &bin, const PCfg& pcfg)
|
||||
{
|
||||
|
@ -33,14 +33,14 @@ protected:
|
|||
// then it should be removed from the list
|
||||
auto it = c.begin();
|
||||
while (it != c.end() && !stopcond_()) {
|
||||
|
||||
|
||||
// WARNING: The copy of itm needs to be created before Placer.
|
||||
// Placer is working with references and its destructor still
|
||||
// manipulates the item this is why the order of stack creation
|
||||
// matters here.
|
||||
// matters here.
|
||||
const Item& itm = *it;
|
||||
Item cpy{itm};
|
||||
|
||||
|
||||
Placer p{bin};
|
||||
p.configure(pcfg);
|
||||
if (itm.area() <= 0 || !p.pack(cpy)) it = c.erase(it);
|
||||
|
|
|
@ -1,23 +1,26 @@
|
|||
#include <libnest2d.h>
|
||||
#include <libnest2d/libnest2d.hpp>
|
||||
|
||||
namespace libnest2d {
|
||||
|
||||
template class Nester<NfpPlacer, FirstFitSelection>;
|
||||
template class Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
template class _Nester<NfpPlacer, FirstFitSelection>;
|
||||
template class _Nester<BottomLeftPlacer, FirstFitSelection>;
|
||||
|
||||
template PackGroup nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box& bin,
|
||||
Coord dist = 0,
|
||||
const NfpPlacer::Config& pconf,
|
||||
const FirstFitSelection::Config& sconf);
|
||||
template std::size_t _Nester<NfpPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
template std::size_t _Nester<BottomLeftPlacer, FirstFitSelection>::execute(
|
||||
std::vector<Item>::iterator, std::vector<Item>::iterator);
|
||||
|
||||
template PackGroup nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box& bin,
|
||||
ProgressFunction prg,
|
||||
StopCondition scond,
|
||||
Coord dist = 0,
|
||||
const NfpPlacer::Config& pconf,
|
||||
const FirstFitSelection::Config& sconf);
|
||||
template std::size_t nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box & bin,
|
||||
Coord dist,
|
||||
const NestConfig<NfpPlacer, FirstFitSelection> &cfg,
|
||||
NestControl ctl);
|
||||
|
||||
template std::size_t nest(std::vector<Item>::iterator from,
|
||||
std::vector<Item>::iterator to,
|
||||
const Box & bin,
|
||||
Coord dist,
|
||||
const NestConfig<BottomLeftPlacer, FirstFitSelection> &cfg,
|
||||
NestControl ctl);
|
||||
}
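// Call-site sketch for the pre-instantiated nest() overloads above (illustrative;
// the bin size and distance values are made up):
//
//     std::vector<Item> items = /* ... */;
//     std::size_t bins = nest(items.begin(), items.end(), Box(500, 400), /*dist=*/6,
//                             NestConfig<NfpPlacer, FirstFitSelection>{}, NestControl{});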
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
|
||||
# Try to find existing GTest installation
|
||||
find_package(GTest 1.7)
|
||||
|
||||
if(NOT GTEST_FOUND)
|
||||
set(URL_GTEST "https://github.com/google/googletest.git"
|
||||
CACHE STRING "Google test source code repository location.")
|
||||
|
||||
message(STATUS "GTest not found so downloading from ${URL_GTEST}")
|
||||
# Go and download google test framework, integrate it with the build
|
||||
set(GTEST_LIBS_TO_LINK gtest gtest_main)
|
||||
|
||||
if (CMAKE_VERSION VERSION_LESS 3.2)
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "")
|
||||
else()
|
||||
set(UPDATE_DISCONNECTED_IF_AVAILABLE "UPDATE_DISCONNECTED 1")
|
||||
endif()
|
||||
|
||||
include(DownloadProject)
|
||||
download_project(PROJ googletest
|
||||
GIT_REPOSITORY ${URL_GTEST}
|
||||
GIT_TAG release-1.7.0
|
||||
${UPDATE_DISCONNECTED_IF_AVAILABLE}
|
||||
)
|
||||
|
||||
# Prevent GoogleTest from overriding our compiler/linker options
|
||||
# when building with Visual Studio
|
||||
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
|
||||
|
||||
add_subdirectory(${googletest_SOURCE_DIR}
|
||||
${googletest_BINARY_DIR}
|
||||
)
|
||||
|
||||
set(GTEST_INCLUDE_DIRS ${googletest_SOURCE_DIR}/include)
|
||||
|
||||
else()
|
||||
find_package(Threads REQUIRED)
|
||||
set(GTEST_LIBS_TO_LINK ${GTEST_BOTH_LIBRARIES} Threads::Threads)
|
||||
endif()
|
||||
|
||||
add_executable(tests_clipper_nlopt
|
||||
test.cpp
|
||||
../tools/svgtools.hpp
|
||||
# ../tools/libnfpglue.hpp
|
||||
# ../tools/libnfpglue.cpp
|
||||
printer_parts.h
|
||||
printer_parts.cpp
|
||||
)
|
||||
|
||||
target_link_libraries(tests_clipper_nlopt libnest2d ${GTEST_LIBS_TO_LINK} )
|
||||
|
||||
target_include_directories(tests_clipper_nlopt PRIVATE BEFORE ${GTEST_INCLUDE_DIRS})
|
||||
|
||||
if(NOT LIBNEST2D_HEADER_ONLY)
|
||||
target_link_libraries(tests_clipper_nlopt ${LIBNAME})
|
||||
else()
|
||||
target_link_libraries(tests_clipper_nlopt libnest2d)
|
||||
endif()
|
||||
|
||||
add_test(libnest2d_tests tests_clipper_nlopt)
|
|
@ -375,7 +375,7 @@ public:
|
|||
|
||||
for(unsigned idx = 0; idx < fixeditems.size(); ++idx) {
|
||||
Item& itm = fixeditems[idx];
|
||||
itm.markAsFixed();
|
||||
itm.markAsFixedInBin(itm.binId());
|
||||
}
|
||||
|
||||
m_pck.configure(m_pconf);
|
||||
|
|
|
@ -15,7 +15,7 @@ public:
|
|||
PointClass max;
|
||||
bool defined;
|
||||
|
||||
BoundingBoxBase() : defined(false), min(PointClass::Zero()), max(PointClass::Zero()) {}
|
||||
BoundingBoxBase() : min(PointClass::Zero()), max(PointClass::Zero()), defined(false) {}
|
||||
BoundingBoxBase(const PointClass &pmin, const PointClass &pmax) :
|
||||
min(pmin), max(pmax), defined(pmin(0) < pmax(0) && pmin(1) < pmax(1)) {}
|
||||
BoundingBoxBase(const std::vector<PointClass>& points) : min(PointClass::Zero()), max(PointClass::Zero())
|
||||
|
@ -59,7 +59,7 @@ template <class PointClass>
|
|||
class BoundingBox3Base : public BoundingBoxBase<PointClass>
|
||||
{
|
||||
public:
|
||||
BoundingBox3Base() : BoundingBoxBase<PointClass>() {};
|
||||
BoundingBox3Base() : BoundingBoxBase<PointClass>() {}
|
||||
BoundingBox3Base(const PointClass &pmin, const PointClass &pmax) :
|
||||
BoundingBoxBase<PointClass>(pmin, pmax)
|
||||
{ if (pmin(2) >= pmax(2)) BoundingBoxBase<PointClass>::defined = false; }
|
||||
|
@ -100,6 +100,33 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
// Prevent warnings caused by the template definitions not being present in this hpp.
|
||||
extern template void BoundingBoxBase<Point>::scale(double factor);
|
||||
extern template void BoundingBoxBase<Vec2d>::scale(double factor);
|
||||
extern template void BoundingBoxBase<Vec3d>::scale(double factor);
|
||||
extern template void BoundingBoxBase<Point>::offset(coordf_t delta);
|
||||
extern template void BoundingBoxBase<Vec2d>::offset(coordf_t delta);
|
||||
extern template void BoundingBoxBase<Point>::merge(const Point &point);
|
||||
extern template void BoundingBoxBase<Vec2d>::merge(const Vec2d &point);
|
||||
extern template void BoundingBoxBase<Point>::merge(const Points &points);
|
||||
extern template void BoundingBoxBase<Vec2d>::merge(const Pointfs &points);
|
||||
extern template void BoundingBoxBase<Point>::merge(const BoundingBoxBase<Point> &bb);
|
||||
extern template void BoundingBoxBase<Vec2d>::merge(const BoundingBoxBase<Vec2d> &bb);
|
||||
extern template Point BoundingBoxBase<Point>::size() const;
|
||||
extern template Vec2d BoundingBoxBase<Vec2d>::size() const;
|
||||
extern template double BoundingBoxBase<Point>::radius() const;
|
||||
extern template double BoundingBoxBase<Vec2d>::radius() const;
|
||||
extern template Point BoundingBoxBase<Point>::center() const;
|
||||
extern template Vec2d BoundingBoxBase<Vec2d>::center() const;
|
||||
extern template void BoundingBox3Base<Vec3d>::merge(const Vec3d &point);
|
||||
extern template void BoundingBox3Base<Vec3d>::merge(const Pointf3s &points);
|
||||
extern template void BoundingBox3Base<Vec3d>::merge(const BoundingBox3Base<Vec3d> &bb);
|
||||
extern template Vec3d BoundingBox3Base<Vec3d>::size() const;
|
||||
extern template double BoundingBox3Base<Vec3d>::radius() const;
|
||||
extern template void BoundingBox3Base<Vec3d>::offset(coordf_t delta);
|
||||
extern template Vec3d BoundingBox3Base<Vec3d>::center() const;
|
||||
extern template coordf_t BoundingBox3Base<Vec3d>::max_size() const;
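// The matching explicit instantiations are expected to live in the corresponding
// .cpp file (sketch, shown here only for illustration):
//
//     template void BoundingBoxBase<Point>::scale(double factor);
//
// so each template member is compiled once there and merely referenced from every
// translation unit that includes this header.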
|
||||
|
||||
class BoundingBox : public BoundingBoxBase<Point>
|
||||
{
|
||||
public:
|
||||
|
@ -113,9 +140,9 @@ public:
|
|||
// to encompass the original bounding box.
|
||||
void align_to_grid(const coord_t cell_size);
|
||||
|
||||
BoundingBox() : BoundingBoxBase<Point>() {};
|
||||
BoundingBox(const Point &pmin, const Point &pmax) : BoundingBoxBase<Point>(pmin, pmax) {};
|
||||
BoundingBox(const Points &points) : BoundingBoxBase<Point>(points) {};
|
||||
BoundingBox() : BoundingBoxBase<Point>() {}
|
||||
BoundingBox(const Point &pmin, const Point &pmax) : BoundingBoxBase<Point>(pmin, pmax) {}
|
||||
BoundingBox(const Points &points) : BoundingBoxBase<Point>(points) {}
|
||||
BoundingBox(const Lines &lines);
|
||||
|
||||
friend BoundingBox get_extents_rotated(const Points &points, double angle);
|
||||
|
@ -124,25 +151,25 @@ public:
|
|||
class BoundingBox3 : public BoundingBox3Base<Vec3crd>
|
||||
{
|
||||
public:
|
||||
BoundingBox3() : BoundingBox3Base<Vec3crd>() {};
|
||||
BoundingBox3(const Vec3crd &pmin, const Vec3crd &pmax) : BoundingBox3Base<Vec3crd>(pmin, pmax) {};
|
||||
BoundingBox3(const Points3& points) : BoundingBox3Base<Vec3crd>(points) {};
|
||||
BoundingBox3() : BoundingBox3Base<Vec3crd>() {}
|
||||
BoundingBox3(const Vec3crd &pmin, const Vec3crd &pmax) : BoundingBox3Base<Vec3crd>(pmin, pmax) {}
|
||||
BoundingBox3(const Points3& points) : BoundingBox3Base<Vec3crd>(points) {}
|
||||
};
|
||||
|
||||
class BoundingBoxf : public BoundingBoxBase<Vec2d>
|
||||
{
|
||||
public:
|
||||
BoundingBoxf() : BoundingBoxBase<Vec2d>() {};
|
||||
BoundingBoxf(const Vec2d &pmin, const Vec2d &pmax) : BoundingBoxBase<Vec2d>(pmin, pmax) {};
|
||||
BoundingBoxf(const std::vector<Vec2d> &points) : BoundingBoxBase<Vec2d>(points) {};
|
||||
BoundingBoxf() : BoundingBoxBase<Vec2d>() {}
|
||||
BoundingBoxf(const Vec2d &pmin, const Vec2d &pmax) : BoundingBoxBase<Vec2d>(pmin, pmax) {}
|
||||
BoundingBoxf(const std::vector<Vec2d> &points) : BoundingBoxBase<Vec2d>(points) {}
|
||||
};
|
||||
|
||||
class BoundingBoxf3 : public BoundingBox3Base<Vec3d>
|
||||
{
|
||||
public:
|
||||
BoundingBoxf3() : BoundingBox3Base<Vec3d>() {};
|
||||
BoundingBoxf3(const Vec3d &pmin, const Vec3d &pmax) : BoundingBox3Base<Vec3d>(pmin, pmax) {};
|
||||
BoundingBoxf3(const std::vector<Vec3d> &points) : BoundingBox3Base<Vec3d>(points) {};
|
||||
BoundingBoxf3() : BoundingBox3Base<Vec3d>() {}
|
||||
BoundingBoxf3(const Vec3d &pmin, const Vec3d &pmax) : BoundingBox3Base<Vec3d>(pmin, pmax) {}
|
||||
BoundingBoxf3(const std::vector<Vec3d> &points) : BoundingBox3Base<Vec3d>(points) {}
|
||||
|
||||
BoundingBoxf3 transformed(const Transform3d& matrix) const;
|
||||
};
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
namespace Slic3r {
|
||||
|
||||
BridgeDetector::BridgeDetector(
|
||||
ExPolygon _expolygon,
|
||||
const ExPolygonCollection &_lower_slices,
|
||||
coord_t _spacing) :
|
||||
ExPolygon _expolygon,
|
||||
const ExPolygons &_lower_slices,
|
||||
coord_t _spacing) :
|
||||
// The original infill polygon, not inflated.
|
||||
expolygons(expolygons_owned),
|
||||
// All surfaces of the object supporting this region.
|
||||
|
@ -20,9 +20,9 @@ BridgeDetector::BridgeDetector(
|
|||
}
|
||||
|
||||
BridgeDetector::BridgeDetector(
|
||||
const ExPolygons &_expolygons,
|
||||
const ExPolygonCollection &_lower_slices,
|
||||
coord_t _spacing) :
|
||||
const ExPolygons &_expolygons,
|
||||
const ExPolygons &_lower_slices,
|
||||
coord_t _spacing) :
|
||||
// The original infill polygon, not inflated.
|
||||
expolygons(_expolygons),
|
||||
// All surfaces of the object supporting this region.
|
||||
|
@ -46,7 +46,11 @@ void BridgeDetector::initialize()
|
|||
// Detect what edges lie on lower slices by turning bridge contour and holes
|
||||
// into polylines and then clipping them with each lower slice's contour.
|
||||
// Currently _edges are only used to set a candidate direction of the bridge (see bridge_direction_candidates()).
|
||||
this->_edges = intersection_pl(to_polylines(grown), this->lower_slices.contours());
|
||||
Polygons contours;
|
||||
contours.reserve(this->lower_slices.size());
|
||||
for (const ExPolygon &expoly : this->lower_slices)
|
||||
contours.push_back(expoly.contour);
|
||||
this->_edges = intersection_pl(to_polylines(grown), contours);
|
||||
|
||||
#ifdef SLIC3R_DEBUG
|
||||
printf(" bridge has " PRINTF_ZU " support(s)\n", this->_edges.size());
|
||||
|
@ -54,7 +58,7 @@ void BridgeDetector::initialize()
|
|||
|
||||
// detect anchors as intersection between our bridge expolygon and the lower slices
|
||||
// safety offset required to avoid Clipper from detecting empty intersection while Boost actually found some edges
|
||||
this->_anchor_regions = intersection_ex(grown, to_polygons(this->lower_slices.expolygons), true);
|
||||
this->_anchor_regions = intersection_ex(grown, to_polygons(this->lower_slices), true);
|
||||
|
||||
/*
|
||||
if (0) {
|
||||
|
@ -271,7 +275,7 @@ BridgeDetector::unsupported_edges(double angle, Polylines* unsupported) const
|
|||
if (angle == -1) angle = this->angle;
|
||||
if (angle == -1) return;
|
||||
|
||||
Polygons grown_lower = offset(this->lower_slices.expolygons, float(this->spacing));
|
||||
Polygons grown_lower = offset(this->lower_slices, float(this->spacing));
|
||||
|
||||
for (ExPolygons::const_iterator it_expoly = this->expolygons.begin(); it_expoly != this->expolygons.end(); ++ it_expoly) {
|
||||
// get unsupported bridge edges (both contour and holes)
|
||||
|
|
|
@ -3,7 +3,6 @@
|
|||
|
||||
#include "libslic3r.h"
|
||||
#include "ExPolygon.hpp"
|
||||
#include "ExPolygonCollection.hpp"
|
||||
#include <string>
|
||||
|
||||
namespace Slic3r {
|
||||
|
@ -21,7 +20,7 @@ public:
|
|||
// In case the caller gives us the input polygons by value, make a copy.
|
||||
ExPolygons expolygons_owned;
|
||||
// Lower slices, all regions.
|
||||
const ExPolygonCollection &lower_slices;
|
||||
const ExPolygons &lower_slices;
|
||||
// Scaled extrusion width of the infill.
|
||||
coord_t spacing;
|
||||
// Angle resolution for the brute force search of the best bridging angle.
|
||||
|
@ -29,8 +28,8 @@ public:
|
|||
// The final optimal angle.
|
||||
double angle;
|
||||
|
||||
BridgeDetector(ExPolygon _expolygon, const ExPolygonCollection &_lower_slices, coord_t _extrusion_width);
|
||||
BridgeDetector(const ExPolygons &_expolygons, const ExPolygonCollection &_lower_slices, coord_t _extrusion_width);
|
||||
BridgeDetector(ExPolygon _expolygon, const ExPolygons &_lower_slices, coord_t _extrusion_width);
|
||||
BridgeDetector(const ExPolygons &_expolygons, const ExPolygons &_lower_slices, coord_t _extrusion_width);
|
||||
// If bridge_direction_override != 0, then the angle is used instead of auto-detect.
|
||||
bool detect_angle(double bridge_direction_override = 0.);
|
||||
Polygons coverage(double angle = -1) const;
|
||||
|
|
|
@ -22,6 +22,8 @@ add_library(libslic3r STATIC
|
|||
Config.hpp
|
||||
EdgeGrid.cpp
|
||||
EdgeGrid.hpp
|
||||
ElephantFootCompensation.cpp
|
||||
ElephantFootCompensation.hpp
|
||||
ExPolygon.cpp
|
||||
ExPolygon.hpp
|
||||
ExPolygonCollection.cpp
|
||||
|
@ -71,6 +73,8 @@ add_library(libslic3r STATIC
|
|||
Format/STL.hpp
|
||||
GCode/Analyzer.cpp
|
||||
GCode/Analyzer.hpp
|
||||
GCode/ThumbnailData.cpp
|
||||
GCode/ThumbnailData.hpp
|
||||
GCode/CoolingBuffer.cpp
|
||||
GCode/CoolingBuffer.hpp
|
||||
GCode/PostProcessor.cpp
|
||||
|
@ -100,7 +104,7 @@ add_library(libslic3r STATIC
|
|||
Geometry.cpp
|
||||
Geometry.hpp
|
||||
Int128.hpp
|
||||
# KdTree.hpp
|
||||
KDTreeIndirect.hpp
|
||||
Layer.cpp
|
||||
Layer.hpp
|
||||
LayerRegion.cpp
|
||||
|
@ -131,8 +135,6 @@ add_library(libslic3r STATIC
|
|||
PolygonTrimmer.hpp
|
||||
Polyline.cpp
|
||||
Polyline.hpp
|
||||
PolylineCollection.cpp
|
||||
PolylineCollection.hpp
|
||||
Print.cpp
|
||||
Print.hpp
|
||||
PrintBase.cpp
|
||||
|
@ -142,6 +144,8 @@ add_library(libslic3r STATIC
|
|||
PrintObject.cpp
|
||||
PrintRegion.cpp
|
||||
Semver.cpp
|
||||
ShortestPath.cpp
|
||||
ShortestPath.hpp
|
||||
SLAPrint.cpp
|
||||
SLAPrint.hpp
|
||||
SLA/SLAAutoSupports.hpp
|
||||
|
@ -176,8 +180,13 @@ add_library(libslic3r STATIC
|
|||
miniz_extension.cpp
|
||||
SLA/SLACommon.hpp
|
||||
SLA/SLABoilerPlate.hpp
|
||||
SLA/SLABasePool.hpp
|
||||
SLA/SLABasePool.cpp
|
||||
SLA/SLAPad.hpp
|
||||
SLA/SLAPad.cpp
|
||||
SLA/SLASupportTreeBuilder.hpp
|
||||
SLA/SLASupportTreeBuildsteps.hpp
|
||||
SLA/SLASupportTreeBuildsteps.cpp
|
||||
SLA/SLASupportTreeBuilder.cpp
|
||||
SLA/SLAConcurrency.hpp
|
||||
SLA/SLASupportTree.hpp
|
||||
SLA/SLASupportTree.cpp
|
||||
SLA/SLASupportTreeIGL.cpp
|
||||
|
@ -189,6 +198,8 @@ add_library(libslic3r STATIC
|
|||
SLA/SLARaster.cpp
|
||||
SLA/SLARasterWriter.hpp
|
||||
SLA/SLARasterWriter.cpp
|
||||
SLA/ConcaveHull.hpp
|
||||
SLA/ConcaveHull.cpp
|
||||
)
|
||||
|
||||
encoding_check(libslic3r)
|
||||
|
@ -197,7 +208,7 @@ if (SLIC3R_PCH AND NOT SLIC3R_SYNTAXONLY)
|
|||
add_precompiled_header(libslic3r pchheader.hpp FORCEINCLUDE)
|
||||
endif ()
|
||||
|
||||
target_compile_definitions(libslic3r PUBLIC -DUSE_TBB)
|
||||
target_compile_definitions(libslic3r PUBLIC -DUSE_TBB -DTBB_USE_CAPTURED_EXCEPTION=0)
|
||||
target_include_directories(libslic3r PRIVATE ${CMAKE_CURRENT_SOURCE_DIR} ${LIBNEST2D_INCLUDES} PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
|
||||
target_link_libraries(libslic3r
|
||||
libnest2d
|
||||
|
@ -214,7 +225,9 @@ target_link_libraries(libslic3r
|
|||
poly2tri
|
||||
qhull
|
||||
semver
|
||||
tbb
|
||||
TBB::tbb
|
||||
# OpenVDB::openvdb
|
||||
${CMAKE_DL_LIBS}
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#include "ClipperUtils.hpp"
|
||||
#include "Geometry.hpp"
|
||||
#include "ShortestPath.hpp"
|
||||
|
||||
// #define CLIPPER_UTILS_DEBUG
|
||||
|
||||
|
@ -106,8 +107,7 @@ void AddOuterPolyNodeToExPolygons(ClipperLib::PolyNode& polynode, ExPolygons* ex
|
|||
}
|
||||
}
|
||||
|
||||
ExPolygons
|
||||
PolyTreeToExPolygons(ClipperLib::PolyTree& polytree)
|
||||
ExPolygons PolyTreeToExPolygons(ClipperLib::PolyTree& polytree)
|
||||
{
|
||||
ExPolygons retval;
|
||||
for (int i = 0; i < polytree.ChildCount(); ++i)
|
||||
|
@ -150,8 +150,7 @@ Slic3r::Polylines ClipperPaths_to_Slic3rPolylines(const ClipperLib::Paths &input
|
|||
return retval;
|
||||
}
|
||||
|
||||
ExPolygons
|
||||
ClipperPaths_to_Slic3rExPolygons(const ClipperLib::Paths &input)
|
||||
ExPolygons ClipperPaths_to_Slic3rExPolygons(const ClipperLib::Paths &input)
|
||||
{
|
||||
// init Clipper
|
||||
ClipperLib::Clipper clipper;
|
||||
|
@ -166,8 +165,7 @@ ClipperPaths_to_Slic3rExPolygons(const ClipperLib::Paths &input)
|
|||
return PolyTreeToExPolygons(polytree);
|
||||
}
|
||||
|
||||
ClipperLib::Path
|
||||
Slic3rMultiPoint_to_ClipperPath(const MultiPoint &input)
|
||||
ClipperLib::Path Slic3rMultiPoint_to_ClipperPath(const MultiPoint &input)
|
||||
{
|
||||
ClipperLib::Path retval;
|
||||
for (Points::const_iterator pit = input.points.begin(); pit != input.points.end(); ++pit)
|
||||
|
@ -175,8 +173,7 @@ Slic3rMultiPoint_to_ClipperPath(const MultiPoint &input)
|
|||
return retval;
|
||||
}
|
||||
|
||||
ClipperLib::Path
|
||||
Slic3rMultiPoint_to_ClipperPath_reversed(const Slic3r::MultiPoint &input)
|
||||
ClipperLib::Path Slic3rMultiPoint_to_ClipperPath_reversed(const Slic3r::MultiPoint &input)
|
||||
{
|
||||
ClipperLib::Path output;
|
||||
output.reserve(input.points.size());
|
||||
|
@ -193,6 +190,19 @@ ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const Polygons &input)
|
|||
return retval;
|
||||
}
|
||||
|
||||
ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const ExPolygons &input)
|
||||
{
|
||||
ClipperLib::Paths retval;
|
||||
for (auto &ep : input) {
|
||||
retval.emplace_back(Slic3rMultiPoint_to_ClipperPath(ep.contour));
|
||||
|
||||
for (auto &h : ep.holes)
|
||||
retval.emplace_back(Slic3rMultiPoint_to_ClipperPath(h));
|
||||
}
|
||||
|
||||
return retval;
|
||||
}
|
||||
|
||||
ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const Polylines &input)
|
||||
{
|
||||
ClipperLib::Paths retval;
|
||||
|
@ -471,14 +481,16 @@ ExPolygons offset2_ex(const ExPolygons &expolygons, const float delta1,
|
|||
return union_ex(polys);
|
||||
}
|
||||
|
||||
template <class T>
|
||||
T
|
||||
_clipper_do(const ClipperLib::ClipType clipType, const Polygons &subject,
|
||||
const Polygons &clip, const ClipperLib::PolyFillType fillType, const bool safety_offset_)
|
||||
template<class T, class TSubj, class TClip>
|
||||
T _clipper_do(const ClipperLib::ClipType clipType,
|
||||
TSubj && subject,
|
||||
TClip && clip,
|
||||
const ClipperLib::PolyFillType fillType,
|
||||
const bool safety_offset_)
|
||||
{
|
||||
// read input
|
||||
ClipperLib::Paths input_subject = Slic3rMultiPoints_to_ClipperPaths(subject);
|
||||
ClipperLib::Paths input_clip = Slic3rMultiPoints_to_ClipperPaths(clip);
|
||||
ClipperLib::Paths input_subject = Slic3rMultiPoints_to_ClipperPaths(std::forward<TSubj>(subject));
|
||||
ClipperLib::Paths input_clip = Slic3rMultiPoints_to_ClipperPaths(std::forward<TClip>(clip));
|
||||
|
||||
// perform safety offset
|
||||
if (safety_offset_) {
|
||||
|
@ -505,7 +517,7 @@ _clipper_do(const ClipperLib::ClipType clipType, const Polygons &subject,
|
|||
|
||||
// Fix of #117: A large fractal pyramid takes ages to slice
|
||||
// The Clipper library has difficulties processing overlapping polygons.
|
||||
// Namely, the function Clipper::JoinCommonEdges() has potentially a terrible time complexity if the output
|
||||
// Namely, the function ClipperLib::JoinCommonEdges() has potentially a terrible time complexity if the output
|
||||
// of the operation is of the PolyTree type.
|
||||
// This function implements the following workaround:
|
||||
// 1) Perform the Clipper operation with the output to Paths. This method handles overlaps in a reasonable time.
|
||||
|
@ -647,12 +659,26 @@ _clipper_ln(ClipperLib::ClipType clipType, const Lines &subject, const Polygons
|
|||
return retval;
|
||||
}
|
||||
|
||||
ClipperLib::PolyTree
|
||||
union_pt(const Polygons &subject, bool safety_offset_)
|
||||
ClipperLib::PolyTree union_pt(const Polygons &subject, bool safety_offset_)
|
||||
{
|
||||
return _clipper_do<ClipperLib::PolyTree>(ClipperLib::ctUnion, subject, Polygons(), ClipperLib::pftEvenOdd, safety_offset_);
|
||||
}
|
||||
|
||||
ClipperLib::PolyTree union_pt(const ExPolygons &subject, bool safety_offset_)
|
||||
{
|
||||
return _clipper_do<ClipperLib::PolyTree>(ClipperLib::ctUnion, subject, Polygons(), ClipperLib::pftEvenOdd, safety_offset_);
|
||||
}
|
||||
|
||||
ClipperLib::PolyTree union_pt(Polygons &&subject, bool safety_offset_)
|
||||
{
|
||||
return _clipper_do<ClipperLib::PolyTree>(ClipperLib::ctUnion, std::move(subject), Polygons(), ClipperLib::pftEvenOdd, safety_offset_);
|
||||
}
|
||||
|
||||
ClipperLib::PolyTree union_pt(ExPolygons &&subject, bool safety_offset_)
|
||||
{
|
||||
return _clipper_do<ClipperLib::PolyTree>(ClipperLib::ctUnion, std::move(subject), Polygons(), ClipperLib::pftEvenOdd, safety_offset_);
|
||||
}
|
||||
|
||||
Polygons
|
||||
union_pt_chained(const Polygons &subject, bool safety_offset_)
|
||||
{
|
||||
|
@ -663,30 +689,123 @@ union_pt_chained(const Polygons &subject, bool safety_offset_)
|
|||
return retval;
|
||||
}
|
||||
|
||||
void traverse_pt(ClipperLib::PolyNodes &nodes, Polygons* retval)
|
||||
static ClipperLib::PolyNodes order_nodes(const ClipperLib::PolyNodes &nodes)
|
||||
{
|
||||
// collect ordering points
|
||||
Points ordering_points;
|
||||
ordering_points.reserve(nodes.size());
|
||||
for (const ClipperLib::PolyNode *node : nodes)
|
||||
ordering_points.emplace_back(Point(node->Contour.front().X, node->Contour.front().Y));
|
||||
|
||||
// perform the ordering
|
||||
ClipperLib::PolyNodes ordered_nodes = chain_clipper_polynodes(ordering_points, nodes);
|
||||
|
||||
return ordered_nodes;
|
||||
}
|
||||
|
||||
enum class e_ordering {
|
||||
ORDER_POLYNODES,
|
||||
DONT_ORDER_POLYNODES
|
||||
};
|
||||
|
||||
template<e_ordering o>
|
||||
void foreach_node(const ClipperLib::PolyNodes &nodes,
|
||||
std::function<void(const ClipperLib::PolyNode *)> fn);
|
||||
|
||||
template<> void foreach_node<e_ordering::DONT_ORDER_POLYNODES>(
|
||||
const ClipperLib::PolyNodes & nodes,
|
||||
std::function<void(const ClipperLib::PolyNode *)> fn)
|
||||
{
|
||||
for (auto &n : nodes) fn(n);
|
||||
}
|
||||
|
||||
template<> void foreach_node<e_ordering::ORDER_POLYNODES>(
|
||||
const ClipperLib::PolyNodes & nodes,
|
||||
std::function<void(const ClipperLib::PolyNode *)> fn)
|
||||
{
|
||||
auto ordered_nodes = order_nodes(nodes);
|
||||
for (auto &n : ordered_nodes) fn(n);
|
||||
}
|
||||
|
||||
template<e_ordering o>
|
||||
void _traverse_pt(const ClipperLib::PolyNodes &nodes, Polygons *retval)
|
||||
{
|
||||
/* use a nearest neighbor search to order these children
|
||||
TODO: supply start_near to chained_path() too? */
|
||||
|
||||
// collect ordering points
|
||||
Points ordering_points;
|
||||
ordering_points.reserve(nodes.size());
|
||||
for (ClipperLib::PolyNodes::const_iterator it = nodes.begin(); it != nodes.end(); ++it) {
|
||||
Point p((*it)->Contour.front().X, (*it)->Contour.front().Y);
|
||||
ordering_points.emplace_back(p);
|
||||
}
|
||||
|
||||
// perform the ordering
|
||||
ClipperLib::PolyNodes ordered_nodes;
|
||||
Slic3r::Geometry::chained_path_items(ordering_points, nodes, ordered_nodes);
|
||||
|
||||
// push results recursively
|
||||
for (ClipperLib::PolyNodes::iterator it = ordered_nodes.begin(); it != ordered_nodes.end(); ++it) {
|
||||
foreach_node<o>(nodes, [&retval](const ClipperLib::PolyNode *node) {
|
||||
// traverse the next depth
|
||||
traverse_pt((*it)->Childs, retval);
|
||||
retval->emplace_back(ClipperPath_to_Slic3rPolygon((*it)->Contour));
|
||||
if ((*it)->IsHole()) retval->back().reverse(); // ccw
|
||||
}
|
||||
_traverse_pt<o>(node->Childs, retval);
|
||||
retval->emplace_back(ClipperPath_to_Slic3rPolygon(node->Contour));
|
||||
if (node->IsHole()) retval->back().reverse(); // ccw
|
||||
});
|
||||
}
|
||||
|
||||
template<e_ordering o>
|
||||
void _traverse_pt(const ClipperLib::PolyNode *tree, ExPolygons *retval)
|
||||
{
|
||||
if (!retval || !tree) return;
|
||||
|
||||
ExPolygons &retv = *retval;
|
||||
|
||||
std::function<void(const ClipperLib::PolyNode*, ExPolygon&)> hole_fn;
|
||||
|
||||
auto contour_fn = [&retv, &hole_fn](const ClipperLib::PolyNode *pptr) {
|
||||
ExPolygon poly;
|
||||
poly.contour.points = ClipperPath_to_Slic3rPolygon(pptr->Contour);
|
||||
auto fn = std::bind(hole_fn, std::placeholders::_1, poly);
|
||||
foreach_node<o>(pptr->Childs, fn);
|
||||
retv.push_back(poly);
|
||||
};
|
||||
|
||||
hole_fn = [&contour_fn](const ClipperLib::PolyNode *pptr, ExPolygon& poly)
|
||||
{
|
||||
poly.holes.emplace_back();
|
||||
poly.holes.back().points = ClipperPath_to_Slic3rPolygon(pptr->Contour);
|
||||
foreach_node<o>(pptr->Childs, contour_fn);
|
||||
};
|
||||
|
||||
contour_fn(tree);
|
||||
}
|
||||
|
||||
template<e_ordering o>
|
||||
void _traverse_pt(const ClipperLib::PolyNodes &nodes, ExPolygons *retval)
|
||||
{
|
||||
// Here is the actual traverse
|
||||
foreach_node<o>(nodes, [&retval](const ClipperLib::PolyNode *node) {
|
||||
_traverse_pt<o>(node, retval);
|
||||
});
|
||||
}
|
||||
|
||||
void traverse_pt(const ClipperLib::PolyNode *tree, ExPolygons *retval)
{
    _traverse_pt<e_ordering::ORDER_POLYNODES>(tree, retval);
}

void traverse_pt_unordered(const ClipperLib::PolyNode *tree, ExPolygons *retval)
{
    _traverse_pt<e_ordering::DONT_ORDER_POLYNODES>(tree, retval);
}

void traverse_pt(const ClipperLib::PolyNodes &nodes, Polygons *retval)
{
    _traverse_pt<e_ordering::ORDER_POLYNODES>(nodes, retval);
}

void traverse_pt(const ClipperLib::PolyNodes &nodes, ExPolygons *retval)
{
    _traverse_pt<e_ordering::ORDER_POLYNODES>(nodes, retval);
}

void traverse_pt_unordered(const ClipperLib::PolyNodes &nodes, Polygons *retval)
{
    _traverse_pt<e_ordering::DONT_ORDER_POLYNODES>(nodes, retval);
}

void traverse_pt_unordered(const ClipperLib::PolyNodes &nodes, ExPolygons *retval)
{
    _traverse_pt<e_ordering::DONT_ORDER_POLYNODES>(nodes, retval);
}
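// A minimal usage sketch (added for illustration, not part of this commit): flatten the
// PolyTree produced by union_pt() into Polygons through the ordered traversal above.
// The helper name flatten_union_example() is hypothetical.
static Polygons flatten_union_example(const Polygons &src)
{
    Polygons out;
    // union_pt() builds the containment tree; traverse_pt() walks it depth first,
    // chaining siblings by nearest neighbor and reversing holes to ccw.
    traverse_pt(union_pt(src).Childs, &out);
    return out;
}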
|
||||
|
||||
Polygons simplify_polygons(const Polygons &subject, bool preserve_collinear)

@@ -795,4 +914,330 @@ Polygons top_level_islands(const Slic3r::Polygons &polygons)
    return out;
}

// Outer offset shall not split the input contour into multiples. It is expected that the solution will be non-empty and contain just a single polygon.
ClipperLib::Paths fix_after_outer_offset(const ClipperLib::Path &input, ClipperLib::PolyFillType filltype, bool reverse_result)
{
    ClipperLib::Paths solution;
    if (! input.empty()) {
        ClipperLib::Clipper clipper;
        clipper.AddPath(input, ClipperLib::ptSubject, true);
        clipper.ReverseSolution(reverse_result);
        clipper.Execute(ClipperLib::ctUnion, solution, filltype, filltype);
    }
    return solution;
}
|
||||
|
||||
// Inner offset may split the source contour into multiple contours, but one shall not be inside the other.
ClipperLib::Paths fix_after_inner_offset(const ClipperLib::Path &input, ClipperLib::PolyFillType filltype, bool reverse_result)
{
    ClipperLib::Paths solution;
    if (! input.empty()) {
        ClipperLib::Clipper clipper;
        clipper.AddPath(input, ClipperLib::ptSubject, true);
        ClipperLib::IntRect r = clipper.GetBounds();
        r.left -= 10; r.top -= 10; r.right += 10; r.bottom += 10;
        if (filltype == ClipperLib::pftPositive)
            clipper.AddPath({ ClipperLib::IntPoint(r.left, r.bottom), ClipperLib::IntPoint(r.left, r.top), ClipperLib::IntPoint(r.right, r.top), ClipperLib::IntPoint(r.right, r.bottom) }, ClipperLib::ptSubject, true);
        else
            clipper.AddPath({ ClipperLib::IntPoint(r.left, r.bottom), ClipperLib::IntPoint(r.right, r.bottom), ClipperLib::IntPoint(r.right, r.top), ClipperLib::IntPoint(r.left, r.top) }, ClipperLib::ptSubject, true);
        clipper.ReverseSolution(reverse_result);
        clipper.Execute(ClipperLib::ctUnion, solution, filltype, filltype);
        if (! solution.empty())
            solution.erase(solution.begin());
    }
    return solution;
}
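// Added commentary (not from the original commit): the padded bounding rectangle above gives the
// union a well defined outermost boundary under the requested fill rule, so a self-intersecting
// inner offset is resolved into properly nested contours; that rectangle comes back as the first
// path of the solution, which is why solution.begin() is erased before returning.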
|
||||
|
||||
ClipperLib::Path mittered_offset_path_scaled(const Points &contour, const std::vector<float> &deltas, double miter_limit)
|
||||
{
|
||||
assert(contour.size() == deltas.size());
|
||||
|
||||
#ifndef NDEBUG
|
||||
// Verify that the deltas are either all positive, or all negative.
|
||||
bool positive = false;
|
||||
bool negative = false;
|
||||
for (float delta : deltas)
|
||||
if (delta < 0.f)
|
||||
negative = true;
|
||||
else if (delta > 0.f)
|
||||
positive = true;
|
||||
assert(! (negative && positive));
|
||||
#endif /* NDEBUG */
|
||||
|
||||
ClipperLib::Path out;
|
||||
|
||||
if (deltas.size() > 2)
|
||||
{
|
||||
out.reserve(contour.size() * 2);
|
||||
|
||||
// Clamp miter limit to 2.
|
||||
miter_limit = (miter_limit > 2.) ? 2. / (miter_limit * miter_limit) : 0.5;
|
||||
|
||||
// perpendicular vector
|
||||
auto perp = [](const Vec2d &v) -> Vec2d { return Vec2d(v.y(), - v.x()); };
|
||||
|
||||
// Add a new point to the output, scale by CLIPPER_OFFSET_SCALE and round to ClipperLib::cInt.
|
||||
auto add_offset_point = [&out](Vec2d pt) {
|
||||
pt *= double(CLIPPER_OFFSET_SCALE);
|
||||
pt += Vec2d(0.5 - (pt.x() < 0), 0.5 - (pt.y() < 0));
|
||||
out.emplace_back(ClipperLib::cInt(pt.x()), ClipperLib::cInt(pt.y()));
|
||||
};
|
||||
|
||||
// Minimum edge length, squared.
|
||||
double lmin = *std::max_element(deltas.begin(), deltas.end()) * CLIPPER_OFFSET_SHORTEST_EDGE_FACTOR;
|
||||
double l2min = lmin * lmin;
|
||||
// Minimum angle to consider two edges to be parallel.
|
||||
// Vojtech's estimate.
|
||||
// const double sin_min_parallel = EPSILON + 1. / double(CLIPPER_OFFSET_SCALE);
|
||||
// Implementation equal to Clipper.
|
||||
const double sin_min_parallel = 1.;
|
||||
|
||||
// Find the last point further from pt by l2min.
|
||||
Vec2d pt = contour.front().cast<double>();
|
||||
size_t iprev = contour.size() - 1;
|
||||
Vec2d ptprev;
|
||||
for (; iprev > 0; -- iprev) {
|
||||
ptprev = contour[iprev].cast<double>();
|
||||
if ((ptprev - pt).squaredNorm() > l2min)
|
||||
break;
|
||||
}
|
||||
|
||||
if (iprev != 0) {
|
||||
size_t ilast = iprev;
|
||||
// Normal to the (pt - ptprev) segment.
|
||||
Vec2d nprev = perp(pt - ptprev).normalized();
|
||||
for (size_t i = 0; ; ) {
|
||||
// Find the next point further from pt by l2min.
|
||||
size_t j = i + 1;
|
||||
Vec2d ptnext;
|
||||
for (; j <= ilast; ++ j) {
|
||||
ptnext = contour[j].cast<double>();
|
||||
double l2 = (ptnext - pt).squaredNorm();
|
||||
if (l2 > l2min)
|
||||
break;
|
||||
}
|
||||
if (j > ilast) {
|
||||
assert(i <= ilast);
|
||||
// If the last edge is too short, merge it with the previous edge.
|
||||
i = ilast;
|
||||
ptnext = contour.front().cast<double>();
|
||||
}
|
||||
|
||||
// Normal to the (ptnext - pt) segment.
|
||||
Vec2d nnext = perp(ptnext - pt).normalized();
|
||||
|
||||
double delta = deltas[i];
|
||||
double sin_a = clamp(-1., 1., cross2(nprev, nnext));
|
||||
double convex = sin_a * delta;
|
||||
if (convex <= - sin_min_parallel) {
|
||||
// Concave corner.
|
||||
add_offset_point(pt + nprev * delta);
|
||||
add_offset_point(pt);
|
||||
add_offset_point(pt + nnext * delta);
|
||||
} else {
|
||||
double dot = nprev.dot(nnext);
|
||||
if (convex < sin_min_parallel && dot > 0.) {
|
||||
// Nearly parallel.
|
||||
add_offset_point((nprev.dot(nnext) > 0.) ? (pt + nprev * delta) : pt);
|
||||
} else {
|
||||
// Convex corner, possibly extremely sharp if convex < sin_min_parallel.
|
||||
double r = 1. + dot;
|
||||
if (r >= miter_limit)
|
||||
add_offset_point(pt + (nprev + nnext) * (delta / r));
|
||||
else {
|
||||
double dx = std::tan(std::atan2(sin_a, dot) / 4.);
|
||||
Vec2d newpt1 = pt + (nprev - perp(nprev) * dx) * delta;
|
||||
Vec2d newpt2 = pt + (nnext + perp(nnext) * dx) * delta;
|
||||
#ifndef NDEBUG
|
||||
Vec2d vedge = 0.5 * (newpt1 + newpt2) - pt;
|
||||
double dist_norm = vedge.norm();
|
||||
assert(std::abs(dist_norm - std::abs(delta)) < SCALED_EPSILON);
|
||||
#endif /* NDEBUG */
|
||||
add_offset_point(newpt1);
|
||||
add_offset_point(newpt2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (i == ilast)
|
||||
break;
|
||||
|
||||
ptprev = pt;
|
||||
nprev = nnext;
|
||||
pt = ptnext;
|
||||
i = j;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#if 0
|
||||
{
|
||||
ClipperLib::Path polytmp(out);
|
||||
unscaleClipperPolygon(polytmp);
|
||||
Slic3r::Polygon offsetted = ClipperPath_to_Slic3rPolygon(polytmp);
|
||||
BoundingBox bbox = get_extents(contour);
|
||||
bbox.merge(get_extents(offsetted));
|
||||
static int iRun = 0;
|
||||
SVG svg(debug_out_path("mittered_offset_path_scaled-%d.svg", iRun ++).c_str(), bbox);
|
||||
svg.draw_outline(Polygon(contour), "blue", scale_(0.01));
|
||||
svg.draw_outline(offsetted, "red", scale_(0.01));
|
||||
svg.draw(contour, "blue", scale_(0.03));
|
||||
svg.draw((Points)offsetted, "blue", scale_(0.03));
|
||||
}
|
||||
#endif
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
Polygons variable_offset_inner(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
// Verify that the deltas are all non positive.
|
||||
for (const std::vector<float> &ds : deltas)
|
||||
for (float delta : ds)
|
||||
assert(delta <= 0.);
|
||||
assert(expoly.holes.size() + 1 == deltas.size());
|
||||
#endif /* NDEBUG */
|
||||
|
||||
// 1) Offset the outer contour.
|
||||
ClipperLib::Paths contours = fix_after_inner_offset(mittered_offset_path_scaled(expoly.contour.points, deltas.front(), miter_limit), ClipperLib::pftNegative, true);
|
||||
|
||||
// 2) Offset the holes one by one, collect the results.
|
||||
ClipperLib::Paths holes;
|
||||
holes.reserve(expoly.holes.size());
|
||||
for (const Polygon& hole : expoly.holes)
|
||||
append(holes, fix_after_outer_offset(mittered_offset_path_scaled(hole, deltas[1 + &hole - expoly.holes.data()], miter_limit), ClipperLib::pftPositive, false));
|
||||
|
||||
// 3) Subtract holes from the contours.
|
||||
ClipperLib::Paths output;
|
||||
if (holes.empty())
|
||||
output = std::move(contours);
|
||||
else {
|
||||
ClipperLib::Clipper clipper;
|
||||
clipper.Clear();
|
||||
clipper.AddPaths(contours, ClipperLib::ptSubject, true);
|
||||
clipper.AddPaths(holes, ClipperLib::ptClip, true);
|
||||
clipper.Execute(ClipperLib::ctDifference, output, ClipperLib::pftNonZero, ClipperLib::pftNonZero);
|
||||
}
|
||||
|
||||
    // 4) Unscale the output.
    unscaleClipperPolygons(output);
    return ClipperPaths_to_Slic3rPolygons(output);
}
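// A minimal usage sketch (added for illustration, not part of this commit): a uniform inner
// offset expressed through the per-point variable offset API above. The helper name and the
// 0.2 mm value are arbitrary; deltas are non-positive, in scaled units, one per contour point.
static Polygons shrink_uniformly_example(const ExPolygon &expoly, double miter_limit = 2.)
{
    const float delta = - float(scale_(0.2));
    std::vector<std::vector<float>> deltas;
    deltas.emplace_back(expoly.contour.points.size(), delta);
    for (const Polygon &hole : expoly.holes)
        deltas.emplace_back(hole.points.size(), delta);
    return variable_offset_inner(expoly, deltas, miter_limit);
}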
|
||||
|
||||
Polygons variable_offset_outer(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
// Verify that the deltas are all non-negative.
|
||||
for (const std::vector<float>& ds : deltas)
|
||||
for (float delta : ds)
|
||||
assert(delta >= 0.);
|
||||
assert(expoly.holes.size() + 1 == deltas.size());
|
||||
#endif /* NDEBUG */
|
||||
|
||||
// 1) Offset the outer contour.
|
||||
ClipperLib::Paths contours = fix_after_outer_offset(mittered_offset_path_scaled(expoly.contour.points, deltas.front(), miter_limit), ClipperLib::pftPositive, false);
|
||||
|
||||
// 2) Offset the holes one by one, collect the results.
|
||||
ClipperLib::Paths holes;
|
||||
holes.reserve(expoly.holes.size());
|
||||
for (const Polygon& hole : expoly.holes)
|
||||
append(holes, fix_after_inner_offset(mittered_offset_path_scaled(hole, deltas[1 + &hole - expoly.holes.data()], miter_limit), ClipperLib::pftPositive, true));
|
||||
|
||||
// 3) Subtract holes from the contours.
|
||||
ClipperLib::Paths output;
|
||||
if (holes.empty())
|
||||
output = std::move(contours);
|
||||
else {
|
||||
ClipperLib::Clipper clipper;
|
||||
clipper.Clear();
|
||||
clipper.AddPaths(contours, ClipperLib::ptSubject, true);
|
||||
clipper.AddPaths(holes, ClipperLib::ptClip, true);
|
||||
clipper.Execute(ClipperLib::ctDifference, output, ClipperLib::pftNonZero, ClipperLib::pftNonZero);
|
||||
}
|
||||
|
||||
// 4) Unscale the output.
|
||||
unscaleClipperPolygons(output);
|
||||
return ClipperPaths_to_Slic3rPolygons(output);
|
||||
}
|
||||
|
||||
ExPolygons variable_offset_outer_ex(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
// Verify that the deltas are all non-negative.
|
||||
for (const std::vector<float>& ds : deltas)
|
||||
for (float delta : ds)
|
||||
assert(delta >= 0.);
|
||||
assert(expoly.holes.size() + 1 == deltas.size());
|
||||
#endif /* NDEBUG */
|
||||
|
||||
// 1) Offset the outer contour.
|
||||
ClipperLib::Paths contours = fix_after_outer_offset(mittered_offset_path_scaled(expoly.contour.points, deltas.front(), miter_limit), ClipperLib::pftPositive, false);
|
||||
|
||||
// 2) Offset the holes one by one, collect the results.
|
||||
ClipperLib::Paths holes;
|
||||
holes.reserve(expoly.holes.size());
|
||||
for (const Polygon& hole : expoly.holes)
|
||||
append(holes, fix_after_inner_offset(mittered_offset_path_scaled(hole, deltas[1 + &hole - expoly.holes.data()], miter_limit), ClipperLib::pftPositive, true));
|
||||
|
||||
// 3) Subtract holes from the contours.
|
||||
unscaleClipperPolygons(contours);
|
||||
ExPolygons output;
|
||||
if (holes.empty()) {
|
||||
output.reserve(contours.size());
|
||||
for (ClipperLib::Path &path : contours)
|
||||
output.emplace_back(ClipperPath_to_Slic3rPolygon(path));
|
||||
} else {
|
||||
ClipperLib::Clipper clipper;
|
||||
unscaleClipperPolygons(holes);
|
||||
clipper.AddPaths(contours, ClipperLib::ptSubject, true);
|
||||
clipper.AddPaths(holes, ClipperLib::ptClip, true);
|
||||
ClipperLib::PolyTree polytree;
|
||||
clipper.Execute(ClipperLib::ctDifference, polytree, ClipperLib::pftNonZero, ClipperLib::pftNonZero);
|
||||
output = PolyTreeToExPolygons(polytree);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
|
||||
ExPolygons variable_offset_inner_ex(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
// Verify that the deltas are all non positive.
|
||||
for (const std::vector<float>& ds : deltas)
|
||||
for (float delta : ds)
|
||||
assert(delta <= 0.);
|
||||
assert(expoly.holes.size() + 1 == deltas.size());
|
||||
#endif /* NDEBUG */
|
||||
|
||||
// 1) Offset the outer contour.
|
||||
ClipperLib::Paths contours = fix_after_inner_offset(mittered_offset_path_scaled(expoly.contour.points, deltas.front(), miter_limit), ClipperLib::pftNegative, false);
|
||||
|
||||
// 2) Offset the holes one by one, collect the results.
|
||||
ClipperLib::Paths holes;
|
||||
holes.reserve(expoly.holes.size());
|
||||
for (const Polygon& hole : expoly.holes)
|
||||
append(holes, fix_after_outer_offset(mittered_offset_path_scaled(hole, deltas[1 + &hole - expoly.holes.data()], miter_limit), ClipperLib::pftNegative, true));
|
||||
|
||||
// 3) Subtract holes from the contours.
|
||||
unscaleClipperPolygons(contours);
|
||||
ExPolygons output;
|
||||
if (holes.empty()) {
|
||||
output.reserve(contours.size());
|
||||
for (ClipperLib::Path &path : contours)
|
||||
output.emplace_back(ClipperPath_to_Slic3rPolygon(path));
|
||||
} else {
|
||||
ClipperLib::Clipper clipper;
|
||||
unscaleClipperPolygons(holes);
|
||||
clipper.AddPaths(contours, ClipperLib::ptSubject, true);
|
||||
clipper.AddPaths(holes, ClipperLib::ptClip, true);
|
||||
ClipperLib::PolyTree polytree;
|
||||
clipper.Execute(ClipperLib::ctDifference, polytree, ClipperLib::pftNonZero, ClipperLib::pftNonZero);
|
||||
output = PolyTreeToExPolygons(polytree);
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -34,6 +34,7 @@ Slic3r::ExPolygons PolyTreeToExPolygons(ClipperLib::PolyTree& polytree);
|
|||
|
||||
ClipperLib::Path Slic3rMultiPoint_to_ClipperPath(const Slic3r::MultiPoint &input);
|
||||
ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const Polygons &input);
|
||||
ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const ExPolygons &input);
|
||||
ClipperLib::Paths Slic3rMultiPoints_to_ClipperPaths(const Polylines &input);
|
||||
Slic3r::Polygon ClipperPath_to_Slic3rPolygon(const ClipperLib::Path &input);
|
||||
Slic3r::Polyline ClipperPath_to_Slic3rPolyline(const ClipperLib::Path &input);
|
||||
|
@ -215,8 +216,19 @@ inline Slic3r::ExPolygons union_ex(const Slic3r::Surfaces &subject, bool safety_
|
|||
|
||||
|
||||
ClipperLib::PolyTree union_pt(const Slic3r::Polygons &subject, bool safety_offset_ = false);
|
||||
ClipperLib::PolyTree union_pt(const Slic3r::ExPolygons &subject, bool safety_offset_ = false);
|
||||
ClipperLib::PolyTree union_pt(Slic3r::Polygons &&subject, bool safety_offset_ = false);
|
||||
ClipperLib::PolyTree union_pt(Slic3r::ExPolygons &&subject, bool safety_offset_ = false);
|
||||
|
||||
Slic3r::Polygons union_pt_chained(const Slic3r::Polygons &subject, bool safety_offset_ = false);
|
||||
void traverse_pt(ClipperLib::PolyNodes &nodes, Slic3r::Polygons* retval);
|
||||
|
||||
void traverse_pt(const ClipperLib::PolyNodes &nodes, Slic3r::Polygons *retval);
|
||||
void traverse_pt(const ClipperLib::PolyNodes &nodes, Slic3r::ExPolygons *retval);
|
||||
void traverse_pt(const ClipperLib::PolyNode *tree, Slic3r::ExPolygons *retval);
|
||||
|
||||
void traverse_pt_unordered(const ClipperLib::PolyNodes &nodes, Slic3r::Polygons *retval);
|
||||
void traverse_pt_unordered(const ClipperLib::PolyNodes &nodes, Slic3r::ExPolygons *retval);
|
||||
void traverse_pt_unordered(const ClipperLib::PolyNode *tree, Slic3r::ExPolygons *retval);
|
||||
|
||||
/* OTHER */
|
||||
Slic3r::Polygons simplify_polygons(const Slic3r::Polygons &subject, bool preserve_collinear = false);
|
||||
|
@ -226,6 +238,11 @@ void safety_offset(ClipperLib::Paths* paths);
|
|||
|
||||
Polygons top_level_islands(const Slic3r::Polygons &polygons);
|
||||
|
||||
Polygons variable_offset_inner(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit = 2.);
|
||||
Polygons variable_offset_outer(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit = 2.);
|
||||
ExPolygons variable_offset_outer_ex(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit = 2.);
|
||||
ExPolygons variable_offset_inner_ex(const ExPolygon &expoly, const std::vector<std::vector<float>> &deltas, double miter_limit = 2.);
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
@ -271,8 +271,6 @@ ConfigOptionDef* ConfigDef::add_nullable(const t_config_option_key &opt_key, Con
|
|||
return def;
|
||||
}
|
||||
|
||||
std::string ConfigOptionDef::nocli = "~~~noCLI";
|
||||
|
||||
std::ostream& ConfigDef::print_cli_help(std::ostream& out, bool show_defaults, std::function<bool(const ConfigOptionDef &)> filter) const
|
||||
{
|
||||
// prepare a function for wrapping text
|
||||
|
@ -427,7 +425,30 @@ std::string ConfigBase::opt_serialize(const t_config_option_key &opt_key) const
|
|||
return opt->serialize();
|
||||
}
|
||||
|
||||
bool ConfigBase::set_deserialize(const t_config_option_key &opt_key_src, const std::string &value_src, bool append)
|
||||
void ConfigBase::set(const std::string &opt_key, int value, bool create)
|
||||
{
|
||||
ConfigOption *opt = this->option_throw(opt_key, create);
|
||||
switch (opt->type()) {
|
||||
case coInt: static_cast<ConfigOptionInt*>(opt)->value = value; break;
|
||||
case coFloat: static_cast<ConfigOptionFloat*>(opt)->value = value; break;
|
||||
case coFloatOrPercent: static_cast<ConfigOptionFloatOrPercent*>(opt)->value = value; static_cast<ConfigOptionFloatOrPercent*>(opt)->percent = false; break;
|
||||
case coString: static_cast<ConfigOptionString*>(opt)->value = std::to_string(value); break;
|
||||
default: throw BadOptionTypeException("ConfigBase::set() - conversion from int not possible");
|
||||
}
|
||||
}
|
||||
|
||||
void ConfigBase::set(const std::string &opt_key, double value, bool create)
|
||||
{
|
||||
ConfigOption *opt = this->option_throw(opt_key, create);
|
||||
switch (opt->type()) {
|
||||
case coFloat: static_cast<ConfigOptionFloat*>(opt)->value = value; break;
|
||||
case coFloatOrPercent: static_cast<ConfigOptionFloatOrPercent*>(opt)->value = value; static_cast<ConfigOptionFloatOrPercent*>(opt)->percent = false; break;
|
||||
case coString: static_cast<ConfigOptionString*>(opt)->value = std::to_string(value); break;
|
||||
default: throw BadOptionTypeException("ConfigBase::set() - conversion from float not possible");
|
||||
}
|
||||
}
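// A minimal usage sketch (added for illustration): the numeric set() overloads above convert the
// value to the option's native type, so an int may land in a float or string option. The keys are
// hypothetical and must be defined for "cfg"; otherwise option_throw() raises
// UnknownOptionException, and an impossible conversion raises BadOptionTypeException.
static void set_numeric_options_example(ConfigBase &cfg)
{
    cfg.set("example_int_opt",    42);      // coInt, stored as 42
    cfg.set("example_float_opt",  3);       // coFloat, int promoted to 3.0
    cfg.set("example_string_opt", 2.5);     // coString, stored as "2.500000"
}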
|
||||
|
||||
bool ConfigBase::set_deserialize_nothrow(const t_config_option_key &opt_key_src, const std::string &value_src, bool append)
|
||||
{
|
||||
t_config_option_key opt_key = opt_key_src;
|
||||
std::string value = value_src;
|
||||
|
@ -440,6 +461,18 @@ bool ConfigBase::set_deserialize(const t_config_option_key &opt_key_src, const s
|
|||
return this->set_deserialize_raw(opt_key, value, append);
|
||||
}
|
||||
|
||||
void ConfigBase::set_deserialize(const t_config_option_key &opt_key_src, const std::string &value_src, bool append)
|
||||
{
|
||||
if (! this->set_deserialize_nothrow(opt_key_src, value_src, append))
|
||||
throw BadOptionTypeException("ConfigBase::set_deserialize() failed");
|
||||
}
|
||||
|
||||
void ConfigBase::set_deserialize(std::initializer_list<SetDeserializeItem> items)
|
||||
{
|
||||
for (const SetDeserializeItem &item : items)
|
||||
this->set_deserialize(item.opt_key, item.opt_value, item.append);
|
||||
}
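// A minimal usage sketch (added for illustration): several options deserialized in one call
// through the initializer-list overload above; the keys are hypothetical.
static void set_deserialize_batch_example(ConfigBase &cfg)
{
    cfg.set_deserialize({
        { "example_bool_opt",  true },
        { "example_int_opt",   3 },
        { "example_float_opt", 0.45 }
    });
}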
|
||||
|
||||
bool ConfigBase::set_deserialize_raw(const t_config_option_key &opt_key_src, const std::string &value, bool append)
|
||||
{
|
||||
t_config_option_key opt_key = opt_key_src;
|
||||
|
@ -670,6 +703,12 @@ void ConfigBase::null_nullables()
|
|||
}
|
||||
}
|
||||
|
||||
DynamicConfig::DynamicConfig(const ConfigBase& rhs, const t_config_option_keys& keys)
|
||||
{
|
||||
for (const t_config_option_key& opt_key : keys)
|
||||
this->options[opt_key] = std::unique_ptr<ConfigOption>(rhs.option(opt_key)->clone());
|
||||
}
|
||||
|
||||
bool DynamicConfig::operator==(const DynamicConfig &rhs) const
|
||||
{
|
||||
auto it1 = this->options.begin();
|
||||
|
@ -819,7 +858,7 @@ bool DynamicConfig::read_cli(int argc, char** argv, t_config_option_keys* extra,
|
|||
static_cast<ConfigOptionString*>(opt_base)->value = value;
|
||||
} else {
|
||||
// Any scalar value of a type different from Bool and String.
|
||||
if (! this->set_deserialize(opt_key, value, false)) {
|
||||
if (! this->set_deserialize_nothrow(opt_key, value, false)) {
|
||||
boost::nowide::cerr << "Invalid value supplied for --" << token.c_str() << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
|
|
@@ -52,6 +52,16 @@ public:
|
|||
std::runtime_error(std::string("No definition exception: ") + opt_key) {}
|
||||
};
|
||||
|
||||
/// Indicate that an unsupported accessor was called on a config option.
|
||||
class BadOptionTypeException : public std::runtime_error
|
||||
{
|
||||
public:
|
||||
BadOptionTypeException() :
|
||||
std::runtime_error("Bad option type exception") {}
|
||||
BadOptionTypeException(const char* message) :
|
||||
std::runtime_error(message) {}
|
||||
};
|
||||
|
||||
// Type of a configuration value.
|
||||
enum ConfigOptionType {
|
||||
coVectorType = 0x4000,
|
||||
|
@ -117,10 +127,10 @@ public:
|
|||
virtual ConfigOption* clone() const = 0;
|
||||
// Set a value from a ConfigOption. The two options should be compatible.
|
||||
virtual void set(const ConfigOption *option) = 0;
|
||||
virtual int getInt() const { throw std::runtime_error("Calling ConfigOption::getInt on a non-int ConfigOption"); }
|
||||
virtual double getFloat() const { throw std::runtime_error("Calling ConfigOption::getFloat on a non-float ConfigOption"); }
|
||||
virtual bool getBool() const { throw std::runtime_error("Calling ConfigOption::getBool on a non-boolean ConfigOption"); }
|
||||
virtual void setInt(int /* val */) { throw std::runtime_error("Calling ConfigOption::setInt on a non-int ConfigOption"); }
|
||||
virtual int getInt() const { throw BadOptionTypeException("Calling ConfigOption::getInt on a non-int ConfigOption"); }
|
||||
virtual double getFloat() const { throw BadOptionTypeException("Calling ConfigOption::getFloat on a non-float ConfigOption"); }
|
||||
virtual bool getBool() const { throw BadOptionTypeException("Calling ConfigOption::getBool on a non-boolean ConfigOption"); }
|
||||
virtual void setInt(int /* val */) { throw BadOptionTypeException("Calling ConfigOption::setInt on a non-int ConfigOption"); }
|
||||
virtual bool operator==(const ConfigOption &rhs) const = 0;
|
||||
bool operator!=(const ConfigOption &rhs) const { return ! (*this == rhs); }
|
||||
bool is_scalar() const { return (int(this->type()) & int(coVectorType)) == 0; }
|
||||
|
@ -1444,7 +1454,7 @@ public:
|
|||
std::vector<std::string> cli_args(const std::string &key) const;
|
||||
|
||||
// Assign this key to cli to disable CLI for this option.
|
||||
static std::string nocli;
|
||||
static const constexpr char *nocli = "~~~noCLI";
|
||||
};
|
||||
|
||||
// Map from a config option name to its definition.
|
||||
|
@ -1513,32 +1523,48 @@ protected:
|
|||
public:
|
||||
// Non-virtual methods:
|
||||
bool has(const t_config_option_key &opt_key) const { return this->option(opt_key) != nullptr; }
|
||||
|
||||
const ConfigOption* option(const t_config_option_key &opt_key) const
|
||||
{ return const_cast<ConfigBase*>(this)->option(opt_key, false); }
|
||||
|
||||
ConfigOption* option(const t_config_option_key &opt_key, bool create = false)
|
||||
{ return this->optptr(opt_key, create); }
|
||||
|
||||
template<typename TYPE>
|
||||
TYPE* option(const t_config_option_key &opt_key, bool create = false)
|
||||
{
|
||||
ConfigOption *opt = this->optptr(opt_key, create);
|
||||
return (opt == nullptr || opt->type() != TYPE::static_type()) ? nullptr : static_cast<TYPE*>(opt);
|
||||
}
|
||||
|
||||
template<typename TYPE>
|
||||
const TYPE* option(const t_config_option_key &opt_key) const
|
||||
{ return const_cast<ConfigBase*>(this)->option<TYPE>(opt_key, false); }
|
||||
template<typename TYPE>
|
||||
TYPE* option_throw(const t_config_option_key &opt_key, bool create = false)
|
||||
|
||||
ConfigOption* option_throw(const t_config_option_key &opt_key, bool create = false)
|
||||
{
|
||||
ConfigOption *opt = this->optptr(opt_key, create);
|
||||
if (opt == nullptr)
|
||||
throw UnknownOptionException(opt_key);
|
||||
return opt;
|
||||
}
|
||||
|
||||
const ConfigOption* option_throw(const t_config_option_key &opt_key) const
|
||||
{ return const_cast<ConfigBase*>(this)->option_throw(opt_key, false); }
|
||||
|
||||
template<typename TYPE>
|
||||
TYPE* option_throw(const t_config_option_key &opt_key, bool create = false)
|
||||
{
|
||||
ConfigOption *opt = this->option_throw(opt_key, create);
|
||||
if (opt->type() != TYPE::static_type())
|
||||
throw std::runtime_error("Conversion to a wrong type");
|
||||
throw BadOptionTypeException("Conversion to a wrong type");
|
||||
return static_cast<TYPE*>(opt);
|
||||
}
|
||||
|
||||
template<typename TYPE>
|
||||
const TYPE* option_throw(const t_config_option_key &opt_key) const
|
||||
{ return const_cast<ConfigBase*>(this)->option_throw<TYPE>(opt_key, false); }
|
||||
|
||||
// Apply all keys of other ConfigBase defined by this->def() to this ConfigBase.
|
||||
// An UnknownOptionException is thrown in case some option keys of other are not defined by this->def(),
|
||||
// or this ConfigBase is of a StaticConfig type and it does not support some of the keys, and ignore_nonexistent is not set.
|
||||
|
@ -1551,9 +1577,40 @@ public:
|
|||
t_config_option_keys diff(const ConfigBase &other) const;
|
||||
t_config_option_keys equal(const ConfigBase &other) const;
|
||||
std::string opt_serialize(const t_config_option_key &opt_key) const;
|
||||
|
||||
// Set a value. Convert numeric types using a C style implicit conversion / promotion model.
|
||||
// Throw if option is not available and create is not enabled,
|
||||
// or if the conversion is not possible.
|
||||
// Conversion to string is always possible.
|
||||
void set(const std::string &opt_key, bool value, bool create = false)
|
||||
{ this->option_throw<ConfigOptionBool>(opt_key, create)->value = value; }
|
||||
void set(const std::string &opt_key, int value, bool create = false);
|
||||
void set(const std::string &opt_key, double value, bool create = false);
|
||||
void set(const std::string &opt_key, const char *value, bool create = false)
|
||||
{ this->option_throw<ConfigOptionString>(opt_key, create)->value = value; }
|
||||
void set(const std::string &opt_key, const std::string &value, bool create = false)
|
||||
{ this->option_throw<ConfigOptionString>(opt_key, create)->value = value; }
|
||||
|
||||
// Set a configuration value from a string, it will call an overridable handle_legacy()
|
||||
// to resolve renamed and removed configuration keys.
|
||||
bool set_deserialize(const t_config_option_key &opt_key, const std::string &str, bool append = false);
|
||||
bool set_deserialize_nothrow(const t_config_option_key &opt_key_src, const std::string &value_src, bool append = false);
|
||||
// May throw BadOptionTypeException() if the operation fails.
|
||||
void set_deserialize(const t_config_option_key &opt_key, const std::string &str, bool append = false);
|
||||
struct SetDeserializeItem {
|
||||
SetDeserializeItem(const char *opt_key, const char *opt_value, bool append = false) : opt_key(opt_key), opt_value(opt_value), append(append) {}
|
||||
SetDeserializeItem(const std::string &opt_key, const std::string &opt_value, bool append = false) : opt_key(opt_key), opt_value(opt_value), append(append) {}
|
||||
SetDeserializeItem(const char *opt_key, const bool value, bool append = false) : opt_key(opt_key), opt_value(value ? "1" : "0"), append(append) {}
|
||||
SetDeserializeItem(const std::string &opt_key, const bool value, bool append = false) : opt_key(opt_key), opt_value(value ? "1" : "0"), append(append) {}
|
||||
SetDeserializeItem(const char *opt_key, const int value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
SetDeserializeItem(const std::string &opt_key, const int value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
SetDeserializeItem(const char *opt_key, const float value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
SetDeserializeItem(const std::string &opt_key, const float value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
SetDeserializeItem(const char *opt_key, const double value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
SetDeserializeItem(const std::string &opt_key, const double value, bool append = false) : opt_key(opt_key), opt_value(std::to_string(value)), append(append) {}
|
||||
std::string opt_key; std::string opt_value; bool append = false;
|
||||
};
|
||||
// May throw BadOptionTypeException() if the operation fails.
|
||||
void set_deserialize(std::initializer_list<SetDeserializeItem> items);
|
||||
|
||||
double get_abs_value(const t_config_option_key &opt_key) const;
|
||||
double get_abs_value(const t_config_option_key &opt_key, double ratio_over) const;
|
||||
|
@ -1580,9 +1637,11 @@ class DynamicConfig : public virtual ConfigBase
|
|||
{
|
||||
public:
|
||||
DynamicConfig() {}
|
||||
DynamicConfig(const DynamicConfig& other) { *this = other; }
|
||||
DynamicConfig(DynamicConfig&& other) : options(std::move(other.options)) { other.options.clear(); }
|
||||
virtual ~DynamicConfig() override { clear(); }
|
||||
DynamicConfig(const DynamicConfig &rhs) { *this = rhs; }
|
||||
DynamicConfig(DynamicConfig &&rhs) : options(std::move(rhs.options)) { rhs.options.clear(); }
|
||||
explicit DynamicConfig(const ConfigBase &rhs, const t_config_option_keys &keys);
|
||||
explicit DynamicConfig(const ConfigBase& rhs) : DynamicConfig(rhs, rhs.keys()) {}
|
||||
virtual ~DynamicConfig() override { clear(); }
|
||||
|
||||
// Copy a content of one DynamicConfig to another DynamicConfig.
|
||||
// If rhs.def() is not null, then it has to be equal to this->def().
|
||||
|
|
|
@@ -46,11 +46,29 @@ void EdgeGrid::Grid::create(const Polygons &polygons, coord_t resolution)
|
|||
++ ncontours;
|
||||
|
||||
// Collect the contours.
|
||||
m_contours.assign(ncontours, NULL);
|
||||
m_contours.assign(ncontours, nullptr);
|
||||
ncontours = 0;
|
||||
for (size_t j = 0; j < polygons.size(); ++ j)
|
||||
if (! polygons[j].points.empty())
|
||||
m_contours[ncontours++] = &polygons[j].points;
|
||||
m_contours[ncontours ++] = &polygons[j].points;
|
||||
|
||||
create_from_m_contours(resolution);
|
||||
}
|
||||
|
||||
void EdgeGrid::Grid::create(const std::vector<Points> &polygons, coord_t resolution)
|
||||
{
|
||||
// Count the contours.
|
||||
size_t ncontours = 0;
|
||||
for (size_t j = 0; j < polygons.size(); ++ j)
|
||||
if (! polygons[j].empty())
|
||||
++ ncontours;
|
||||
|
||||
// Collect the contours.
|
||||
m_contours.assign(ncontours, nullptr);
|
||||
ncontours = 0;
|
||||
for (size_t j = 0; j < polygons.size(); ++ j)
|
||||
if (! polygons[j].empty())
|
||||
m_contours[ncontours ++] = &polygons[j];
|
||||
|
||||
create_from_m_contours(resolution);
|
||||
}
|
||||
|
@ -66,7 +84,7 @@ void EdgeGrid::Grid::create(const ExPolygon &expoly, coord_t resolution)
|
|||
++ ncontours;
|
||||
|
||||
// Collect the contours.
|
||||
m_contours.assign(ncontours, NULL);
|
||||
m_contours.assign(ncontours, nullptr);
|
||||
ncontours = 0;
|
||||
if (! expoly.contour.points.empty())
|
||||
m_contours[ncontours++] = &expoly.contour.points;
|
||||
|
@ -91,7 +109,7 @@ void EdgeGrid::Grid::create(const ExPolygons &expolygons, coord_t resolution)
|
|||
}
|
||||
|
||||
// Collect the contours.
|
||||
m_contours.assign(ncontours, NULL);
|
||||
m_contours.assign(ncontours, nullptr);
|
||||
ncontours = 0;
|
||||
for (size_t i = 0; i < expolygons.size(); ++ i) {
|
||||
const ExPolygon &expoly = expolygons[i];
|
||||
|
@ -113,6 +131,7 @@ void EdgeGrid::Grid::create(const ExPolygonCollection &expolygons, coord_t resol
|
|||
// m_contours has been initialized. Now fill in the edge grid.
|
||||
void EdgeGrid::Grid::create_from_m_contours(coord_t resolution)
|
||||
{
|
||||
assert(resolution > 0);
|
||||
// 1) Measure the bounding box.
|
||||
for (size_t i = 0; i < m_contours.size(); ++ i) {
|
||||
const Slic3r::Points &pts = *m_contours[i];
|
||||
|
@ -281,7 +300,11 @@ void EdgeGrid::Grid::create_from_m_contours(coord_t resolution)
|
|||
Visitor(std::vector<std::pair<size_t, size_t>> &cell_data, std::vector<Cell> &cells, size_t cols) :
|
||||
cell_data(cell_data), cells(cells), cols(cols), i(0), j(0) {}
|
||||
|
||||
void operator()(coord_t iy, coord_t ix) { cell_data[cells[iy*cols + ix].end++] = std::pair<size_t, size_t>(i, j); }
|
||||
inline bool operator()(coord_t iy, coord_t ix) {
|
||||
cell_data[cells[iy*cols + ix].end++] = std::pair<size_t, size_t>(i, j);
|
||||
// Continue traversing the grid along the edge.
|
||||
return true;
|
||||
}
|
||||
|
||||
std::vector<std::pair<size_t, size_t>> &cell_data;
|
||||
std::vector<Cell> &cells;
|
||||
|
@@ -1017,8 +1040,139 @@ float EdgeGrid::Grid::signed_distance_bilinear(const Point &pt) const
|
|||
|
||||
return f;
|
||||
}
|
||||
|
||||
bool EdgeGrid::Grid::signed_distance_edges(const Point &pt, coord_t search_radius, coordf_t &result_min_dist, bool *pon_segment) const {
|
||||
|
||||
EdgeGrid::Grid::ClosestPointResult EdgeGrid::Grid::closest_point(const Point &pt, coord_t search_radius) const
|
||||
{
|
||||
BoundingBox bbox;
|
||||
bbox.min = bbox.max = Point(pt(0) - m_bbox.min(0), pt(1) - m_bbox.min(1));
|
||||
bbox.defined = true;
|
||||
// Upper boundary, round to grid and test validity.
|
||||
bbox.max(0) += search_radius;
|
||||
bbox.max(1) += search_radius;
|
||||
ClosestPointResult result;
|
||||
if (bbox.max(0) < 0 || bbox.max(1) < 0)
|
||||
return result;
|
||||
bbox.max(0) /= m_resolution;
|
||||
bbox.max(1) /= m_resolution;
|
||||
if ((size_t)bbox.max(0) >= m_cols)
|
||||
bbox.max(0) = m_cols - 1;
|
||||
if ((size_t)bbox.max(1) >= m_rows)
|
||||
bbox.max(1) = m_rows - 1;
|
||||
// Lower boundary, round to grid and test validity.
|
||||
bbox.min(0) -= search_radius;
|
||||
bbox.min(1) -= search_radius;
|
||||
if (bbox.min(0) < 0)
|
||||
bbox.min(0) = 0;
|
||||
if (bbox.min(1) < 0)
|
||||
bbox.min(1) = 0;
|
||||
bbox.min(0) /= m_resolution;
|
||||
bbox.min(1) /= m_resolution;
|
||||
// Is the interval empty?
|
||||
if (bbox.min(0) > bbox.max(0) ||
|
||||
bbox.min(1) > bbox.max(1))
|
||||
return result;
|
||||
// Traverse all cells in the bounding box.
|
||||
double d_min = double(search_radius);
|
||||
// Signum of the distance field at pt.
|
||||
int sign_min = 0;
|
||||
double l2_seg_min = 1.;
|
||||
for (int r = bbox.min(1); r <= bbox.max(1); ++ r) {
|
||||
for (int c = bbox.min(0); c <= bbox.max(0); ++ c) {
|
||||
const Cell &cell = m_cells[r * m_cols + c];
|
||||
for (size_t i = cell.begin; i < cell.end; ++ i) {
|
||||
const size_t contour_idx = m_cell_data[i].first;
|
||||
const Slic3r::Points &pts = *m_contours[contour_idx];
|
||||
size_t ipt = m_cell_data[i].second;
|
||||
// End points of the line segment.
|
||||
const Slic3r::Point &p1 = pts[ipt];
|
||||
const Slic3r::Point &p2 = pts[(ipt + 1 == pts.size()) ? 0 : ipt + 1];
|
||||
const Slic3r::Point v_seg = p2 - p1;
|
||||
const Slic3r::Point v_pt = pt - p1;
|
||||
// dot(p2-p1, pt-p1)
|
||||
int64_t t_pt = int64_t(v_seg(0)) * int64_t(v_pt(0)) + int64_t(v_seg(1)) * int64_t(v_pt(1));
|
||||
// l2 of seg
|
||||
int64_t l2_seg = int64_t(v_seg(0)) * int64_t(v_seg(0)) + int64_t(v_seg(1)) * int64_t(v_seg(1));
|
||||
if (t_pt < 0) {
|
||||
// Closest to p1.
|
||||
double dabs = sqrt(int64_t(v_pt(0)) * int64_t(v_pt(0)) + int64_t(v_pt(1)) * int64_t(v_pt(1)));
|
||||
if (dabs < d_min) {
|
||||
// Previous point.
|
||||
const Slic3r::Point &p0 = pts[(ipt == 0) ? (pts.size() - 1) : ipt - 1];
|
||||
Slic3r::Point v_seg_prev = p1 - p0;
|
||||
int64_t t2_pt = int64_t(v_seg_prev(0)) * int64_t(v_pt(0)) + int64_t(v_seg_prev(1)) * int64_t(v_pt(1));
|
||||
if (t2_pt > 0) {
|
||||
// Inside the wedge between the previous and the next segment.
|
||||
d_min = dabs;
|
||||
// Set the signum depending on whether the vertex is convex or reflex.
|
||||
int64_t det = int64_t(v_seg_prev(0)) * int64_t(v_seg(1)) - int64_t(v_seg_prev(1)) * int64_t(v_seg(0));
|
||||
assert(det != 0);
|
||||
sign_min = (det > 0) ? 1 : -1;
|
||||
result.contour_idx = contour_idx;
|
||||
result.start_point_idx = ipt;
|
||||
result.t = 0.;
|
||||
#ifndef NDEBUG
|
||||
Vec2d vfoot = (p1 - pt).cast<double>();
|
||||
double dist_foot = vfoot.norm();
|
||||
double dist_foot_err = dist_foot - d_min;
|
||||
assert(std::abs(dist_foot_err) < 1e-7 * d_min);
|
||||
#endif /* NDEBUG */
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (t_pt > l2_seg) {
|
||||
// Closest to p2. Then p2 is the starting point of another segment, which shall be discovered in the same cell.
|
||||
continue;
|
||||
} else {
|
||||
// Closest to the segment.
|
||||
assert(t_pt >= 0 && t_pt <= l2_seg);
|
||||
int64_t d_seg = int64_t(v_seg(1)) * int64_t(v_pt(0)) - int64_t(v_seg(0)) * int64_t(v_pt(1));
|
||||
double d = double(d_seg) / sqrt(double(l2_seg));
|
||||
double dabs = std::abs(d);
|
||||
if (dabs < d_min) {
|
||||
d_min = dabs;
|
||||
sign_min = (d_seg < 0) ? -1 : ((d_seg == 0) ? 0 : 1);
|
||||
l2_seg_min = l2_seg;
|
||||
result.contour_idx = contour_idx;
|
||||
result.start_point_idx = ipt;
|
||||
result.t = t_pt;
|
||||
#ifndef NDEBUG
|
||||
Vec2d foot = p1.cast<double>() * (1. - result.t / l2_seg_min) + p2.cast<double>() * (result.t / l2_seg_min);
|
||||
Vec2d vfoot = foot - pt.cast<double>();
|
||||
double dist_foot = vfoot.norm();
|
||||
double dist_foot_err = dist_foot - d_min;
|
||||
assert(std::abs(dist_foot_err) < 1e-7 || std::abs(dist_foot_err) < 1e-7 * d_min);
|
||||
#endif /* NDEBUG */
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (result.contour_idx != -1 && d_min <= double(search_radius)) {
|
||||
result.distance = d_min * sign_min;
|
||||
result.t /= l2_seg_min;
|
||||
assert(result.t >= 0. && result.t < 1.);
|
||||
#ifndef NDEBUG
|
||||
{
|
||||
const Slic3r::Points &pts = *m_contours[result.contour_idx];
|
||||
const Slic3r::Point &p1 = pts[result.start_point_idx];
|
||||
const Slic3r::Point &p2 = pts[(result.start_point_idx + 1 == pts.size()) ? 0 : result.start_point_idx + 1];
|
||||
Vec2d vfoot;
|
||||
if (result.t == 0)
|
||||
vfoot = p1.cast<double>() - pt.cast<double>();
|
||||
else
|
||||
vfoot = p1.cast<double>() * (1. - result.t) + p2.cast<double>() * result.t - pt.cast<double>();
|
||||
double dist_foot = vfoot.norm();
|
||||
double dist_foot_err = dist_foot - std::abs(result.distance);
|
||||
assert(std::abs(dist_foot_err) < 1e-7 || std::abs(dist_foot_err) < 1e-7 * std::abs(result.distance));
|
||||
}
|
||||
#endif /* NDEBUG */
|
||||
} else
|
||||
result = ClosestPointResult();
|
||||
return result;
|
||||
}
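// A minimal usage sketch (added for illustration, not part of this commit): querying the closest
// contour point within a search radius. The helper name is hypothetical, and calling set_bbox()
// before create() is an assumption mirroring existing call sites.
static void closest_point_example(const Polygons &contours, const Point &pt)
{
    EdgeGrid::Grid grid;
    grid.set_bbox(get_extents(contours));
    grid.create(contours, coord_t(scale_(1.)));
    EdgeGrid::Grid::ClosestPointResult cp = grid.closest_point(pt, coord_t(scale_(5.)));
    if (cp.valid()) {
        // cp.contour_idx / cp.start_point_idx identify the closest edge,
        // cp.t is its parameter in <0, 1), cp.distance the signed distance.
    }
}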
|
||||
|
||||
bool EdgeGrid::Grid::signed_distance_edges(const Point &pt, coord_t search_radius, coordf_t &result_min_dist, bool *pon_segment) const
|
||||
{
|
||||
BoundingBox bbox;
|
||||
bbox.min = bbox.max = Point(pt(0) - m_bbox.min(0), pt(1) - m_bbox.min(1));
|
||||
bbox.defined = true;
|
||||
|
@ -1047,7 +1201,7 @@ bool EdgeGrid::Grid::signed_distance_edges(const Point &pt, coord_t search_radiu
|
|||
bbox.min(1) > bbox.max(1))
|
||||
return false;
|
||||
// Traverse all cells in the bounding box.
|
||||
float d_min = search_radius;
|
||||
double d_min = double(search_radius);
|
||||
// Signum of the distance field at pt.
|
||||
int sign_min = 0;
|
||||
bool on_segment = false;
|
||||
|
|
|
@@ -21,10 +21,13 @@ public:
|
|||
void set_bbox(const BoundingBox &bbox) { m_bbox = bbox; }
|
||||
|
||||
void create(const Polygons &polygons, coord_t resolution);
|
||||
void create(const std::vector<Points> &polygons, coord_t resolution);
|
||||
void create(const ExPolygon &expoly, coord_t resolution);
|
||||
void create(const ExPolygons &expolygons, coord_t resolution);
|
||||
void create(const ExPolygonCollection &expolygons, coord_t resolution);
|
||||
|
||||
const std::vector<const Slic3r::Points*>& contours() const { return m_contours; }
|
||||
|
||||
#if 0
|
||||
// Test, whether the edges inside the grid intersect with the polygons provided.
|
||||
bool intersect(const MultiPoint &polyline, bool closed);
|
||||
|
@ -46,7 +49,19 @@ public:
|
|||
float signed_distance_bilinear(const Point &pt) const;
|
||||
|
||||
// Calculate a signed distance to the contours in search_radius from the point.
|
||||
bool signed_distance_edges(const Point &pt, coord_t search_radius, coordf_t &result_min_dist, bool *pon_segment = NULL) const;
|
||||
struct ClosestPointResult {
|
||||
size_t contour_idx = size_t(-1);
|
||||
size_t start_point_idx = size_t(-1);
|
||||
// Signed distance to the closest point.
|
||||
double distance = std::numeric_limits<double>::max();
|
||||
// Parameter of the closest point on edge starting with start_point_idx <0, 1)
|
||||
double t = 0.;
|
||||
|
||||
bool valid() const { return contour_idx != size_t(-1); }
|
||||
};
|
||||
ClosestPointResult closest_point(const Point &pt, coord_t search_radius) const;
|
||||
|
||||
bool signed_distance_edges(const Point &pt, coord_t search_radius, coordf_t &result_min_dist, bool *pon_segment = nullptr) const;
|
||||
|
||||
// Calculate a signed distance to the contours in search_radius from the point. If no edge is found in search_radius,
|
||||
// return an interpolated value from m_signed_distance_field, if it exists.
|
||||
|
@ -65,7 +80,7 @@ public:
|
|||
std::vector<std::pair<ContourEdge, ContourEdge>> intersecting_edges() const;
|
||||
bool has_intersecting_edges() const;
|
||||
|
||||
template<typename FUNCTION> void visit_cells_intersecting_line(Slic3r::Point p1, Slic3r::Point p2, FUNCTION func) const
|
||||
template<typename VISITOR> void visit_cells_intersecting_line(Slic3r::Point p1, Slic3r::Point p2, VISITOR &visitor) const
|
||||
{
|
||||
// End points of the line segment.
|
||||
p1(0) -= m_bbox.min(0);
|
||||
|
@ -82,8 +97,7 @@ public:
|
|||
assert(ixb >= 0 && size_t(ixb) < m_cols);
|
||||
assert(iyb >= 0 && size_t(iyb) < m_rows);
|
||||
// Account for the end points.
|
||||
func(iy, ix);
|
||||
if (ix == ixb && iy == iyb)
|
||||
if (! visitor(iy, ix) || (ix == ixb && iy == iyb))
|
||||
// Both ends fall into the same cell.
|
||||
return;
|
||||
// Rasterize the central part of the line.
|
||||
|
@ -113,7 +127,8 @@ public:
|
|||
ey = int64_t(dx) * m_resolution;
|
||||
iy += 1;
|
||||
}
|
||||
func(iy, ix);
|
||||
if (! visitor(iy, ix))
|
||||
return;
|
||||
} while (ix != ixb || iy != iyb);
|
||||
}
|
||||
else {
|
||||
|
@ -131,7 +146,8 @@ public:
|
|||
ey = int64_t(dx) * m_resolution;
|
||||
iy -= 1;
|
||||
}
|
||||
func(iy, ix);
|
||||
if (! visitor(iy, ix))
|
||||
return;
|
||||
} while (ix != ixb || iy != iyb);
|
||||
}
|
||||
}
|
||||
|
@ -153,7 +169,8 @@ public:
|
|||
ey = int64_t(dx) * m_resolution;
|
||||
iy += 1;
|
||||
}
|
||||
func(iy, ix);
|
||||
if (! visitor(iy, ix))
|
||||
return;
|
||||
} while (ix != ixb || iy != iyb);
|
||||
}
|
||||
else {
|
||||
|
@ -185,7 +202,8 @@ public:
|
|||
ey = int64_t(dx) * m_resolution;
|
||||
iy -= 1;
|
||||
}
|
||||
func(iy, ix);
|
||||
if (! visitor(iy, ix))
|
||||
return;
|
||||
} while (ix != ixb || iy != iyb);
|
||||
}
|
||||
}
|
||||
|
|
416 src/libslic3r/ElephantFootCompensation.cpp Normal file
@@ -0,0 +1,416 @@
|
|||
#include "clipper/clipper_z.hpp"
|
||||
|
||||
#include "libslic3r.h"
|
||||
#include "ClipperUtils.hpp"
|
||||
#include "EdgeGrid.hpp"
|
||||
#include "ExPolygon.hpp"
|
||||
#include "ElephantFootCompensation.hpp"
|
||||
#include "Flow.hpp"
|
||||
#include "Geometry.hpp"
|
||||
#include "SVG.hpp"
|
||||
|
||||
#include <cmath>
|
||||
#include <cassert>
|
||||
|
||||
// #define CONTOUR_DISTANCE_DEBUG_SVG
|
||||
|
||||
namespace Slic3r {
|
||||
|
||||
struct ResampledPoint {
|
||||
ResampledPoint(size_t idx_src, bool interpolated, double curve_parameter) : idx_src(idx_src), interpolated(interpolated), curve_parameter(curve_parameter) {}
|
||||
|
||||
size_t idx_src;
|
||||
// Is this point interpolated or initial?
|
||||
bool interpolated;
|
||||
// Euclidean distance along the curve from the 0th point.
|
||||
double curve_parameter;
|
||||
};
|
||||
|
||||
std::vector<float> contour_distance(const EdgeGrid::Grid &grid, const size_t idx_contour, const Slic3r::Points &contour, const std::vector<ResampledPoint> &resampled_point_parameters, double search_radius)
|
||||
{
|
||||
assert(! contour.empty());
|
||||
assert(contour.size() >= 2);
|
||||
|
||||
std::vector<float> out;
|
||||
|
||||
if (contour.size() > 2)
|
||||
{
|
||||
#ifdef CONTOUR_DISTANCE_DEBUG_SVG
|
||||
static int iRun = 0;
|
||||
++ iRun;
|
||||
BoundingBox bbox = get_extents(contour);
|
||||
bbox.merge(grid.bbox());
|
||||
ExPolygon expoly_grid;
|
||||
expoly_grid.contour = Polygon(*grid.contours().front());
|
||||
for (size_t i = 1; i < grid.contours().size(); ++ i)
|
||||
expoly_grid.holes.emplace_back(Polygon(*grid.contours()[i]));
|
||||
#endif
|
||||
struct Visitor {
|
||||
Visitor(const EdgeGrid::Grid &grid, const size_t idx_contour, const std::vector<ResampledPoint> &resampled_point_parameters, double dist_same_contour_reject) :
|
||||
grid(grid), idx_contour(idx_contour), resampled_point_parameters(resampled_point_parameters), dist_same_contour_reject(dist_same_contour_reject) {}
|
||||
|
||||
void init(const size_t aidx_point_start, const Point &apt_start, Vec2d dir, const double radius) {
|
||||
this->idx_point_start = aidx_point_start;
|
||||
this->pt = apt_start.cast<double>() + SCALED_EPSILON * dir;
|
||||
dir *= radius;
|
||||
this->pt_start = this->pt.cast<coord_t>();
|
||||
// Trim the vector by the grid's bounding box.
|
||||
const BoundingBox &bbox = this->grid.bbox();
|
||||
double t = 1.;
|
||||
for (size_t axis = 0; axis < 2; ++ axis) {
|
||||
double dx = std::abs(dir(axis));
|
||||
if (dx >= EPSILON) {
|
||||
double tedge = (dir(axis) > 0) ? (double(bbox.max(axis)) - EPSILON - this->pt(axis)) : (this->pt(axis) - double(bbox.min(axis)) - EPSILON);
|
||||
if (tedge < dx)
|
||||
t = tedge / dx;
|
||||
}
|
||||
}
|
||||
this->dir = dir;
|
||||
if (t < 1.)
|
||||
dir *= t;
|
||||
this->pt_end = (this->pt + dir).cast<coord_t>();
|
||||
this->t_min = 1.;
|
||||
}
|
||||
|
||||
bool operator()(coord_t iy, coord_t ix) {
|
||||
// Called with a row and column of the grid cell, which is intersected by a line.
|
||||
auto cell_data_range = this->grid.cell_data_range(iy, ix);
|
||||
bool valid = true;
|
||||
for (auto it_contour_and_segment = cell_data_range.first; it_contour_and_segment != cell_data_range.second; ++ it_contour_and_segment) {
|
||||
// End points of the line segment and their vector.
|
||||
auto segment = this->grid.segment(*it_contour_and_segment);
|
||||
if (Geometry::segments_intersect(segment.first, segment.second, this->pt_start, this->pt_end)) {
|
||||
// The two segments intersect. Calculate the intersection.
|
||||
Vec2d pt2 = segment.first.cast<double>();
|
||||
Vec2d dir2 = segment.second.cast<double>() - pt2;
|
||||
Vec2d vptpt2 = pt - pt2;
|
||||
double denom = dir(0) * dir2(1) - dir2(0) * dir(1);
|
||||
|
||||
if (std::abs(denom) >= EPSILON) {
|
||||
double t = cross2(dir2, vptpt2) / denom;
|
||||
assert(t > - EPSILON && t < 1. + EPSILON);
|
||||
bool this_valid = true;
|
||||
if (it_contour_and_segment->first == idx_contour) {
|
||||
// The intersected segment originates from the same contour as the starting point.
|
||||
// Reject the intersection if it is close to the starting point.
|
||||
// Find the start and end points of this segment
|
||||
double param_lo = resampled_point_parameters[idx_point_start].curve_parameter;
|
||||
double param_hi;
|
||||
double param_end = resampled_point_parameters.back().curve_parameter;
|
||||
{
|
||||
const Slic3r::Points &ipts = *grid.contours()[it_contour_and_segment->first];
|
||||
size_t ipt = it_contour_and_segment->second;
|
||||
ResampledPoint key(ipt, false, 0.);
|
||||
auto lower = [](const ResampledPoint& l, const ResampledPoint r) { return l.idx_src < r.idx_src || (l.idx_src == r.idx_src && int(l.interpolated) > int(r.interpolated)); };
|
||||
auto it = std::lower_bound(resampled_point_parameters.begin(), resampled_point_parameters.end(), key, lower);
|
||||
assert(it != resampled_point_parameters.end() && it->idx_src == ipt && ! it->interpolated);
|
||||
double t2 = cross2(dir, vptpt2) / denom;
|
||||
assert(t2 > - EPSILON && t2 < 1. + EPSILON);
|
||||
if (++ ipt == ipts.size())
|
||||
param_hi = t2 * dir2.norm();
|
||||
else
|
||||
param_hi = it->curve_parameter + t2 * dir2.norm();
|
||||
}
|
||||
if (param_lo > param_hi)
|
||||
std::swap(param_lo, param_hi);
|
||||
assert(param_lo >= 0. && param_lo <= param_end);
|
||||
assert(param_hi >= 0. && param_hi <= param_end);
|
||||
this_valid = param_hi > param_lo + dist_same_contour_reject && param_hi - param_end < param_lo - dist_same_contour_reject;
|
||||
}
|
||||
if (t < this->t_min) {
|
||||
this->t_min = t;
|
||||
valid = this_valid;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (! valid)
|
||||
this->t_min = 1.;
|
||||
}
|
||||
// Continue traversing the grid along the edge.
|
||||
return true;
|
||||
}
|
||||
|
||||
const EdgeGrid::Grid &grid;
|
||||
const size_t idx_contour;
|
||||
const std::vector<ResampledPoint> &resampled_point_parameters;
|
||||
const double dist_same_contour_reject;
|
||||
|
||||
size_t idx_point_start;
|
||||
Point pt_start;
|
||||
Point pt_end;
|
||||
Vec2d pt;
|
||||
Vec2d dir;
|
||||
// Minimum parameter along the vector (pt_end - pt_start).
|
||||
double t_min;
|
||||
} visitor(grid, idx_contour, resampled_point_parameters, search_radius);
|
||||
|
||||
const Point *pt_this = &contour.back();
|
||||
size_t idx_pt_this = contour.size() - 1;
|
||||
const Point *pt_prev = pt_this - 1;
|
||||
// perpendicular vector
|
||||
auto perp = [](const Vec2d& v) -> Vec2d { return Vec2d(v.y(), -v.x()); };
|
||||
Vec2d vprev = (*pt_this - *pt_prev).cast<double>().normalized();
|
||||
out.reserve(contour.size() + 1);
|
||||
for (const Point &pt_next : contour) {
|
||||
Vec2d vnext = (pt_next - *pt_this).cast<double>().normalized();
|
||||
Vec2d dir = - (perp(vprev) + perp(vnext)).normalized();
|
||||
Vec2d dir_perp = perp(dir);
|
||||
double cross = cross2(vprev, vnext);
|
||||
double dot = vprev.dot(vnext);
|
||||
double a = (cross < 0 || dot > 0.5) ? (M_PI / 3.) : (0.48 * acos(std::min(1., - dot)));
|
||||
// Throw rays, collect distances.
|
||||
std::vector<double> distances;
|
||||
int num_rays = 15;
|
||||
|
||||
#ifdef CONTOUR_DISTANCE_DEBUG_SVG
|
||||
SVG svg(debug_out_path("contour_distance_raycasted-%d-%d.svg", iRun, &pt_next - contour.data()).c_str(), bbox);
|
||||
svg.draw(expoly_grid);
|
||||
svg.draw_outline(Polygon(contour), "blue", scale_(0.01));
|
||||
svg.draw(*pt_this, "red", scale_(0.1));
|
||||
#endif /* CONTOUR_DISTANCE_DEBUG_SVG */
|
||||
|
||||
for (int i = - num_rays + 1; i < num_rays; ++ i) {
|
||||
double angle = a * i / (int)num_rays;
|
||||
double c = cos(angle);
|
||||
double s = sin(angle);
|
||||
Vec2d v = c * dir + s * dir_perp;
|
||||
visitor.init(idx_pt_this, *pt_this, v, search_radius);
|
||||
grid.visit_cells_intersecting_line(visitor.pt_start, visitor.pt_end, visitor);
|
||||
distances.emplace_back(visitor.t_min);
|
||||
#ifdef CONTOUR_DISTANCE_DEBUG_SVG
|
||||
svg.draw(Line(visitor.pt_start, visitor.pt_end), "yellow", scale_(0.01));
|
||||
if (visitor.t_min < 1.) {
|
||||
Vec2d pt = visitor.pt + visitor.dir * visitor.t_min;
|
||||
svg.draw(Point(pt), "red", scale_(0.1));
|
||||
}
|
||||
#endif /* CONTOUR_DISTANCE_DEBUG_SVG */
|
||||
}
|
||||
#ifdef CONTOUR_DISTANCE_DEBUG_SVG
|
||||
svg.Close();
|
||||
#endif /* CONTOUR_DISTANCE_DEBUG_SVG */
|
||||
std::sort(distances.begin(), distances.end());
|
||||
#if 0
|
||||
double median = distances[distances.size() / 2];
|
||||
double standard_deviation = 0;
|
||||
for (double d : distances)
|
||||
standard_deviation += (d - median) * (d - median);
|
||||
standard_deviation = sqrt(standard_deviation / (distances.size() - 1));
|
||||
double avg = 0;
|
||||
size_t cnt = 0;
|
||||
for (double d : distances)
|
||||
if (d > median - standard_deviation - EPSILON && d < median + standard_deviation + EPSILON) {
|
||||
avg += d;
|
||||
++ cnt;
|
||||
}
|
||||
avg /= double(cnt);
|
||||
out.emplace_back(float(avg * search_radius));
|
||||
#else
|
||||
out.emplace_back(float(distances.front() * search_radius));
|
||||
#endif
|
||||
#ifdef CONTOUR_DISTANCE_DEBUG_SVG
|
||||
printf("contour_distance_raycasted-%d-%d.svg - distance %lf\n", iRun, &pt_next - contour.data(), unscale<double>(out.back()));
|
||||
#endif /* CONTOUR_DISTANCE_DEBUG_SVG */
|
||||
pt_this = &pt_next;
|
||||
idx_pt_this = &pt_next - contour.data();
|
||||
vprev = vnext;
|
||||
}
|
||||
// Rotate the vector by one item.
|
||||
out.emplace_back(out.front());
|
||||
out.erase(out.begin());
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
Points resample_polygon(const Points &contour, double dist, std::vector<ResampledPoint> &resampled_point_parameters)
|
||||
{
|
||||
Points out;
|
||||
out.reserve(contour.size());
|
||||
resampled_point_parameters.reserve(contour.size());
|
||||
if (contour.size() > 2) {
|
||||
Vec2d pt_prev = contour.back().cast<double>();
|
||||
for (const Point &pt : contour) {
|
||||
size_t idx_this = &pt - contour.data();
|
||||
const Vec2d pt_this = pt.cast<double>();
|
||||
const Vec2d v = pt_this - pt_prev;
|
||||
const double l = v.norm();
|
||||
const size_t n = size_t(ceil(l / dist));
|
||||
const double l_step = l / n;
|
||||
for (size_t i = 1; i < n; ++ i) {
|
||||
double interpolation_parameter = double(i) / n;
|
||||
Vec2d new_pt = pt_prev + v * interpolation_parameter;
|
||||
out.emplace_back(new_pt.cast<coord_t>());
|
||||
resampled_point_parameters.emplace_back(idx_this, true, l_step);
|
||||
}
|
||||
out.emplace_back(pt);
|
||||
resampled_point_parameters.emplace_back(idx_this, false, l_step);
|
||||
pt_prev = pt_this;
|
||||
}
|
||||
for (size_t i = 1; i < resampled_point_parameters.size(); ++i)
|
||||
resampled_point_parameters[i].curve_parameter += resampled_point_parameters[i - 1].curve_parameter;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static inline void smooth_compensation(std::vector<float> &compensation, float strength, size_t num_iterations)
{
    std::vector<float> out(compensation);
    for (size_t iter = 0; iter < num_iterations; ++ iter) {
        for (size_t i = 0; i < compensation.size(); ++ i) {
            float prev = (i == 0) ? compensation.back() : compensation[i - 1];
            float next = (i + 1 == compensation.size()) ? compensation.front() : compensation[i + 1];
            float laplacian = compensation[i] * (1.f - strength) + 0.5f * strength * (prev + next);
            // Compensations are negative. Only apply the laplacian if it leads to lower compensation.
            out[i] = std::max(laplacian, compensation[i]);
        }
        out.swap(compensation);
    }
}

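// --- Illustrative sketch only, not part of this commit. ---
// With compensation = { -0.2f, 0.f, -0.2f } and strength = 0.5f, one iteration of
// smooth_compensation() above gives:
//   i = 0: laplacian = -0.2*0.5 + 0.25*(-0.2 + 0.0) = -0.15 -> max(-0.15, -0.2) = -0.15 (weakened)
//   i = 1: laplacian =  0.0*0.5 + 0.25*(-0.2 - 0.2) = -0.10 -> max(-0.10,  0.0) =  0.00 (kept)
// i.e. the smoothing may only make a compensation weaker (closer to zero), never stronger.
#if 0
static void smooth_compensation_example()
{
    std::vector<float> c { -0.2f, 0.f, -0.2f };
    smooth_compensation(c, 0.5f, 1);
    assert(std::abs(c[0] + 0.15f) < 1e-6f && c[1] == 0.f);
}
#endif
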
template<typename INDEX_TYPE, typename CONTAINER>
static inline INDEX_TYPE prev_idx_cyclic(INDEX_TYPE idx, const CONTAINER &container)
{
    if (idx == 0)
        idx = INDEX_TYPE(container.size());
    return -- idx;
}

template<typename INDEX_TYPE, typename CONTAINER>
static inline INDEX_TYPE next_idx_cyclic(INDEX_TYPE idx, const CONTAINER &container)
{
    if (++ idx == INDEX_TYPE(container.size()))
        idx = 0;
    return idx;
}

template<class T, class U = T>
static inline T exchange(T& obj, U&& new_value)
{
    T old_value = std::move(obj);
    obj = std::forward<U>(new_value);
    return old_value;
}

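// --- Illustrative sketch only, not part of this commit. ---
// The helpers above walk a closed contour in either direction without stepping out
// of range; for a 4 point contour the neighbour indices simply wrap around.
#if 0
static void cyclic_index_example()
{
    Points contour(4, Point(0, 0));
    assert(prev_idx_cyclic(0, contour) == 3);   // wraps backwards
    assert(next_idx_cyclic(3, contour) == 0);   // wraps forwards
    int j = 2;
    int jprev = exchange(j, next_idx_cyclic(j, contour)); // advance j, remember the old index
    assert(jprev == 2 && j == 3);
}
#endif
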
static inline void smooth_compensation_banded(const Points &contour, float band, std::vector<float> &compensation, float strength, size_t num_iterations)
{
    assert(contour.size() == compensation.size());
    assert(contour.size() > 2);
    std::vector<float> out(compensation);
    float dist_min2 = band * band;
    static constexpr bool use_min = false;
    for (size_t iter = 0; iter < num_iterations; ++ iter) {
        for (int i = 0; i < int(compensation.size()); ++ i) {
            const Vec2f pthis = contour[i].cast<float>();

            int j = prev_idx_cyclic(i, contour);
            Vec2f pprev = contour[j].cast<float>();
            float prev = compensation[j];
            float l2 = (pthis - pprev).squaredNorm();
            if (l2 < dist_min2) {
                float l = sqrt(l2);
                int jprev = exchange(j, prev_idx_cyclic(j, contour));
                while (j != i) {
                    const Vec2f pp = contour[j].cast<float>();
                    const float lthis = (pp - pprev).norm();
                    const float lnext = l + lthis;
                    if (lnext > band) {
                        // Interpolate the compensation value.
                        prev = use_min ?
                            std::min(prev, lerp(compensation[jprev], compensation[j], (band - l) / lthis)) :
                            lerp(compensation[jprev], compensation[j], (band - l) / lthis);
                        break;
                    }
                    prev = use_min ? std::min(prev, compensation[j]) : compensation[j];
                    pprev = pp;
                    l = lnext;
                    jprev = exchange(j, prev_idx_cyclic(j, contour));
                }
            }

            j = next_idx_cyclic(i, contour);
            pprev = contour[j].cast<float>();
            float next = compensation[j];
            l2 = (pprev - pthis).squaredNorm();
            if (l2 < dist_min2) {
                float l = sqrt(l2);
                int jprev = exchange(j, next_idx_cyclic(j, contour));
                while (j != i) {
                    const Vec2f pp = contour[j].cast<float>();
                    const float lthis = (pp - pprev).norm();
                    const float lnext = l + lthis;
                    if (lnext > band) {
                        // Interpolate the compensation value.
                        next = use_min ?
                            std::min(next, lerp(compensation[jprev], compensation[j], (band - l) / lthis)) :
                            lerp(compensation[jprev], compensation[j], (band - l) / lthis);
                        break;
                    }
                    next = use_min ? std::min(next, compensation[j]) : compensation[j];
                    pprev = pp;
                    l = lnext;
                    jprev = exchange(j, next_idx_cyclic(j, contour));
                }
            }

            float laplacian = compensation[i] * (1.f - strength) + 0.5f * strength * (prev + next);
            // Compensations are negative. Only apply the laplacian if it leads to lower compensation.
            out[i] = std::max(laplacian, compensation[i]);
        }
        out.swap(compensation);
    }
}

ExPolygon elephant_foot_compensation(const ExPolygon &input_expoly, const Flow &external_perimeter_flow, const double compensation)
{
    // The contour shall be wide enough to apply the external perimeter plus compensation on both sides.
    double min_contour_width = double(external_perimeter_flow.scaled_width() + external_perimeter_flow.scaled_spacing());
    double scaled_compensation = scale_(compensation);
    double min_contour_width_compensated = min_contour_width + 2. * scaled_compensation;
    // Make the search radius a bit larger for the averaging in contour_distance over a fan of rays to work.
    double search_radius = min_contour_width_compensated + min_contour_width * 0.5;

    EdgeGrid::Grid grid;
    ExPolygon simplified = input_expoly.simplify(SCALED_EPSILON).front();
    BoundingBox bbox = get_extents(simplified.contour);
    bbox.offset(SCALED_EPSILON);
    grid.set_bbox(bbox);
    grid.create(simplified, coord_t(0.7 * search_radius));
    std::vector<std::vector<float>> deltas;
    deltas.reserve(simplified.holes.size() + 1);
    ExPolygon resampled(simplified);
    double resample_interval = scale_(0.5);
    for (size_t idx_contour = 0; idx_contour <= simplified.holes.size(); ++ idx_contour) {
        Polygon &poly = (idx_contour == 0) ? resampled.contour : resampled.holes[idx_contour - 1];
        std::vector<ResampledPoint> resampled_point_parameters;
        poly.points = resample_polygon(poly.points, resample_interval, resampled_point_parameters);
        std::vector<float> dists = contour_distance(grid, idx_contour, poly.points, resampled_point_parameters, search_radius);
        for (float &d : dists) {
            // printf("Point %d, Distance: %lf\n", int(&d - dists.data()), unscale<double>(d));
            // Convert contour width to available compensation distance.
            if (d < min_contour_width)
                d = 0.f;
            else if (d > min_contour_width_compensated)
                d = - float(scaled_compensation);
            else
                d = - (d - float(min_contour_width)) / 2.f;
            assert(d >= - float(scaled_compensation) && d <= 0.f);
        }
        // smooth_compensation(dists, 0.4f, 10);
        smooth_compensation_banded(poly.points, float(0.8 * resample_interval), dists, 0.3f, 3);
        deltas.emplace_back(dists);
    }

    ExPolygons out = variable_offset_inner_ex(resampled, deltas, 2.);
    return out.front();
}

ExPolygons elephant_foot_compensation(const ExPolygons &input, const Flow &external_perimeter_flow, const double compensation)
{
    ExPolygons out;
    out.reserve(input.size());
    for (const ExPolygon &expoly : input)
        out.emplace_back(elephant_foot_compensation(expoly, external_perimeter_flow, compensation));
    return out;
}

} // namespace Slic3r

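// --- Illustrative sketch only, not part of this commit. ---
// A possible call site: shrinking the first layer slices before perimeter generation.
// The 0.2 mm compensation value and the helper below are assumptions made for the
// sake of the example; the real caller and parameters are defined elsewhere.
#if 0
static ExPolygons compensate_first_layer(const ExPolygons &first_layer_slices, const Flow &external_perimeter_flow)
{
    // Pull every contour inward by up to 0.2 mm, but only where the part is wide
    // enough to still fit the external perimeter after compensation.
    return elephant_foot_compensation(first_layer_slices, external_perimeter_flow, 0.2);
}
#endif
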
16 src/libslic3r/ElephantFootCompensation.hpp Normal file

@ -0,0 +1,16 @@
#ifndef slic3r_ElephantFootCompensation_hpp_
#define slic3r_ElephantFootCompensation_hpp_

#include "libslic3r.h"
#include <vector>

namespace Slic3r {

class Flow;

ExPolygon elephant_foot_compensation(const ExPolygon &input, const Flow &external_perimeter_flow, const double compensation);
ExPolygons elephant_foot_compensation(const ExPolygons &input, const Flow &external_perimeter_flow, const double compensation);

} // Slic3r

#endif /* slic3r_ElephantFootCompensation_hpp_ */

@ -18,8 +18,18 @@ class ExPolygon
|
|||
{
|
||||
public:
|
||||
ExPolygon() {}
|
||||
ExPolygon(const ExPolygon &other) : contour(other.contour), holes(other.holes) {}
|
||||
ExPolygon(const ExPolygon &other) : contour(other.contour), holes(other.holes) {}
|
||||
ExPolygon(ExPolygon &&other) : contour(std::move(other.contour)), holes(std::move(other.holes)) {}
|
||||
explicit ExPolygon(const Polygon &contour) : contour(contour) {}
|
||||
explicit ExPolygon(Polygon &&contour) : contour(std::move(contour)) {}
|
||||
explicit ExPolygon(const Points &contour) : contour(contour) {}
|
||||
explicit ExPolygon(Points &&contour) : contour(std::move(contour)) {}
|
||||
explicit ExPolygon(const Polygon &contour, const Polygon &hole) : contour(contour) { holes.emplace_back(hole); }
|
||||
explicit ExPolygon(Polygon &&contour, Polygon &&hole) : contour(std::move(contour)) { holes.emplace_back(std::move(hole)); }
|
||||
explicit ExPolygon(const Points &contour, const Points &hole) : contour(contour) { holes.emplace_back(hole); }
|
||||
explicit ExPolygon(Points &&contour, Polygon &&hole) : contour(std::move(contour)) { holes.emplace_back(std::move(hole)); }
|
||||
ExPolygon(std::initializer_list<Point> contour) : contour(contour) {}
|
||||
ExPolygon(std::initializer_list<Point> contour, std::initializer_list<Point> hole) : contour(contour), holes({ hole }) {}
|
||||
|
||||
ExPolygon& operator=(const ExPolygon &other) { contour = other.contour; holes = other.holes; return *this; }
|
||||
ExPolygon& operator=(ExPolygon &&other) { contour = std::move(other.contour); holes = std::move(other.holes); return *this; }
|
||||
|
@ -67,8 +77,16 @@ public:
|
|||
void triangulate_pp(Points *triangles) const;
|
||||
void triangulate_p2t(Polygons* polygons) const;
|
||||
Lines lines() const;
|
||||
|
||||
// Number of contours (outer contour with holes).
|
||||
size_t num_contours() const { return this->holes.size() + 1; }
|
||||
Polygon& contour_or_hole(size_t idx) { return (idx == 0) ? this->contour : this->holes[idx - 1]; }
|
||||
const Polygon& contour_or_hole(size_t idx) const { return (idx == 0) ? this->contour : this->holes[idx - 1]; }
|
||||
};
|
||||
|
||||
inline bool operator==(const ExPolygon &lhs, const ExPolygon &rhs) { return lhs.contour == rhs.contour && lhs.holes == rhs.holes; }
|
||||
inline bool operator!=(const ExPolygon &lhs, const ExPolygon &rhs) { return lhs.contour != rhs.contour || lhs.holes != rhs.holes; }
|
||||
|
||||
// Count the number of polygons stored inside the vector of expolygons.
|
||||
// Useful for allocating space for polygons when converting expolygons to polygons.
|
||||
inline size_t number_polygons(const ExPolygons &expolys)
@ -293,6 +311,15 @@ inline bool expolygons_contain(ExPolygons &expolys, const Point &pt)
|
|||
return false;
|
||||
}
|
||||
|
||||
inline ExPolygons expolygons_simplify(const ExPolygons &expolys, double tolerance)
|
||||
{
|
||||
ExPolygons out;
|
||||
out.reserve(expolys.size());
|
||||
for (const ExPolygon &exp : expolys)
|
||||
exp.simplify(tolerance, &out);
|
||||
return out;
|
||||
}
|
||||
|
||||
extern BoundingBox get_extents(const ExPolygon &expolygon);
|
||||
extern BoundingBox get_extents(const ExPolygons &expolygons);
|
||||
extern BoundingBox get_extents_rotated(const ExPolygon &poly, double angle);
|
|
|||
ExPolygonCollection::operator Points() const
|
||||
{
|
||||
Points points;
|
||||
Polygons pp = *this;
|
||||
Polygons pp = (Polygons)*this;
|
||||
for (Polygons::const_iterator poly = pp.begin(); poly != pp.end(); ++poly) {
|
||||
for (Points::const_iterator point = poly->points.begin(); point != poly->points.end(); ++point)
|
||||
points.push_back(*point);
@ -13,15 +13,15 @@ typedef std::vector<ExPolygonCollection> ExPolygonCollections;
|
|||
|
||||
class ExPolygonCollection
|
||||
{
|
||||
public:
|
||||
public:
|
||||
ExPolygons expolygons;
|
||||
|
||||
ExPolygonCollection() {};
|
||||
ExPolygonCollection(const ExPolygon &expolygon);
|
||||
ExPolygonCollection(const ExPolygons &expolygons) : expolygons(expolygons) {};
|
||||
operator Points() const;
|
||||
operator Polygons() const;
|
||||
operator ExPolygons&();
|
||||
ExPolygonCollection() {}
|
||||
explicit ExPolygonCollection(const ExPolygon &expolygon);
|
||||
explicit ExPolygonCollection(const ExPolygons &expolygons) : expolygons(expolygons) {}
|
||||
explicit operator Points() const;
|
||||
explicit operator Polygons() const;
|
||||
explicit operator ExPolygons&();
|
||||
void scale(double factor);
|
||||
void translate(double x, double y);
|
||||
void rotate(double angle, const Point ¢er);
|
@ -14,12 +14,12 @@ namespace Slic3r {
|
|||
|
||||
void ExtrusionPath::intersect_expolygons(const ExPolygonCollection &collection, ExtrusionEntityCollection* retval) const
|
||||
{
|
||||
this->_inflate_collection(intersection_pl(this->polyline, collection), retval);
|
||||
this->_inflate_collection(intersection_pl(this->polyline, (Polygons)collection), retval);
|
||||
}
|
||||
|
||||
void ExtrusionPath::subtract_expolygons(const ExPolygonCollection &collection, ExtrusionEntityCollection* retval) const
|
||||
{
|
||||
this->_inflate_collection(diff_pl(this->polyline, collection), retval);
|
||||
this->_inflate_collection(diff_pl(this->polyline, (Polygons)collection), retval);
|
||||
}
|
||||
|
||||
void ExtrusionPath::clip_end(double distance)
@ -5,6 +5,8 @@
|
|||
#include "Polygon.hpp"
|
||||
#include "Polyline.hpp"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
namespace Slic3r {
|
||||
|
||||
class ExPolygonCollection;
|
||||
|
@ -12,7 +14,7 @@ class ExtrusionEntityCollection;
|
|||
class Extruder;
|
||||
|
||||
// Each ExtrusionRole value identifies a distinct set of { extruder, speed }
|
||||
enum ExtrusionRole {
|
||||
enum ExtrusionRole : uint8_t {
|
||||
erNone,
|
||||
erPerimeter,
|
||||
erExternalPerimeter,
|
||||
|
@ -79,8 +81,8 @@ public:
|
|||
virtual ExtrusionEntity* clone_move() = 0;
|
||||
virtual ~ExtrusionEntity() {}
|
||||
virtual void reverse() = 0;
|
||||
virtual Point first_point() const = 0;
|
||||
virtual Point last_point() const = 0;
|
||||
virtual const Point& first_point() const = 0;
|
||||
virtual const Point& last_point() const = 0;
|
||||
// Produce a list of 2D polygons covered by the extruded paths, offsetted by the extrusion width.
|
||||
// Increase the offset by scaled_epsilon to achieve an overlap, so a union will produce no gaps.
|
||||
virtual void polygons_covered_by_width(Polygons &out, const float scaled_epsilon) const = 0;
|
||||
|
@ -115,30 +117,23 @@ public:
|
|||
float width;
|
||||
// Height of the extrusion, used for visualization purposes.
|
||||
float height;
|
||||
// Feedrate of the extrusion, used for visualization purposes.
|
||||
float feedrate;
|
||||
// Id of the extruder, used for visualization purposes.
|
||||
unsigned int extruder_id;
|
||||
// Id of the color, used for visualization purposes in the color printing case.
|
||||
unsigned int cp_color_id;
|
||||
|
||||
ExtrusionPath(ExtrusionRole role) : mm3_per_mm(-1), width(-1), height(-1), feedrate(0.0f), extruder_id(0), cp_color_id(0), m_role(role) {}
|
||||
ExtrusionPath(ExtrusionRole role, double mm3_per_mm, float width, float height) : mm3_per_mm(mm3_per_mm), width(width), height(height), feedrate(0.0f), extruder_id(0), cp_color_id(0), m_role(role) {}
|
||||
ExtrusionPath(const ExtrusionPath &rhs) : polyline(rhs.polyline), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), feedrate(rhs.feedrate), extruder_id(rhs.extruder_id), cp_color_id(rhs.cp_color_id), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(const Polyline &polyline, const ExtrusionPath &rhs) : polyline(polyline), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), feedrate(rhs.feedrate), extruder_id(rhs.extruder_id), cp_color_id(rhs.cp_color_id), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(ExtrusionPath &&rhs) : polyline(std::move(rhs.polyline)), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), feedrate(rhs.feedrate), extruder_id(rhs.extruder_id), cp_color_id(rhs.cp_color_id), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(Polyline &&polyline, const ExtrusionPath &rhs) : polyline(std::move(polyline)), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), feedrate(rhs.feedrate), extruder_id(rhs.extruder_id), cp_color_id(rhs.cp_color_id), m_role(rhs.m_role) {}
|
||||
// ExtrusionPath(ExtrusionRole role, const Flow &flow) : m_role(role), mm3_per_mm(flow.mm3_per_mm()), width(flow.width), height(flow.height), feedrate(0.0f), extruder_id(0) {};
|
||||
ExtrusionPath(ExtrusionRole role) : mm3_per_mm(-1), width(-1), height(-1), m_role(role) {};
|
||||
ExtrusionPath(ExtrusionRole role, double mm3_per_mm, float width, float height) : mm3_per_mm(mm3_per_mm), width(width), height(height), m_role(role) {};
|
||||
ExtrusionPath(const ExtrusionPath& rhs) : polyline(rhs.polyline), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(ExtrusionPath&& rhs) : polyline(std::move(rhs.polyline)), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(const Polyline &polyline, const ExtrusionPath &rhs) : polyline(polyline), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), m_role(rhs.m_role) {}
|
||||
ExtrusionPath(Polyline &&polyline, const ExtrusionPath &rhs) : polyline(std::move(polyline)), mm3_per_mm(rhs.mm3_per_mm), width(rhs.width), height(rhs.height), m_role(rhs.m_role) {}
|
||||
|
||||
ExtrusionPath& operator=(const ExtrusionPath &rhs) { m_role = rhs.m_role; this->mm3_per_mm = rhs.mm3_per_mm; this->width = rhs.width; this->height = rhs.height; this->feedrate = rhs.feedrate; this->extruder_id = rhs.extruder_id; this->cp_color_id = rhs.cp_color_id; this->polyline = rhs.polyline; return *this; }
|
||||
ExtrusionPath& operator=(ExtrusionPath &&rhs) { m_role = rhs.m_role; this->mm3_per_mm = rhs.mm3_per_mm; this->width = rhs.width; this->height = rhs.height; this->feedrate = rhs.feedrate; this->extruder_id = rhs.extruder_id; this->cp_color_id = rhs.cp_color_id; this->polyline = std::move(rhs.polyline); return *this; }
|
||||
ExtrusionPath& operator=(const ExtrusionPath& rhs) { m_role = rhs.m_role; this->mm3_per_mm = rhs.mm3_per_mm; this->width = rhs.width; this->height = rhs.height; this->polyline = rhs.polyline; return *this; }
|
||||
ExtrusionPath& operator=(ExtrusionPath&& rhs) { m_role = rhs.m_role; this->mm3_per_mm = rhs.mm3_per_mm; this->width = rhs.width; this->height = rhs.height; this->polyline = std::move(rhs.polyline); return *this; }
|
||||
|
||||
ExtrusionEntity* clone() const override { return new ExtrusionPath(*this); }
|
||||
// Create a new object, initialize it with this object using the move semantics.
|
||||
ExtrusionEntity* clone_move() override { return new ExtrusionPath(std::move(*this)); }
|
||||
void reverse() override { this->polyline.reverse(); }
|
||||
Point first_point() const override { return this->polyline.points.front(); }
|
||||
Point last_point() const override { return this->polyline.points.back(); }
|
||||
const Point& first_point() const override { return this->polyline.points.front(); }
|
||||
const Point& last_point() const override { return this->polyline.points.back(); }
|
||||
size_t size() const { return this->polyline.size(); }
|
||||
bool empty() const { return this->polyline.empty(); }
|
||||
bool is_closed() const { return ! this->empty() && this->polyline.points.front() == this->polyline.points.back(); }
|
||||
|
@ -198,8 +193,8 @@ public:
|
|||
// Create a new object, initialize it with this object using the move semantics.
|
||||
ExtrusionEntity* clone_move() override { return new ExtrusionMultiPath(std::move(*this)); }
|
||||
void reverse() override;
|
||||
Point first_point() const override { return this->paths.front().polyline.points.front(); }
|
||||
Point last_point() const override { return this->paths.back().polyline.points.back(); }
|
||||
const Point& first_point() const override { return this->paths.front().polyline.points.front(); }
|
||||
const Point& last_point() const override { return this->paths.back().polyline.points.back(); }
|
||||
double length() const override;
|
||||
ExtrusionRole role() const override { return this->paths.empty() ? erNone : this->paths.front().role(); }
|
||||
// Produce a list of 2D polygons covered by the extruded paths, offsetted by the extrusion width.
|
||||
|
@ -241,8 +236,8 @@ public:
|
|||
bool make_clockwise();
|
||||
bool make_counter_clockwise();
|
||||
void reverse() override;
|
||||
Point first_point() const override { return this->paths.front().polyline.points.front(); }
|
||||
Point last_point() const override { assert(first_point() == this->paths.back().polyline.points.back()); return first_point(); }
|
||||
const Point& first_point() const override { return this->paths.front().polyline.points.front(); }
|
||||
const Point& last_point() const override { assert(this->first_point() == this->paths.back().polyline.points.back()); return this->first_point(); }
|
||||
Polygon polygon() const;
|
||||
double length() const override;
|
||||
bool split_at_vertex(const Point &point);
@ -1,4 +1,5 @@
|
|||
#include "ExtrusionEntityCollection.hpp"
|
||||
#include "ShortestPath.hpp"
|
||||
#include <algorithm>
|
||||
#include <cmath>
|
||||
#include <map>
|
||||
|
@ -16,7 +17,6 @@ ExtrusionEntityCollection& ExtrusionEntityCollection::operator=(const ExtrusionE
|
|||
this->entities = other.entities;
|
||||
for (size_t i = 0; i < this->entities.size(); ++i)
|
||||
this->entities[i] = this->entities[i]->clone();
|
||||
this->orig_indices = other.orig_indices;
|
||||
this->no_sort = other.no_sort;
|
||||
return *this;
|
||||
}
|
||||
|
@ -24,7 +24,6 @@ ExtrusionEntityCollection& ExtrusionEntityCollection::operator=(const ExtrusionE
|
|||
void ExtrusionEntityCollection::swap(ExtrusionEntityCollection &c)
|
||||
{
|
||||
std::swap(this->entities, c.entities);
|
||||
std::swap(this->orig_indices, c.orig_indices);
|
||||
std::swap(this->no_sort, c.no_sort);
|
||||
}
|
||||
|
||||
|
@ -75,79 +74,31 @@ void ExtrusionEntityCollection::remove(size_t i)
|
|||
this->entities.erase(this->entities.begin() + i);
|
||||
}
|
||||
|
||||
ExtrusionEntityCollection ExtrusionEntityCollection::chained_path(bool no_reverse, ExtrusionRole role) const
|
||||
ExtrusionEntityCollection ExtrusionEntityCollection::chained_path_from(const Point &start_near, ExtrusionRole role) const
|
||||
{
|
||||
ExtrusionEntityCollection coll;
|
||||
this->chained_path(&coll, no_reverse, role);
|
||||
return coll;
|
||||
}
|
||||
|
||||
void ExtrusionEntityCollection::chained_path(ExtrusionEntityCollection* retval, bool no_reverse, ExtrusionRole role, std::vector<size_t>* orig_indices) const
|
||||
{
|
||||
if (this->entities.empty()) return;
|
||||
this->chained_path_from(this->entities.front()->first_point(), retval, no_reverse, role, orig_indices);
|
||||
}
|
||||
|
||||
ExtrusionEntityCollection ExtrusionEntityCollection::chained_path_from(Point start_near, bool no_reverse, ExtrusionRole role) const
|
||||
{
|
||||
ExtrusionEntityCollection coll;
|
||||
this->chained_path_from(start_near, &coll, no_reverse, role);
|
||||
return coll;
|
||||
}
|
||||
|
||||
void ExtrusionEntityCollection::chained_path_from(Point start_near, ExtrusionEntityCollection* retval, bool no_reverse, ExtrusionRole role, std::vector<size_t>* orig_indices) const
|
||||
{
|
||||
if (this->no_sort) {
|
||||
*retval = *this;
|
||||
return;
|
||||
}
|
||||
|
||||
retval->entities.reserve(this->entities.size());
|
||||
retval->orig_indices.reserve(this->entities.size());
|
||||
|
||||
// if we're asked to return the original indices, build a map
|
||||
std::map<ExtrusionEntity*,size_t> indices_map;
|
||||
|
||||
ExtrusionEntitiesPtr my_paths;
|
||||
for (ExtrusionEntity * const &entity_src : this->entities) {
|
||||
if (role != erMixed) {
|
||||
// The caller wants only paths with a specific extrusion role.
|
||||
auto role2 = entity_src->role();
|
||||
if (role != role2) {
|
||||
// This extrusion entity does not match the role asked.
|
||||
assert(role2 != erMixed);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
ExtrusionEntity *entity = entity_src->clone();
|
||||
my_paths.push_back(entity);
|
||||
if (orig_indices != nullptr)
|
||||
indices_map[entity] = &entity_src - &this->entities.front();
|
||||
}
|
||||
|
||||
Points endpoints;
|
||||
for (const ExtrusionEntity *entity : my_paths) {
|
||||
endpoints.push_back(entity->first_point());
|
||||
endpoints.push_back((no_reverse || ! entity->can_reverse()) ?
|
||||
entity->first_point() : entity->last_point());
|
||||
}
|
||||
|
||||
while (! my_paths.empty()) {
|
||||
// find nearest point
|
||||
int start_index = start_near.nearest_point_index(endpoints);
|
||||
int path_index = start_index/2;
|
||||
ExtrusionEntity* entity = my_paths.at(path_index);
|
||||
// never reverse loops, since it's pointless for chained path and callers might depend on orientation
|
||||
if (start_index % 2 && !no_reverse && entity->can_reverse())
|
||||
entity->reverse();
|
||||
retval->entities.push_back(my_paths.at(path_index));
|
||||
if (orig_indices != nullptr)
|
||||
orig_indices->push_back(indices_map[entity]);
|
||||
my_paths.erase(my_paths.begin() + path_index);
|
||||
endpoints.erase(endpoints.begin() + 2*path_index, endpoints.begin() + 2*path_index + 2);
|
||||
start_near = retval->entities.back()->last_point();
|
||||
}
|
||||
ExtrusionEntityCollection out;
|
||||
if (this->no_sort) {
|
||||
out = *this;
|
||||
} else {
|
||||
if (role == erMixed)
|
||||
out = *this;
|
||||
else {
|
||||
for (const ExtrusionEntity *ee : this->entities) {
|
||||
if (role != erMixed) {
|
||||
// The caller wants only paths with a specific extrusion role.
|
||||
auto role2 = ee->role();
|
||||
if (role != role2) {
|
||||
// This extrusion entity does not match the role asked.
|
||||
assert(role2 != erMixed);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
out.entities.emplace_back(ee->clone());
|
||||
}
|
||||
}
|
||||
chain_and_reorder_extrusion_entities(out.entities, &start_near);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
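// --- Illustrative sketch only, not part of this commit. ---
// With the rewritten interface, ordering a collection now takes just a start point
// (and optionally a role filter); the helper below is an assumed example of a typical
// call, not code from this changeset.
#if 0
void example_order_collection(const ExtrusionEntityCollection &collection, const Point &nozzle_position)
{
    // Clone the entities (filtered to perimeters here) and greedily reorder / reverse
    // them starting from the current nozzle position.
    ExtrusionEntityCollection ordered = collection.chained_path_from(nozzle_position, erPerimeter);
    for (const ExtrusionEntity *ee : ordered.entities) {
        // ... generate G-code for ee ...
        (void)ee;
    }
}
#endif
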
|
||||
void ExtrusionEntityCollection::polygons_covered_by_width(Polygons &out, const float scaled_epsilon) const
|
||||
|
@ -175,18 +126,26 @@ size_t ExtrusionEntityCollection::items_count() const
|
|||
}
|
||||
|
||||
// Returns a single vector of pointers to all non-collection items contained in this one.
|
||||
ExtrusionEntityCollection ExtrusionEntityCollection::flatten() const
|
||||
ExtrusionEntityCollection ExtrusionEntityCollection::flatten(bool preserve_ordering) const
|
||||
{
|
||||
struct Flatten {
|
||||
Flatten(bool preserve_ordering) : preserve_ordering(preserve_ordering) {}
|
||||
ExtrusionEntityCollection out;
|
||||
bool preserve_ordering;
|
||||
void recursive_do(const ExtrusionEntityCollection &collection) {
|
||||
for (const ExtrusionEntity* entity : collection.entities)
|
||||
if (entity->is_collection())
|
||||
this->recursive_do(*static_cast<const ExtrusionEntityCollection*>(entity));
|
||||
else
|
||||
out.append(*entity);
|
||||
if (collection.no_sort && preserve_ordering) {
|
||||
// Don't flatten whatever happens below this level.
|
||||
out.append(collection);
|
||||
} else {
|
||||
for (const ExtrusionEntity *entity : collection.entities)
|
||||
if (entity->is_collection())
|
||||
this->recursive_do(*static_cast<const ExtrusionEntityCollection*>(entity));
|
||||
else
|
||||
out.append(*entity);
|
||||
}
|
||||
}
|
||||
} flatten;
|
||||
} flatten(preserve_ordering);
|
||||
|
||||
flatten.recursive_do(*this);
|
||||
return flatten.out;
|
||||
}
|
||||
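// --- Illustrative sketch only, not part of this commit. ---
// Typical use of flatten(): iterate over the underlying extrusions regardless of how
// deeply they are nested. With preserve_ordering = true, no_sort collections are kept
// intact instead of being dissolved. The loop below is an assumed example.
#if 0
void example_iterate_extrusions(const ExtrusionEntityCollection &collection)
{
    for (const ExtrusionEntity *ee : collection.flatten().entities) {
        assert(! ee->is_collection());
        // ... process the individual path or loop ...
    }
}
#endif
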
|
|
|
@ -14,19 +14,18 @@ public:
|
|||
ExtrusionEntity* clone_move() override { return new ExtrusionEntityCollection(std::move(*this)); }
|
||||
|
||||
ExtrusionEntitiesPtr entities; // we own these entities
|
||||
std::vector<size_t> orig_indices; // handy for XS
|
||||
bool no_sort;
|
||||
ExtrusionEntityCollection(): no_sort(false) {};
|
||||
ExtrusionEntityCollection(const ExtrusionEntityCollection &other) : orig_indices(other.orig_indices), no_sort(other.no_sort) { this->append(other.entities); }
|
||||
ExtrusionEntityCollection(ExtrusionEntityCollection &&other) : entities(std::move(other.entities)), orig_indices(std::move(other.orig_indices)), no_sort(other.no_sort) {}
|
||||
ExtrusionEntityCollection(): no_sort(false) {}
|
||||
ExtrusionEntityCollection(const ExtrusionEntityCollection &other) : no_sort(other.no_sort) { this->append(other.entities); }
|
||||
ExtrusionEntityCollection(ExtrusionEntityCollection &&other) : entities(std::move(other.entities)), no_sort(other.no_sort) {}
|
||||
explicit ExtrusionEntityCollection(const ExtrusionPaths &paths);
|
||||
ExtrusionEntityCollection& operator=(const ExtrusionEntityCollection &other);
|
||||
ExtrusionEntityCollection& operator=(ExtrusionEntityCollection &&other)
|
||||
{ this->entities = std::move(other.entities); this->orig_indices = std::move(other.orig_indices); this->no_sort = other.no_sort; return *this; }
|
||||
ExtrusionEntityCollection& operator=(ExtrusionEntityCollection &&other)
|
||||
{ this->entities = std::move(other.entities); this->no_sort = other.no_sort; return *this; }
|
||||
~ExtrusionEntityCollection() { clear(); }
|
||||
explicit operator ExtrusionPaths() const;
|
||||
|
||||
bool is_collection() const { return true; };
|
||||
bool is_collection() const { return true; }
|
||||
ExtrusionRole role() const override {
|
||||
ExtrusionRole out = erNone;
|
||||
for (const ExtrusionEntity *ee : entities) {
|
||||
|
@ -35,8 +34,8 @@ public:
|
|||
}
|
||||
return out;
|
||||
}
|
||||
bool can_reverse() const { return !this->no_sort; };
|
||||
bool empty() const { return this->entities.empty(); };
|
||||
bool can_reverse() const { return !this->no_sort; }
|
||||
bool empty() const { return this->entities.empty(); }
|
||||
void clear();
|
||||
void swap (ExtrusionEntityCollection &c);
|
||||
void append(const ExtrusionEntity &entity) { this->entities.emplace_back(entity.clone()); }
|
||||
|
@ -66,13 +65,10 @@ public:
|
|||
}
|
||||
void replace(size_t i, const ExtrusionEntity &entity);
|
||||
void remove(size_t i);
|
||||
ExtrusionEntityCollection chained_path(bool no_reverse = false, ExtrusionRole role = erMixed) const;
|
||||
void chained_path(ExtrusionEntityCollection* retval, bool no_reverse = false, ExtrusionRole role = erMixed, std::vector<size_t>* orig_indices = nullptr) const;
|
||||
ExtrusionEntityCollection chained_path_from(Point start_near, bool no_reverse = false, ExtrusionRole role = erMixed) const;
|
||||
void chained_path_from(Point start_near, ExtrusionEntityCollection* retval, bool no_reverse = false, ExtrusionRole role = erMixed, std::vector<size_t>* orig_indices = nullptr) const;
|
||||
ExtrusionEntityCollection chained_path_from(const Point &start_near, ExtrusionRole role = erMixed) const;
|
||||
void reverse();
|
||||
Point first_point() const { return this->entities.front()->first_point(); }
|
||||
Point last_point() const { return this->entities.back()->last_point(); }
|
||||
const Point& first_point() const { return this->entities.front()->first_point(); }
|
||||
const Point& last_point() const { return this->entities.back()->last_point(); }
|
||||
// Produce a list of 2D polygons covered by the extruded paths, offsetted by the extrusion width.
|
||||
// Increase the offset by scaled_epsilon to achieve an overlap, so a union will produce no gaps.
|
||||
void polygons_covered_by_width(Polygons &out, const float scaled_epsilon) const override;
|
||||
|
@ -85,7 +81,10 @@ public:
|
|||
Polygons polygons_covered_by_spacing(const float scaled_epsilon = 0.f) const
|
||||
{ Polygons out; this->polygons_covered_by_spacing(out, scaled_epsilon); return out; }
|
||||
size_t items_count() const;
|
||||
ExtrusionEntityCollection flatten() const;
|
||||
/// Returns a flattened copy of this ExtrusionEntityCollection. That is, all of the items in its entities vector are not collections.
|
||||
/// You should be iterating over flatten().entities if you are interested in the underlying ExtrusionEntities (and don't care about hierarchy).
|
||||
/// \param preserve_ordering If true, a collection marked no_sort is appended as a whole instead of being flattened (default: false).
|
||||
ExtrusionEntityCollection flatten(bool preserve_ordering = false) const;
|
||||
double min_mm3_per_mm() const;
|
||||
double total_volume() const override { double volume=0.; for (const auto& ent : entities) volume+=ent->total_volume(); return volume; }
@ -29,8 +29,6 @@ public:
|
|||
FillParams params;
|
||||
};
|
||||
|
||||
void make_fill(LayerRegion &layerm, ExtrusionEntityCollection &out);
|
||||
|
||||
} // namespace Slic3r
|
||||
|
||||
#endif // slic3r_Fill_hpp_
@ -1,5 +1,5 @@
|
|||
#include "../ClipperUtils.hpp"
|
||||
#include "../PolylineCollection.hpp"
|
||||
#include "../ShortestPath.hpp"
|
||||
#include "../Surface.hpp"
|
||||
|
||||
#include "Fill3DHoneycomb.hpp"
|
||||
|
@ -158,46 +158,18 @@ void Fill3DHoneycomb::_fill_surface_single(
|
|||
((this->layer_id/thickness_layers) % 2) + 1);
|
||||
|
||||
// move pattern in place
|
||||
for (Polylines::iterator it = polylines.begin(); it != polylines.end(); ++ it)
|
||||
it->translate(bb.min(0), bb.min(1));
|
||||
for (Polyline &pl : polylines)
|
||||
pl.translate(bb.min);
|
||||
|
||||
// clip pattern to boundaries
|
||||
polylines = intersection_pl(polylines, (Polygons)expolygon);
|
||||
// clip pattern to boundaries, chain the clipped polylines
|
||||
Polylines polylines_chained = chain_polylines(intersection_pl(polylines, to_polygons(expolygon)));
|
||||
|
||||
// connect lines
|
||||
if (! params.dont_connect && ! polylines.empty()) { // prevent calling leftmost_point() on empty collections
|
||||
ExPolygon expolygon_off;
|
||||
{
|
||||
ExPolygons expolygons_off = offset_ex(expolygon, SCALED_EPSILON);
|
||||
if (! expolygons_off.empty()) {
|
||||
// When expanding a polygon, the number of islands could only shrink. Therefore the offset_ex shall generate exactly one expanded island for one input island.
|
||||
assert(expolygons_off.size() == 1);
|
||||
std::swap(expolygon_off, expolygons_off.front());
|
||||
}
|
||||
}
|
||||
Polylines chained = PolylineCollection::chained_path_from(
|
||||
std::move(polylines),
|
||||
PolylineCollection::leftmost_point(polylines), false); // reverse allowed
|
||||
bool first = true;
|
||||
for (Polylines::iterator it_polyline = chained.begin(); it_polyline != chained.end(); ++ it_polyline) {
|
||||
if (! first) {
|
||||
// Try to connect the lines.
|
||||
Points &pts_end = polylines_out.back().points;
|
||||
const Point &first_point = it_polyline->points.front();
|
||||
const Point &last_point = pts_end.back();
|
||||
// TODO: we should also check that both points are on a fill_boundary to avoid
|
||||
// connecting paths on the boundaries of internal regions
|
||||
if ((last_point - first_point).cast<double>().norm() <= 1.5 * distance &&
|
||||
expolygon_off.contains(Line(last_point, first_point))) {
|
||||
// Append the polyline.
|
||||
pts_end.insert(pts_end.end(), it_polyline->points.begin(), it_polyline->points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// The lines cannot be connected.
|
||||
polylines_out.emplace_back(std::move(*it_polyline));
|
||||
first = false;
|
||||
}
|
||||
// connect lines if needed
|
||||
if (! polylines_chained.empty()) {
|
||||
if (params.dont_connect)
|
||||
append(polylines_out, std::move(polylines_chained));
|
||||
else
|
||||
this->connect_infill(std::move(polylines_chained), expolygon, polylines_out, params);
|
||||
}
|
||||
}
@ -1,8 +1,10 @@
|
|||
#include <stdio.h>
|
||||
|
||||
#include "../ClipperUtils.hpp"
|
||||
#include "../EdgeGrid.hpp"
|
||||
#include "../Surface.hpp"
|
||||
#include "../PrintConfig.hpp"
|
||||
#include "../libslic3r.h"
|
||||
|
||||
#include "FillBase.hpp"
|
||||
#include "FillConcentric.hpp"
|
||||
|
@ -148,4 +150,814 @@ std::pair<float, Point> Fill::_infill_direction(const Surface *surface) const
|
|||
return std::pair<float, Point>(out_angle, out_shift);
|
||||
}
|
||||
|
||||
#if 0
|
||||
// From pull request "Gyroid improvements" #2730 by @supermerill
|
||||
|
||||
/// cut poly between poly.point[idx_1] & poly.point[idx_1+1]
|
||||
/// add p1+-width to one part and p2+-width to the other one.
|
||||
/// add the "new" polyline to polylines (to part cut from poly)
|
||||
/// p1 & p2 have to be between poly.point[idx_1] & poly.point[idx_1+1]
|
||||
/// if idx_1 is ==0 or == size-1, then we don't need to create a new polyline.
|
||||
static void cut_polyline(Polyline &poly, Polylines &polylines, size_t idx_1, Point p1, Point p2) {
|
||||
//reorder points
|
||||
if (p1.distance_to_square(poly.points[idx_1]) > p2.distance_to_square(poly.points[idx_1])) {
|
||||
Point temp = p2;
|
||||
p2 = p1;
|
||||
p1 = temp;
|
||||
}
|
||||
if (idx_1 == poly.points.size() - 1) {
|
||||
//shouldn't be possible.
|
||||
poly.points.erase(poly.points.end() - 1);
|
||||
} else {
|
||||
// create new polyline
|
||||
Polyline new_poly;
|
||||
//put points in new_poly
|
||||
new_poly.points.push_back(p2);
|
||||
new_poly.points.insert(new_poly.points.end(), poly.points.begin() + idx_1 + 1, poly.points.end());
|
||||
//erase&put points in poly
|
||||
poly.points.erase(poly.points.begin() + idx_1 + 1, poly.points.end());
|
||||
poly.points.push_back(p1);
|
||||
//safe test
|
||||
if (poly.length() == 0)
|
||||
poly.points = new_poly.points;
|
||||
else
|
||||
polylines.emplace_back(new_poly);
|
||||
}
|
||||
}
|
||||
|
||||
/// the poly is like a polygon but with first_point != last_point (already removed)
|
||||
static void cut_polygon(Polyline &poly, size_t idx_1, Point p1, Point p2) {
|
||||
//reorder points
|
||||
if (p1.distance_to_square(poly.points[idx_1]) > p2.distance_to_square(poly.points[idx_1])) {
|
||||
Point temp = p2;
|
||||
p2 = p1;
|
||||
p1 = temp;
|
||||
}
|
||||
//check if we need to rotate before cutting
|
||||
if (idx_1 != poly.size() - 1) {
|
||||
//put points in new_poly
|
||||
poly.points.insert(poly.points.end(), poly.points.begin(), poly.points.begin() + idx_1 + 1);
|
||||
poly.points.erase(poly.points.begin(), poly.points.begin() + idx_1 + 1);
|
||||
}
|
||||
//put points in poly
|
||||
poly.points.push_back(p1);
|
||||
poly.points.insert(poly.points.begin(), p2);
|
||||
}
|
||||
|
||||
/// Check whether the polyline built from pts_to_check may come within 'width' distance of a point in polylines_blocker.
/// It uses equally_spaced_points with width/2 precision, so the number of points in pts_to_check does not matter.
/// It uses the given polylines_blocker points; be sure to supply enough of them to be reliable.
/// Complexity: N(pts_to_check.equally_spaced_points(width / 2)) x N(polylines_blocker.points)
|
||||
static bool collision(const Points &pts_to_check, const Polylines &polylines_blocker, const coordf_t width) {
|
||||
//check if it's not too close to a polyline
|
||||
coordf_t min_dist_square = width * width * 0.9 - SCALED_EPSILON;
|
||||
Polyline better_polylines(pts_to_check);
|
||||
Points better_pts = better_polylines.equally_spaced_points(width / 2);
|
||||
for (const Point &p : better_pts) {
|
||||
for (const Polyline &poly2 : polylines_blocker) {
|
||||
for (const Point &p2 : poly2.points) {
|
||||
if (p.distance_to_square(p2) < min_dist_square) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Try to find a path inside polylines that allows going from p1 to p2.
/// width is the width of the extrusion.
/// polylines_blockers is the array of polylines used to check that the path is not blocked by something.
/// Complexity: N(polylines.points) + a collision check if a path was found: N(2(p2-p1)/width) x N(polylines_blocker.points)
static Points get_frontier(Polylines &polylines, const Point& p1, const Point& p2, const coord_t width, const Polylines &polylines_blockers, coord_t max_size = -1) {
|
||||
for (size_t idx_poly = 0; idx_poly < polylines.size(); ++idx_poly) {
|
||||
Polyline &poly = polylines[idx_poly];
|
||||
if (poly.size() <= 1) continue;
|
||||
|
||||
//loop?
|
||||
if (poly.first_point() == poly.last_point()) {
|
||||
//polygon : try to find a line for p1 & p2.
|
||||
size_t idx_11, idx_12, idx_21, idx_22;
|
||||
idx_11 = poly.closest_point_index(p1);
|
||||
idx_12 = idx_11;
|
||||
if (Line(poly.points[idx_11], poly.points[(idx_11 + 1) % (poly.points.size() - 1)]).distance_to(p1) < SCALED_EPSILON) {
|
||||
idx_12 = (idx_11 + 1) % (poly.points.size() - 1);
|
||||
} else if (Line(poly.points[(idx_11 > 0) ? (idx_11 - 1) : (poly.points.size() - 2)], poly.points[idx_11]).distance_to(p1) < SCALED_EPSILON) {
|
||||
idx_11 = (idx_11 > 0) ? (idx_11 - 1) : (poly.points.size() - 2);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
idx_21 = poly.closest_point_index(p2);
|
||||
idx_22 = idx_21;
|
||||
if (Line(poly.points[idx_21], poly.points[(idx_21 + 1) % (poly.points.size() - 1)]).distance_to(p2) < SCALED_EPSILON) {
|
||||
idx_22 = (idx_21 + 1) % (poly.points.size() - 1);
|
||||
} else if (Line(poly.points[(idx_21 > 0) ? (idx_21 - 1) : (poly.points.size() - 2)], poly.points[idx_21]).distance_to(p2) < SCALED_EPSILON) {
|
||||
idx_21 = (idx_21 > 0) ? (idx_21 - 1) : (poly.points.size() - 2);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
//edge case: on the same line
|
||||
if (idx_11 == idx_21 && idx_12 == idx_22) {
|
||||
if (collision(Points() = { p1, p2 }, polylines_blockers, width)) return Points();
|
||||
//break loop
|
||||
poly.points.erase(poly.points.end() - 1);
|
||||
cut_polygon(poly, idx_11, p1, p2);
|
||||
return Points() = { Line(p1, p2).midpoint() };
|
||||
}
|
||||
|
||||
//compute distance & array for the ++ path
|
||||
Points ret_1_to_2;
|
||||
double dist_1_to_2 = p1.distance_to(poly.points[idx_12]);
|
||||
ret_1_to_2.push_back(poly.points[idx_12]);
|
||||
size_t max = idx_12 <= idx_21 ? idx_21+1 : poly.points.size();
|
||||
for (size_t i = idx_12 + 1; i < max; i++) {
|
||||
dist_1_to_2 += poly.points[i - 1].distance_to(poly.points[i]);
|
||||
ret_1_to_2.push_back(poly.points[i]);
|
||||
}
|
||||
if (idx_12 > idx_21) {
|
||||
dist_1_to_2 += poly.points.back().distance_to(poly.points.front());
|
||||
ret_1_to_2.push_back(poly.points[0]);
|
||||
for (size_t i = 1; i <= idx_21; i++) {
|
||||
dist_1_to_2 += poly.points[i - 1].distance_to(poly.points[i]);
|
||||
ret_1_to_2.push_back(poly.points[i]);
|
||||
}
|
||||
}
|
||||
dist_1_to_2 += p2.distance_to(poly.points[idx_21]);
|
||||
|
||||
//compute distance & array for the -- path
|
||||
Points ret_2_to_1;
|
||||
double dist_2_to_1 = p1.distance_to(poly.points[idx_11]);
|
||||
ret_2_to_1.push_back(poly.points[idx_11]);
|
||||
size_t min = idx_22 <= idx_11 ? idx_22 : 0;
|
||||
for (size_t i = idx_11; i > min; i--) {
|
||||
dist_2_to_1 += poly.points[i - 1].distance_to(poly.points[i]);
|
||||
ret_2_to_1.push_back(poly.points[i - 1]);
|
||||
}
|
||||
if (idx_22 > idx_11) {
|
||||
dist_2_to_1 += poly.points.back().distance_to(poly.points.front());
|
||||
ret_2_to_1.push_back(poly.points[poly.points.size() - 1]);
|
||||
for (size_t i = poly.points.size() - 1; i > idx_22; i--) {
|
||||
dist_2_to_1 += poly.points[i - 1].distance_to(poly.points[i]);
|
||||
ret_2_to_1.push_back(poly.points[i - 1]);
|
||||
}
|
||||
}
|
||||
dist_2_to_1 += p2.distance_to(poly.points[idx_22]);
|
||||
|
||||
if (max_size < dist_2_to_1 && max_size < dist_1_to_2) {
|
||||
return Points();
|
||||
}
|
||||
|
||||
//choose between the two direction (keep the short one)
|
||||
if (dist_1_to_2 < dist_2_to_1) {
|
||||
if (collision(ret_1_to_2, polylines_blockers, width)) return Points();
|
||||
//break loop
|
||||
poly.points.erase(poly.points.end() - 1);
|
||||
//remove points
|
||||
if (idx_12 <= idx_21) {
|
||||
poly.points.erase(poly.points.begin() + idx_12, poly.points.begin() + idx_21 + 1);
|
||||
if (idx_12 != 0) {
|
||||
cut_polygon(poly, idx_11, p1, p2);
|
||||
} //else : already cut at the good place
|
||||
} else {
|
||||
poly.points.erase(poly.points.begin() + idx_12, poly.points.end());
|
||||
poly.points.erase(poly.points.begin(), poly.points.begin() + idx_21);
|
||||
cut_polygon(poly, poly.points.size() - 1, p1, p2);
|
||||
}
|
||||
return ret_1_to_2;
|
||||
} else {
|
||||
if (collision(ret_2_to_1, polylines_blockers, width)) return Points();
|
||||
//break loop
|
||||
poly.points.erase(poly.points.end() - 1);
|
||||
//remove points
|
||||
if (idx_22 <= idx_11) {
|
||||
poly.points.erase(poly.points.begin() + idx_22, poly.points.begin() + idx_11 + 1);
|
||||
if (idx_22 != 0) {
|
||||
cut_polygon(poly, idx_21, p1, p2);
|
||||
} //else : already cut at the good place
|
||||
} else {
|
||||
poly.points.erase(poly.points.begin() + idx_22, poly.points.end());
|
||||
poly.points.erase(poly.points.begin(), poly.points.begin() + idx_11);
|
||||
cut_polygon(poly, poly.points.size() - 1, p1, p2);
|
||||
}
|
||||
return ret_2_to_1;
|
||||
}
|
||||
} else {
|
||||
//polyline : try to find a line for p1 & p2.
|
||||
size_t idx_1, idx_2;
|
||||
idx_1 = poly.closest_point_index(p1);
|
||||
if (idx_1 < poly.points.size() - 1 && Line(poly.points[idx_1], poly.points[idx_1 + 1]).distance_to(p1) < SCALED_EPSILON) {
|
||||
} else if (idx_1 > 0 && Line(poly.points[idx_1 - 1], poly.points[idx_1]).distance_to(p1) < SCALED_EPSILON) {
|
||||
idx_1 = idx_1 - 1;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
idx_2 = poly.closest_point_index(p2);
|
||||
if (idx_2 < poly.points.size() - 1 && Line(poly.points[idx_2], poly.points[idx_2 + 1]).distance_to(p2) < SCALED_EPSILON) {
|
||||
} else if (idx_2 > 0 && Line(poly.points[idx_2 - 1], poly.points[idx_2]).distance_to(p2) < SCALED_EPSILON) {
|
||||
idx_2 = idx_2 - 1;
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
|
||||
//edge case: on the same line
|
||||
if (idx_1 == idx_2) {
|
||||
if (collision(Points() = { p1, p2 }, polylines_blockers, width)) return Points();
|
||||
cut_polyline(poly, polylines, idx_1, p1, p2);
|
||||
return Points() = { Line(p1, p2).midpoint() };
|
||||
}
|
||||
|
||||
//create ret array
|
||||
size_t first_idx = idx_1;
|
||||
size_t last_idx = idx_2 + 1;
|
||||
if (idx_1 > idx_2) {
|
||||
first_idx = idx_2;
|
||||
last_idx = idx_1 + 1;
|
||||
}
|
||||
Points p_ret;
|
||||
p_ret.insert(p_ret.end(), poly.points.begin() + first_idx + 1, poly.points.begin() + last_idx);
|
||||
|
||||
coordf_t length = 0;
|
||||
for (size_t i = 1; i < p_ret.size(); i++) length += p_ret[i - 1].distance_to(p_ret[i]);
|
||||
|
||||
if (max_size < length) {
|
||||
return Points();
|
||||
}
|
||||
|
||||
if (collision(p_ret, polylines_blockers, width)) return Points();
|
||||
//cut polyline
|
||||
poly.points.erase(poly.points.begin() + first_idx + 1, poly.points.begin() + last_idx);
|
||||
cut_polyline(poly, polylines, first_idx, p1, p2);
|
||||
//order the returned array to be p1->p2
|
||||
if (idx_1 > idx_2) {
|
||||
std::reverse(p_ret.begin(), p_ret.end());
|
||||
}
|
||||
return p_ret;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return Points();
|
||||
}
|
||||
|
||||
/// Connect the infill_ordered polylines, in this order, from the back point to the next front point.
/// It uses only the boundary polygons to do so, and it cannot pass twice through the same place.
/// It avoids passing over the infill_ordered polylines (preventing local over-extrusion).
/// Returns the connected polylines in polylines_out. It can output polygons (stored as polylines with first_point = last_point).
/// Complexity: worst case N(infill_ordered.points) x N(boundary.points),
/// typical N(infill_ordered) x ( N(boundary.points) + N(infill_ordered.points) )
void Fill::connect_infill(Polylines &&infill_ordered, const ExPolygon &boundary, Polylines &polylines_out, const FillParams ¶ms) {
|
||||
|
||||
//TODO: fallback to the quick & dirty old algorithm when n(points) is too high.
|
||||
Polylines polylines_frontier = to_polylines(((Polygons)boundary));
|
||||
|
||||
Polylines polylines_blocker;
|
||||
coord_t clip_size = scale_(this->spacing) * 2;
|
||||
for (const Polyline &polyline : infill_ordered) {
|
||||
if (polyline.length() > 2.01 * clip_size) {
|
||||
polylines_blocker.push_back(polyline);
|
||||
polylines_blocker.back().clip_end(clip_size);
|
||||
polylines_blocker.back().clip_start(clip_size);
|
||||
}
|
||||
}
|
||||
|
||||
//length between two lines
|
||||
coordf_t ideal_length = (1 / params.density) * this->spacing;
|
||||
|
||||
Polylines polylines_connected_first;
|
||||
bool first = true;
|
||||
for (const Polyline &polyline : infill_ordered) {
|
||||
if (!first) {
|
||||
// Try to connect the lines.
|
||||
Points &pts_end = polylines_connected_first.back().points;
|
||||
const Point &last_point = pts_end.back();
|
||||
const Point &first_point = polyline.points.front();
|
||||
if (last_point.distance_to(first_point) < scale_(this->spacing) * 10) {
|
||||
Points pts_frontier = get_frontier(polylines_frontier, last_point, first_point, scale_(this->spacing), polylines_blocker, (coord_t)scale_(ideal_length) * 2);
|
||||
if (!pts_frontier.empty()) {
|
||||
// The lines can be connected.
|
||||
pts_end.insert(pts_end.end(), pts_frontier.begin(), pts_frontier.end());
|
||||
pts_end.insert(pts_end.end(), polyline.points.begin(), polyline.points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
// The lines cannot be connected.
|
||||
polylines_connected_first.emplace_back(std::move(polyline));
|
||||
|
||||
first = false;
|
||||
}
|
||||
|
||||
Polylines polylines_connected;
|
||||
first = true;
|
||||
for (const Polyline &polyline : polylines_connected_first) {
|
||||
if (!first) {
|
||||
// Try to connect the lines.
|
||||
Points &pts_end = polylines_connected.back().points;
|
||||
const Point &last_point = pts_end.back();
|
||||
const Point &first_point = polyline.points.front();
|
||||
|
||||
Polylines before = polylines_frontier;
|
||||
Points pts_frontier = get_frontier(polylines_frontier, last_point, first_point, scale_(this->spacing), polylines_blocker);
|
||||
if (!pts_frontier.empty()) {
|
||||
// The lines can be connected.
|
||||
pts_end.insert(pts_end.end(), pts_frontier.begin(), pts_frontier.end());
|
||||
pts_end.insert(pts_end.end(), polyline.points.begin(), polyline.points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// The lines cannot be connected.
|
||||
polylines_connected.emplace_back(std::move(polyline));
|
||||
|
||||
first = false;
|
||||
}
|
||||
|
||||
//try to link to nearest point if possible
|
||||
for (size_t idx1 = 0; idx1 < polylines_connected.size(); idx1++) {
|
||||
size_t min_idx = 0;
|
||||
coordf_t min_length = 0;
|
||||
bool switch_id1 = false;
|
||||
bool switch_id2 = false;
|
||||
for (size_t idx2 = idx1 + 1; idx2 < polylines_connected.size(); idx2++) {
|
||||
double last_first = polylines_connected[idx1].last_point().distance_to_square(polylines_connected[idx2].first_point());
|
||||
double first_first = polylines_connected[idx1].first_point().distance_to_square(polylines_connected[idx2].first_point());
|
||||
double first_last = polylines_connected[idx1].first_point().distance_to_square(polylines_connected[idx2].last_point());
|
||||
double last_last = polylines_connected[idx1].last_point().distance_to_square(polylines_connected[idx2].last_point());
|
||||
double min = std::min(std::min(last_first, last_last), std::min(first_first, first_last));
|
||||
if (min < min_length || min_length == 0) {
|
||||
min_idx = idx2;
|
||||
switch_id1 = (std::min(last_first, last_last) > std::min(first_first, first_last));
|
||||
switch_id2 = (std::min(last_first, first_first) > std::min(last_last, first_last));
|
||||
min_length = min;
|
||||
}
|
||||
}
|
||||
if (min_idx > idx1 && min_idx < polylines_connected.size()){
|
||||
Points pts_frontier = get_frontier(polylines_frontier,
|
||||
switch_id1 ? polylines_connected[idx1].first_point() : polylines_connected[idx1].last_point(),
|
||||
switch_id2 ? polylines_connected[min_idx].last_point() : polylines_connected[min_idx].first_point(),
|
||||
scale_(this->spacing), polylines_blocker);
|
||||
if (!pts_frontier.empty()) {
|
||||
if (switch_id1) polylines_connected[idx1].reverse();
|
||||
if (switch_id2) polylines_connected[min_idx].reverse();
|
||||
Points &pts_end = polylines_connected[idx1].points;
|
||||
pts_end.insert(pts_end.end(), pts_frontier.begin(), pts_frontier.end());
|
||||
pts_end.insert(pts_end.end(), polylines_connected[min_idx].points.begin(), polylines_connected[min_idx].points.end());
|
||||
polylines_connected.erase(polylines_connected.begin() + min_idx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//try to create some loops if possible
|
||||
for (Polyline &polyline : polylines_connected) {
|
||||
Points pts_frontier = get_frontier(polylines_frontier, polyline.last_point(), polyline.first_point(), scale_(this->spacing), polylines_blocker);
|
||||
if (!pts_frontier.empty()) {
|
||||
polyline.points.insert(polyline.points.end(), pts_frontier.begin(), pts_frontier.end());
|
||||
polyline.points.insert(polyline.points.begin(), polyline.points.back());
|
||||
}
|
||||
polylines_out.emplace_back(polyline);
|
||||
}
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
struct ContourPointData {
|
||||
ContourPointData(float param) : param(param) {}
|
||||
// Euclidean position of the contour point along the contour.
|
||||
float param = 0.f;
|
||||
// Was the segment starting with this contour point extruded?
|
||||
bool segment_consumed = false;
|
||||
// Was this point extruded over?
|
||||
bool point_consumed = false;
|
||||
};
|
||||
|
||||
// Verify whether the contour from point idx_start to point idx_end could be taken (whether all segments along the contour were not yet extruded).
|
||||
static bool could_take(const std::vector<ContourPointData> &contour_data, size_t idx_start, size_t idx_end)
|
||||
{
|
||||
for (size_t i = idx_start; i < idx_end; ) {
|
||||
if (contour_data[i].segment_consumed || contour_data[i].point_consumed)
|
||||
return false;
|
||||
if (++ i == contour_data.size())
|
||||
i = 0;
|
||||
}
|
||||
return ! contour_data[idx_end].point_consumed;
|
||||
}
|
||||
|
||||
// Connect end of pl1 to the start of pl2 using the perimeter contour.
|
||||
// The idx_start and idx_end are ordered so that the connecting polyline points will be taken with increasing indices.
|
||||
static void take(Polyline &pl1, Polyline &&pl2, const Points &contour, std::vector<ContourPointData> &contour_data, size_t idx_start, size_t idx_end, bool reversed)
|
||||
{
|
||||
#ifndef NDEBUG
|
||||
size_t num_points_initial = pl1.points.size();
|
||||
assert(idx_start != idx_end);
|
||||
#endif /* NDEBUG */
|
||||
|
||||
{
|
||||
// Reserve memory at pl1 for the connecting contour and pl2.
|
||||
int new_points = int(idx_end) - int(idx_start) - 1;
|
||||
if (new_points < 0)
|
||||
new_points += int(contour.size());
|
||||
pl1.points.reserve(pl1.points.size() + size_t(new_points) + pl2.points.size());
|
||||
}
|
||||
|
||||
contour_data[idx_start].point_consumed = true;
|
||||
contour_data[idx_start].segment_consumed = true;
|
||||
contour_data[idx_end ].point_consumed = true;
|
||||
|
||||
if (reversed) {
|
||||
size_t i = (idx_end == 0) ? contour_data.size() - 1 : idx_end - 1;
|
||||
while (i != idx_start) {
|
||||
contour_data[i].point_consumed = true;
|
||||
contour_data[i].segment_consumed = true;
|
||||
pl1.points.emplace_back(contour[i]);
|
||||
if (i == 0)
|
||||
i = contour_data.size();
|
||||
-- i;
|
||||
}
|
||||
} else {
|
||||
size_t i = idx_start;
|
||||
if (++ i == contour_data.size())
|
||||
i = 0;
|
||||
while (i != idx_end) {
|
||||
contour_data[i].point_consumed = true;
|
||||
contour_data[i].segment_consumed = true;
|
||||
pl1.points.emplace_back(contour[i]);
|
||||
if (++ i == contour_data.size())
|
||||
i = 0;
|
||||
}
|
||||
}
|
||||
|
||||
append(pl1.points, std::move(pl2.points));
|
||||
}
|
||||
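// --- Illustrative sketch only, not part of this commit. ---
// How could_take() and take() above are meant to cooperate (the helper below is an
// assumed example; the real call sites follow later in this file): only connect two
// infill polylines along a boundary arc if no segment of that arc was consumed yet.
#if 0
static void example_connect_along_contour(Polyline &pl1, Polyline &&pl2,
    const Points &contour, std::vector<ContourPointData> &contour_data,
    size_t idx_start, size_t idx_end, bool reversed)
{
    if (could_take(contour_data, idx_start, idx_end))
        // Marks the consumed contour points/segments, then appends the arc and pl2 to pl1.
        take(pl1, std::move(pl2), contour, contour_data, idx_start, idx_end, reversed);
}
#endif
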
|
||||
// Return an index of start of a segment and a point of the clipping point at distance from the end of polyline.
|
||||
struct SegmentPoint {
|
||||
// Segment index, defining a line <idx_segment, idx_segment + 1).
|
||||
size_t idx_segment = std::numeric_limits<size_t>::max();
|
||||
// Parameter of point in <0, 1) along the line <idx_segment, idx_segment + 1)
|
||||
double t;
|
||||
Vec2d point;
|
||||
|
||||
bool valid() const { return idx_segment != std::numeric_limits<size_t>::max(); }
|
||||
};
|
||||
|
||||
static inline SegmentPoint clip_start_segment_and_point(const Points &polyline, double distance)
|
||||
{
|
||||
assert(polyline.size() >= 2);
|
||||
assert(distance > 0.);
|
||||
// Initialized to "invalid".
|
||||
SegmentPoint out;
|
||||
if (polyline.size() >= 2) {
|
||||
const double d2 = distance * distance;
|
||||
Vec2d pt_prev = polyline.front().cast<double>();
|
||||
for (int i = 1; i < polyline.size(); ++ i) {
|
||||
Vec2d pt = polyline[i].cast<double>();
|
||||
Vec2d v = pt - pt_prev;
|
||||
double l2 = v.squaredNorm();
|
||||
if (l2 > d2) {
|
||||
out.idx_segment = i;
|
||||
out.t = distance / sqrt(l2);
|
||||
out.point = pt + out.t * v;
|
||||
break;
|
||||
}
|
||||
distance -= sqrt(l2);
|
||||
pt_prev = pt;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static inline SegmentPoint clip_end_segment_and_point(const Points &polyline, double distance)
|
||||
{
|
||||
assert(polyline.size() >= 2);
|
||||
assert(distance > 0.);
|
||||
// Initialized to "invalid".
|
||||
SegmentPoint out;
|
||||
if (polyline.size() >= 2) {
|
||||
const double d2 = distance * distance;
|
||||
Vec2d pt_next = polyline.back().cast<double>();
|
||||
for (int i = int(polyline.size()) - 2; i >= 0; -- i) {
|
||||
Vec2d pt = polyline[i].cast<double>();
|
||||
Vec2d v = pt - pt_next;
|
||||
double l2 = v.squaredNorm();
|
||||
if (l2 > d2) {
|
||||
out.idx_segment = i;
|
||||
out.t = distance / sqrt(l2);
|
||||
out.point = pt + out.t * v;
|
||||
break;
|
||||
}
|
||||
distance -= sqrt(l2);
|
||||
pt_next = pt;
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
static inline double segment_point_distance_squared(const Vec2d &p1a, const Vec2d &p1b, const Vec2d &p2)
|
||||
{
|
||||
const Vec2d v = p1b - p1a;
|
||||
const Vec2d va = p2 - p1a;
|
||||
const double l2 = v.squaredNorm();
|
||||
if (l2 < EPSILON)
|
||||
// p1a == p1b
|
||||
return va.squaredNorm();
|
||||
// Project p2 onto the (p1a, p1b) segment.
|
||||
const double t = va.dot(v);
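// t is the unnormalized projection parameter: the point's parameter along the segment is t / l2,
// so t is compared against 0 and l2 instead of 0 and 1, avoiding a division in the early-out branches.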
|
||||
if (t < 0.)
|
||||
return va.squaredNorm();
|
||||
else if (t > l2)
|
||||
return (p2 - p1b).squaredNorm();
|
||||
return ((t / l2) * v - va).squaredNorm();
|
||||
}
|
||||
|
||||
// Squared minimum distance between the two line segments (p1a, p1b) and (p2a, p2b).
|
||||
static inline double min_distance_of_segments(const Vec2d &p1a, const Vec2d &p1b, const Vec2d &p2a, const Vec2d &p2b)
|
||||
{
|
||||
Vec2d v1 = p1b - p1a;
|
||||
double l1_2 = v1.squaredNorm();
|
||||
if (l1_2 < EPSILON)
|
||||
// p1a == p1b: Return distance of p1a from the (p2a, p2b) segment.
|
||||
return segment_point_distance_squared(p2a, p2b, p1a);
|
||||
|
||||
Vec2d v2 = p2b - p2a;
|
||||
double l2_2 = v2.squaredNorm();
|
||||
if (l2_2 < EPSILON)
|
||||
// p2a == p2b: Return distance of p2a from the (p1a, p1b) segment.
|
||||
return segment_point_distance_squared(p1a, p1b, p2a);
|
||||
|
||||
// Project p2a, p2b onto the (p1a, p1b) segment.
|
||||
auto project_p2a_p2b_onto_seg_p1a_p1b = [](const Vec2d& p1a, const Vec2d& p1b, const Vec2d& p2a, const Vec2d& p2b, const Vec2d& v1, const double l1_2) {
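// Resolves the distance from the projections of p2a and p2b onto (p1a, p1b); when the projections
// alone cannot decide the case, it returns max() and the symmetric call below takes over.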
|
||||
Vec2d v1a2a = p2a - p1a;
|
||||
Vec2d v1a2b = p2b - p1a;
|
||||
double t1 = v1a2a.dot(v1);
|
||||
double t2 = v1a2b.dot(v1);
|
||||
if (t1 <= 0.) {
|
||||
if (t2 <= 0.)
|
||||
// Both p2a and p2b are left of v1.
|
||||
return (((t1 < t2) ? p2b : p2a) - p1a).squaredNorm();
|
||||
else if (t2 < l1_2)
|
||||
// Project p2b onto the (p1a, p1b) segment.
|
||||
return ((t2 / l1_2) * v1 - v1a2b).squaredNorm();
|
||||
}
|
||||
else if (t1 >= l1_2) {
|
||||
if (t2 >= l1_2)
|
||||
// Both p2a and p2b are right of v1.
|
||||
return (((t1 < t2) ? p2a : p2b) - p1b).squaredNorm();
|
||||
else if (t2 < l1_2)
|
||||
// Project p2b onto the (p1a, p1b) segment.
|
||||
return ((t2 / l1_2) * v1 - v1a2b).squaredNorm();
|
||||
}
|
||||
else {
|
||||
// Project p2a onto the (p1a, p1b) segment.
double dist_min = ((t1 / l1_2) * v1 - v1a2a).squaredNorm();
|
||||
if (t2 > 0. && t2 < l1_2)
|
||||
dist_min = std::min(dist_min, ((t2 / l1_2) * v1 - v1a2b).squaredNorm());
|
||||
return dist_min;
|
||||
}
|
||||
return std::numeric_limits<double>::max();
|
||||
};
|
||||
|
||||
return std::min(
|
||||
project_p2a_p2b_onto_seg_p1a_p1b(p1a, p1b, p2a, p2b, v1, l1_2),
|
||||
project_p2a_p2b_onto_seg_p1a_p1b(p2a, p2b, p1a, p1b, v2, l2_2));
|
||||
}
|
||||
|
||||
// Mark the segments of split boundary as consumed if they are very close to some of the infill line.
|
||||
void mark_boundary_segments_touching_infill(
|
||||
const std::vector<Points> &boundary,
|
||||
std::vector<std::vector<ContourPointData>> &boundary_data,
|
||||
const BoundingBox &boundary_bbox,
|
||||
const Polylines &infill,
|
||||
const double clip_distance,
|
||||
const double distance_colliding)
|
||||
{
|
||||
EdgeGrid::Grid grid;
|
||||
grid.set_bbox(boundary_bbox);
|
||||
// Make the grid cells large enough to cover the clip distance plus a thick line width margin.
|
||||
grid.create(boundary, clip_distance + scale_(10.));
|
||||
|
||||
struct Visitor {
|
||||
Visitor(const EdgeGrid::Grid &grid, const std::vector<Points> &boundary, std::vector<std::vector<ContourPointData>> &boundary_data, const double dist2_max) :
|
||||
grid(grid), boundary(boundary), boundary_data(boundary_data), dist2_max(dist2_max) {}
|
||||
|
||||
void init(const Vec2d &pt1, const Vec2d &pt2) {
|
||||
this->pt1 = &pt1;
|
||||
this->pt2 = &pt2;
|
||||
}
|
||||
|
||||
bool operator()(coord_t iy, coord_t ix) {
|
||||
// Called with a row and column of the grid cell, which is intersected by a line.
|
||||
auto cell_data_range = this->grid.cell_data_range(iy, ix);
|
||||
for (auto it_contour_and_segment = cell_data_range.first; it_contour_and_segment != cell_data_range.second; ++ it_contour_and_segment) {
|
||||
// End points of the line segment and their vector.
|
||||
auto segment = this->grid.segment(*it_contour_and_segment);
|
||||
const Vec2d seg_pt1 = segment.first.cast<double>();
|
||||
const Vec2d seg_pt2 = segment.second.cast<double>();
|
||||
if (min_distance_of_segments(seg_pt1, seg_pt2, *this->pt1, *this->pt2) < this->dist2_max) {
|
||||
// Mark this boundary segment as touching the infill line.
|
||||
ContourPointData&bdp = boundary_data[it_contour_and_segment->first][it_contour_and_segment->second];
|
||||
bdp.segment_consumed = true;
|
||||
// There is no need for checking seg_pt2 as it will be checked the next time.
|
||||
if (segment_point_distance_squared(*this->pt1, *this->pt2, seg_pt1) < this->dist2_max)
|
||||
bdp.point_consumed = true;
|
||||
}
|
||||
}
|
||||
// Continue traversing the grid along the edge.
|
||||
return true;
|
||||
}
|
||||
|
||||
const EdgeGrid::Grid &grid;
|
||||
const std::vector<Points> &boundary;
|
||||
std::vector<std::vector<ContourPointData>> &boundary_data;
|
||||
// Maximum squared distance between the boundary and the infill line at which the boundary is considered to be touching the infill line.
|
||||
const double dist2_max;
|
||||
|
||||
const Vec2d *pt1;
|
||||
const Vec2d *pt2;
|
||||
} visitor(grid, boundary, boundary_data, distance_colliding * distance_colliding);
|
||||
|
||||
for (const Polyline &polyline : infill) {
|
||||
// Clip the infill polyline by the Euclidean distance along the polyline.
|
||||
SegmentPoint start_point = clip_start_segment_and_point(polyline.points, clip_distance);
|
||||
SegmentPoint end_point = clip_end_segment_and_point(polyline.points, clip_distance);
|
||||
if (start_point.valid() && end_point.valid() &&
|
||||
(start_point.idx_segment < end_point.idx_segment || (start_point.idx_segment == end_point.idx_segment && start_point.t < end_point.t))) {
|
||||
// The clipped polyline is non-empty.
|
||||
for (size_t point_idx = start_point.idx_segment; point_idx <= end_point.idx_segment; ++ point_idx) {
|
||||
//FIXME extend the EdgeGrid to support tracing a thick line.
|
||||
#if 0
|
||||
Point pt1, pt2;
|
||||
Vec2d pt1d, pt2d;
|
||||
if (point_idx == start_point.idx_segment) {
|
||||
pt1d = start_point.point;
|
||||
pt1 = pt1d.cast<coord_t>();
|
||||
} else {
|
||||
pt1 = polyline.points[point_idx];
|
||||
pt1d = pt1.cast<double>();
|
||||
}
|
||||
if (point_idx == end_point.idx_segment) {
pt2d = end_point.point;
pt2 = pt2d.cast<coord_t>();
|
||||
} else {
|
||||
pt2 = polyline.points[point_idx + 1];
|
||||
pt2d = pt2.cast<double>();
|
||||
}
|
||||
visitor.init(pt1d, pt2d);
|
||||
grid.visit_cells_intersecting_thick_line(pt1, pt2, distance_colliding, visitor);
|
||||
#else
|
||||
Vec2d pt1 = (point_idx == start_point.idx_segment) ? start_point.point : polyline.points[point_idx].cast<double>();
|
||||
Vec2d pt2 = (point_idx == end_point .idx_segment) ? end_point .point : polyline.points[point_idx + 1].cast<double>();
|
||||
visitor.init(pt1, pt2);
|
||||
// Simulate tracing of a thick line. This only works reliably if distance_colliding <= grid cell size.
|
||||
Vec2d v = (pt2 - pt1).normalized() * distance_colliding;
|
||||
Vec2d vperp(-v.y(), v.x());
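// vperp is v rotated by 90 degrees; the two passes below trace the two long edges of a rectangle
// of half-width distance_colliding laid around the clipped segment (pt1, pt2).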
|
||||
Vec2d a = pt1 - v - vperp;
|
||||
Vec2d b = pt2 + v - vperp;
|
||||
grid.visit_cells_intersecting_line(a.cast<coord_t>(), b.cast<coord_t>(), visitor);
|
||||
a = pt1 - v + vperp;
|
||||
b = pt2 + v + vperp;
|
||||
grid.visit_cells_intersecting_line(a.cast<coord_t>(), b.cast<coord_t>(), visitor);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Fill::connect_infill(Polylines &&infill_ordered, const ExPolygon &boundary_src, Polylines &polylines_out, const FillParams ¶ms)
|
||||
{
|
||||
assert(! infill_ordered.empty());
|
||||
assert(! boundary_src.contour.points.empty());
|
||||
|
||||
BoundingBox bbox = get_extents(boundary_src.contour);
|
||||
bbox.offset(SCALED_EPSILON);
|
||||
|
||||
// 1) Add the end points of infill_ordered to boundary_src.
|
||||
std::vector<Points> boundary;
|
||||
std::vector<std::vector<ContourPointData>> boundary_data;
|
||||
boundary.assign(boundary_src.holes.size() + 1, Points());
|
||||
boundary_data.assign(boundary_src.holes.size() + 1, std::vector<ContourPointData>());
|
||||
// Mapping the infill_ordered end point to a (contour, point) of boundary.
|
||||
std::vector<std::pair<size_t, size_t>> map_infill_end_point_to_boundary;
|
||||
map_infill_end_point_to_boundary.assign(infill_ordered.size() * 2, std::pair<size_t, size_t>(std::numeric_limits<size_t>::max(), std::numeric_limits<size_t>::max()));
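// Two entries per infill polyline: index 2 * i refers to the polyline's start point, 2 * i + 1 to its end point.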
|
||||
{
|
||||
// Project the infill_ordered end points onto boundary_src.
|
||||
std::vector<std::pair<EdgeGrid::Grid::ClosestPointResult, size_t>> intersection_points;
|
||||
{
|
||||
EdgeGrid::Grid grid;
|
||||
grid.set_bbox(bbox);
|
||||
grid.create(boundary_src, scale_(10.));
|
||||
intersection_points.reserve(infill_ordered.size() * 2);
|
||||
for (const Polyline &pl : infill_ordered)
|
||||
for (const Point *pt : { &pl.points.front(), &pl.points.back() }) {
|
||||
EdgeGrid::Grid::ClosestPointResult cp = grid.closest_point(*pt, SCALED_EPSILON);
|
||||
if (cp.valid()) {
|
||||
// The infill end point shall lie on the contour.
|
||||
assert(cp.distance < 2.);
|
||||
intersection_points.emplace_back(cp, (&pl - infill_ordered.data()) * 2 + (pt == &pl.points.front() ? 0 : 1));
|
||||
}
|
||||
}
|
||||
std::sort(intersection_points.begin(), intersection_points.end(), [](const std::pair<EdgeGrid::Grid::ClosestPointResult, size_t> &cp1, const std::pair<EdgeGrid::Grid::ClosestPointResult, size_t> &cp2) {
|
||||
return cp1.first.contour_idx < cp2.first.contour_idx ||
|
||||
(cp1.first.contour_idx == cp2.first.contour_idx &&
|
||||
(cp1.first.start_point_idx < cp2.first.start_point_idx ||
|
||||
(cp1.first.start_point_idx == cp2.first.start_point_idx && cp1.first.t < cp2.first.t)));
|
||||
});
|
||||
}
|
||||
auto it = intersection_points.begin();
|
||||
auto it_end = intersection_points.end();
|
||||
for (size_t idx_contour = 0; idx_contour <= boundary_src.holes.size(); ++ idx_contour) {
|
||||
const Polygon &contour_src = (idx_contour == 0) ? boundary_src.contour : boundary_src.holes[idx_contour - 1];
|
||||
Points &contour_dst = boundary[idx_contour];
|
||||
for (size_t idx_point = 0; idx_point < contour_src.points.size(); ++ idx_point) {
|
||||
contour_dst.emplace_back(contour_src.points[idx_point]);
|
||||
for (; it != it_end && it->first.contour_idx == idx_contour && it->first.start_point_idx == idx_point; ++ it) {
|
||||
// Add these points to the destination contour.
|
||||
const Vec2d pt1 = contour_src[idx_point].cast<double>();
|
||||
const Vec2d pt2 = (idx_point + 1 == contour_src.size() ? contour_src.points.front() : contour_src.points[idx_point + 1]).cast<double>();
|
||||
const Vec2d pt = lerp(pt1, pt2, it->first.t);
|
||||
map_infill_end_point_to_boundary[it->second] = std::make_pair(idx_contour, contour_dst.size());
|
||||
contour_dst.emplace_back(pt.cast<coord_t>());
|
||||
}
|
||||
}
|
||||
// Parametrize the curve.
|
||||
std::vector<ContourPointData> &contour_data = boundary_data[idx_contour];
|
||||
contour_data.reserve(contour_dst.size());
|
||||
contour_data.emplace_back(ContourPointData(0.f));
|
||||
for (size_t i = 1; i < contour_dst.size(); ++ i)
|
||||
contour_data.emplace_back(contour_data.back().param + (contour_dst[i].cast<float>() - contour_dst[i - 1].cast<float>()).norm());
|
||||
contour_data.front().param = contour_data.back().param + (contour_dst.back().cast<float>() - contour_dst.front().cast<float>()).norm();
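// Note: the first point's param now holds the total length of the closed contour;
// it is read back as param_end when connecting the infill end points.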
|
||||
}
|
||||
|
||||
#ifndef NDEBUG
|
||||
assert(boundary.size() == boundary_src.num_contours());
|
||||
assert(std::all_of(map_infill_end_point_to_boundary.begin(), map_infill_end_point_to_boundary.end(),
|
||||
[&boundary](const std::pair<size_t, size_t> &contour_point) {
|
||||
return contour_point.first < boundary.size() && contour_point.second < boundary[contour_point.first].size();
|
||||
}));
|
||||
#endif /* NDEBUG */
|
||||
}
|
||||
|
||||
// Mark the points and segments of split boundary as consumed if they are very close to some of the infill line.
|
||||
{
|
||||
const double clip_distance = scale_(this->spacing);
|
||||
const double distance_colliding = scale_(this->spacing);
|
||||
mark_boundary_segments_touching_infill(boundary, boundary_data, bbox, infill_ordered, clip_distance, distance_colliding);
|
||||
}
|
||||
|
||||
// Chain infill_ordered.
|
||||
//FIXME run the following loop through a heap sorted by the shortest perimeter edge that could be taken.
|
||||
// Maximum length of a connection between the end points of two infill lines.
|
||||
//const float length_max = scale_(this->spacing);
|
||||
const float length_max = scale_((2. / params.density) * this->spacing);
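// Roughly twice the distance between two neighboring infill lines (this->spacing / params.density).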
|
||||
size_t idx_chain_last = 0;
|
||||
for (size_t idx_chain = 1; idx_chain < infill_ordered.size(); ++ idx_chain) {
|
||||
Polyline &pl1 = infill_ordered[idx_chain_last];
|
||||
Polyline &pl2 = infill_ordered[idx_chain];
|
||||
const std::pair<size_t, size_t> *cp1 = &map_infill_end_point_to_boundary[(idx_chain - 1) * 2 + 1];
|
||||
const std::pair<size_t, size_t> *cp2 = &map_infill_end_point_to_boundary[idx_chain * 2];
|
||||
const Points &contour = boundary[cp1->first];
|
||||
std::vector<ContourPointData> &contour_data = boundary_data[cp1->first];
|
||||
bool valid = false;
|
||||
bool reversed = false;
|
||||
if (cp1->first == cp2->first) {
|
||||
// End points on the same contour. Try to connect them.
|
||||
float param_lo = (cp1->second == 0) ? 0.f : contour_data[cp1->second].param;
|
||||
float param_hi = (cp2->second == 0) ? 0.f : contour_data[cp2->second].param;
|
||||
float param_end = contour_data.front().param;
|
||||
if (param_lo > param_hi) {
|
||||
std::swap(param_lo, param_hi);
|
||||
std::swap(cp1, cp2);
|
||||
reversed = true;
|
||||
}
|
||||
assert(param_lo >= 0.f && param_lo <= param_end);
|
||||
assert(param_hi >= 0.f && param_hi <= param_end);
|
||||
float dist1 = param_hi - param_lo;
|
||||
float dist2 = param_lo + param_end - param_hi;
|
||||
if (dist1 > dist2) {
|
||||
std::swap(dist1, dist2);
|
||||
std::swap(cp1, cp2);
|
||||
reversed = ! reversed;
|
||||
}
|
||||
if (dist1 < length_max) {
|
||||
// Try to connect the shorter path.
|
||||
valid = could_take(contour_data, cp1->second, cp2->second);
|
||||
// Try to connect the longer path.
|
||||
if (! valid && dist2 < length_max) {
|
||||
std::swap(cp1, cp2);
|
||||
reversed = ! reversed;
|
||||
valid = could_take(contour_data, cp1->second, cp2->second);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (valid)
|
||||
take(pl1, std::move(pl2), contour, contour_data, cp1->second, cp2->second, reversed);
|
||||
else if (++ idx_chain_last < idx_chain)
|
||||
infill_ordered[idx_chain_last] = std::move(pl2);
|
||||
}
|
||||
infill_ordered.erase(infill_ordered.begin() + idx_chain_last + 1, infill_ordered.end());
|
||||
append(polylines_out, std::move(infill_ordered));
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace Slic3r
@ -15,6 +15,7 @@
namespace Slic3r {
|
||||
|
||||
class ExPolygon;
|
||||
class Surface;
|
||||
|
||||
struct FillParams
|
||||
|
@ -110,6 +111,8 @@ protected:
|
|||
|
||||
virtual std::pair<float, Point> _infill_direction(const Surface *surface) const;
|
||||
|
||||
void connect_infill(Polylines &&infill_ordered, const ExPolygon &boundary, Polylines &polylines_out, const FillParams ¶ms);
|
||||
|
||||
public:
|
||||
static coord_t _adjust_solid_spacing(const coord_t width, const coord_t distance);
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#include "../ClipperUtils.hpp"
|
||||
#include "../PolylineCollection.hpp"
|
||||
#include "../ShortestPath.hpp"
|
||||
#include "../Surface.hpp"
|
||||
#include <cmath>
|
||||
#include <algorithm>
|
||||
|
@ -31,19 +31,26 @@ static inline double f(double x, double z_sin, double z_cos, bool vertical, bool
|
|||
|
||||
static inline Polyline make_wave(
|
||||
const std::vector<Vec2d>& one_period, double width, double height, double offset, double scaleFactor,
|
||||
double z_cos, double z_sin, bool vertical)
|
||||
double z_cos, double z_sin, bool vertical, bool flip)
|
||||
{
|
||||
std::vector<Vec2d> points = one_period;
|
||||
double period = points.back()(0);
|
||||
points.pop_back();
|
||||
int n = points.size();
|
||||
do {
|
||||
points.emplace_back(Vec2d(points[points.size()-n](0) + period, points[points.size()-n](1)));
|
||||
} while (points.back()(0) < width);
|
||||
points.back()(0) = width;
|
||||
if (width != period) // do not extend if already truncated
|
||||
{
|
||||
points.reserve(one_period.size() * floor(width / period));
|
||||
points.pop_back();
|
||||
|
||||
int n = points.size();
|
||||
do {
|
||||
points.emplace_back(Vec2d(points[points.size()-n](0) + period, points[points.size()-n](1)));
|
||||
} while (points.back()(0) < width - EPSILON);
|
||||
|
||||
points.emplace_back(Vec2d(width, f(width, z_sin, z_cos, vertical, flip)));
|
||||
}
|
||||
|
||||
// and construct the final polyline to return:
|
||||
Polyline polyline;
|
||||
polyline.points.reserve(points.size());
|
||||
for (auto& point : points) {
|
||||
point(1) += offset;
|
||||
point(1) = clamp(0., height, double(point(1)));
|
||||
|
@ -55,45 +62,56 @@ static inline Polyline make_wave(
|
|||
return polyline;
|
||||
}
|
||||
|
||||
static std::vector<Vec2d> make_one_period(double width, double scaleFactor, double z_cos, double z_sin, bool vertical, bool flip)
|
||||
static std::vector<Vec2d> make_one_period(double width, double scaleFactor, double z_cos, double z_sin, bool vertical, bool flip, double tolerance)
|
||||
{
|
||||
std::vector<Vec2d> points;
|
||||
double dx = M_PI_4; // very coarse spacing to begin with
|
||||
double dx = M_PI_2; // exact coordinates on main inflexion lobes
|
||||
double limit = std::min(2*M_PI, width);
|
||||
for (double x = 0.; x < limit + EPSILON; x += dx) { // so the last point is there too
|
||||
x = std::min(x, limit);
|
||||
points.emplace_back(Vec2d(x,f(x, z_sin,z_cos, vertical, flip)));
|
||||
}
|
||||
points.reserve(ceil(limit / tolerance / 3));
|
||||
|
||||
// now we will check all internal points and in case some are too far from the line connecting its neighbours,
|
||||
// we will add one more point on each side:
|
||||
const double tolerance = .1;
|
||||
for (unsigned int i=1;i<points.size()-1;++i) {
|
||||
auto& lp = points[i-1]; // left point
|
||||
auto& tp = points[i]; // this point
|
||||
Vec2d lrv = tp - lp;
|
||||
auto& rp = points[i+1]; // right point
|
||||
// calculate distance of the point to the line:
|
||||
double dist_mm = unscale<double>(scaleFactor) * std::abs(cross2(rp, lp) - cross2(rp - lp, tp)) / lrv.norm();
|
||||
if (dist_mm > tolerance) { // if the difference from straight line is more than this
|
||||
double x = 0.5f * (points[i-1](0) + points[i](0));
|
||||
points.emplace_back(Vec2d(x, f(x, z_sin, z_cos, vertical, flip)));
|
||||
x = 0.5f * (points[i+1](0) + points[i](0));
|
||||
points.emplace_back(Vec2d(x, f(x, z_sin, z_cos, vertical, flip)));
|
||||
// we added the points to the end, but need them all in order
|
||||
std::sort(points.begin(), points.end(), [](const Vec2d &lhs, const Vec2d &rhs){ return lhs < rhs; });
|
||||
// decrement i so we also check the first newly added point
|
||||
--i;
|
||||
for (double x = 0.; x < limit - EPSILON; x += dx) {
|
||||
points.emplace_back(Vec2d(x, f(x, z_sin, z_cos, vertical, flip)));
|
||||
}
|
||||
points.emplace_back(Vec2d(limit, f(limit, z_sin, z_cos, vertical, flip)));
|
||||
|
||||
// piecewise increase in resolution up to requested tolerance
|
||||
for(;;)
|
||||
{
|
||||
size_t size = points.size();
|
||||
for (unsigned int i = 1;i < size; ++i) {
|
||||
auto& lp = points[i-1]; // left point
|
||||
auto& rp = points[i]; // right point
|
||||
double x = lp(0) + (rp(0) - lp(0)) / 2;
|
||||
double y = f(x, z_sin, z_cos, vertical, flip);
|
||||
Vec2d ip = {x, y};
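// The cross product magnitude is twice the area of the triangle (lp, ip, rp); refining while it
// exceeds sqr(tolerance) keeps the midpoint's deviation from the chord on the order of the tolerance.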
|
||||
if (std::abs(cross2(Vec2d(ip - lp), Vec2d(ip - rp))) > sqr(tolerance)) {
|
||||
points.emplace_back(std::move(ip));
|
||||
}
|
||||
}
|
||||
|
||||
if (size == points.size())
|
||||
break;
|
||||
else
|
||||
{
|
||||
// insert new points in order
|
||||
std::sort(points.begin(), points.end(),
|
||||
[](const Vec2d &lhs, const Vec2d &rhs) { return lhs(0) < rhs(0); });
|
||||
}
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
static Polylines make_gyroid_waves(double gridZ, double density_adjusted, double line_spacing, double width, double height)
|
||||
{
|
||||
const double scaleFactor = scale_(line_spacing) / density_adjusted;
|
||||
//scale factor for 5% : 8 712 388
|
||||
// 1z = 10^-6 mm ?
|
||||
|
||||
// Tolerance in scaled units. Clamp the maximum tolerance, as loosening it
// further brings no processing-speed benefit beyond a certain point.
|
||||
const double tolerance = std::min(line_spacing / 2, FillGyroid::PatternTolerance) / unscale<double>(scaleFactor);
|
||||
|
||||
//scale factor for 5% : 8 712 388
|
||||
// 1z = 10^-6 mm ?
|
||||
const double z = gridZ / scaleFactor;
|
||||
const double z_sin = sin(z);
|
||||
const double z_cos = cos(z);
|
||||
|
@ -109,16 +127,20 @@ static Polylines make_gyroid_waves(double gridZ, double density_adjusted, double
|
|||
std::swap(width,height);
|
||||
}
|
||||
|
||||
std::vector<Vec2d> one_period = make_one_period(width, scaleFactor, z_cos, z_sin, vertical, flip); // creates one period of the waves, so it doesn't have to be recalculated all the time
|
||||
std::vector<Vec2d> one_period_odd = make_one_period(width, scaleFactor, z_cos, z_sin, vertical, flip, tolerance); // creates one period of the waves, so it doesn't have to be recalculated all the time
|
||||
flip = !flip; // even polylines are a bit shifted
|
||||
std::vector<Vec2d> one_period_even = make_one_period(width, scaleFactor, z_cos, z_sin, vertical, flip, tolerance);
|
||||
Polylines result;
|
||||
|
||||
for (double y0 = lower_bound; y0 < upper_bound+EPSILON; y0 += 2*M_PI) // creates odd polylines
|
||||
result.emplace_back(make_wave(one_period, width, height, y0, scaleFactor, z_cos, z_sin, vertical));
|
||||
|
||||
flip = !flip; // even polylines are a bit shifted
|
||||
one_period = make_one_period(width, scaleFactor, z_cos, z_sin, vertical, flip); // updates the one period sample
|
||||
for (double y0 = lower_bound + M_PI; y0 < upper_bound+EPSILON; y0 += 2*M_PI) // creates even polylines
|
||||
result.emplace_back(make_wave(one_period, width, height, y0, scaleFactor, z_cos, z_sin, vertical));
|
||||
for (double y0 = lower_bound; y0 < upper_bound + EPSILON; y0 += M_PI) {
|
||||
// creates odd polylines
|
||||
result.emplace_back(make_wave(one_period_odd, width, height, y0, scaleFactor, z_cos, z_sin, vertical, flip));
|
||||
// creates even polylines
|
||||
y0 += M_PI;
|
||||
if (y0 < upper_bound + EPSILON) {
|
||||
result.emplace_back(make_wave(one_period_even, width, height, y0, scaleFactor, z_cos, z_sin, vertical, flip));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -130,66 +152,49 @@ void FillGyroid::_fill_surface_single(
|
|||
ExPolygon &expolygon,
|
||||
Polylines &polylines_out)
|
||||
{
|
||||
// no rotation is supported for this infill pattern (yet)
|
||||
float infill_angle = this->angle + (CorrectionAngle * 2*M_PI) / 360.;
|
||||
if(abs(infill_angle) >= EPSILON)
|
||||
expolygon.rotate(-infill_angle);
|
||||
|
||||
BoundingBox bb = expolygon.contour.bounding_box();
|
||||
// Density adjusted so that the requested infill percentage corresponds to the extruded material weight.
|
||||
double density_adjusted = std::max(0., params.density * 2.44);
|
||||
double density_adjusted = std::max(0., params.density * DensityAdjust);
|
||||
// Distance between the gyroid waves in scaled coordinates.
|
||||
coord_t distance = coord_t(scale_(this->spacing) / density_adjusted);
|
||||
|
||||
// align bounding box to a multiple of our grid module
|
||||
bb.merge(_align_to_grid(bb.min, Point(2.*M_PI*distance, 2.*M_PI*distance)));
|
||||
bb.merge(_align_to_grid(bb.min, Point(2*M_PI*distance, 2*M_PI*distance)));
|
||||
|
||||
// generate pattern
|
||||
Polylines polylines = make_gyroid_waves(
|
||||
Polylines polylines_square = make_gyroid_waves(
|
||||
scale_(this->z),
|
||||
density_adjusted,
|
||||
this->spacing,
|
||||
ceil(bb.size()(0) / distance) + 1.,
|
||||
ceil(bb.size()(1) / distance) + 1.);
|
||||
|
||||
// move pattern in place
|
||||
for (Polyline &polyline : polylines)
|
||||
polyline.translate(bb.min(0), bb.min(1));
|
||||
|
||||
// clip pattern to boundaries
|
||||
polylines = intersection_pl(polylines, (Polygons)expolygon);
|
||||
// shift the polyline to the grid origin
|
||||
for (Polyline &pl : polylines_square)
|
||||
pl.translate(bb.min);
|
||||
|
||||
// connect lines
|
||||
if (! params.dont_connect && ! polylines.empty()) { // prevent calling leftmost_point() on empty collections
|
||||
ExPolygon expolygon_off;
|
||||
{
|
||||
ExPolygons expolygons_off = offset_ex(expolygon, (float)SCALED_EPSILON);
|
||||
if (! expolygons_off.empty()) {
|
||||
// When expanding a polygon, the number of islands could only shrink. Therefore the offset_ex shall generate exactly one expanded island for one input island.
|
||||
assert(expolygons_off.size() == 1);
|
||||
std::swap(expolygon_off, expolygons_off.front());
|
||||
}
|
||||
}
|
||||
Polylines chained = PolylineCollection::chained_path_from(
|
||||
std::move(polylines),
|
||||
PolylineCollection::leftmost_point(polylines), false); // reverse allowed
|
||||
bool first = true;
|
||||
for (Polyline &polyline : chained) {
|
||||
if (! first) {
|
||||
// Try to connect the lines.
|
||||
Points &pts_end = polylines_out.back().points;
|
||||
const Point &first_point = polyline.points.front();
|
||||
const Point &last_point = pts_end.back();
|
||||
// TODO: we should also check that both points are on a fill_boundary to avoid
|
||||
// connecting paths on the boundaries of internal regions
|
||||
// TODO: avoid crossing current infill path
|
||||
if ((last_point - first_point).cast<double>().norm() <= 5 * distance &&
|
||||
expolygon_off.contains(Line(last_point, first_point))) {
|
||||
// Append the polyline.
|
||||
pts_end.insert(pts_end.end(), polyline.points.begin(), polyline.points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// The lines cannot be connected.
|
||||
polylines_out.emplace_back(std::move(polyline));
|
||||
first = false;
|
||||
}
|
||||
Polylines polylines_chained = chain_polylines(intersection_pl(polylines_square, to_polygons(expolygon)));
|
||||
|
||||
size_t polylines_out_first_idx = polylines_out.size();
|
||||
if (! polylines_chained.empty()) {
|
||||
// connect lines
|
||||
if (params.dont_connect)
|
||||
append(polylines_out, std::move(polylines_chained));
|
||||
else
|
||||
this->connect_infill(std::move(polylines_chained), expolygon, polylines_out, params);
|
||||
// Remove polylines that are too short (shorter than three times the spacing).
|
||||
polylines_out.erase(
|
||||
std::remove_if(polylines_out.begin() + polylines_out_first_idx, polylines_out.end(), [this](const Polyline &pl){ return pl.length() < scale_(this->spacing * 3); }),
|
||||
polylines_out.end());
|
||||
// new paths must be rotated back
|
||||
if (abs(infill_angle) >= EPSILON) {
|
||||
for (auto it = polylines_out.begin() + polylines_out_first_idx; it != polylines_out.end(); ++ it)
|
||||
it->rotate(infill_angle);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -16,6 +16,17 @@ public:
|
|||
// Bridging flow is not used, even though much of this pattern hangs in air.
|
||||
virtual bool use_bridge_flow() const { return false; }
|
||||
|
||||
// Correction applied to regular infill angle to maximize printing
|
||||
// speed in default configuration (degrees)
|
||||
static constexpr float CorrectionAngle = -45.;
|
||||
|
||||
// Density adjustment so that the requested infill percentage corresponds to the extruded material weight.
|
||||
static constexpr double DensityAdjust = 2.44;
|
||||
|
||||
// Gyroid upper resolution tolerance (mm^-2)
|
||||
static constexpr double PatternTolerance = 0.2;
|
||||
|
||||
|
||||
protected:
|
||||
virtual void _fill_surface_single(
|
||||
const FillParams ¶ms,
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
#include "../ClipperUtils.hpp"
|
||||
#include "../PolylineCollection.hpp"
|
||||
#include "../ShortestPath.hpp"
|
||||
#include "../Surface.hpp"
|
||||
|
||||
#include "FillHoneycomb.hpp"
|
||||
|
@ -93,22 +93,20 @@ void FillHoneycomb::_fill_surface_single(
|
|||
|
||||
// connect paths
|
||||
if (! paths.empty()) { // prevent calling leftmost_point() on empty collections
|
||||
Polylines chained = PolylineCollection::chained_path_from(
|
||||
std::move(paths),
|
||||
PolylineCollection::leftmost_point(paths), false);
|
||||
Polylines chained = chain_polylines(std::move(paths));
|
||||
assert(paths.empty());
|
||||
paths.clear();
|
||||
for (Polylines::iterator it_path = chained.begin(); it_path != chained.end(); ++ it_path) {
|
||||
for (Polyline &path : chained) {
|
||||
if (! paths.empty()) {
|
||||
// distance between first point of this path and last point of last path
|
||||
double distance = (it_path->first_point() - paths.back().last_point()).cast<double>().norm();
|
||||
double distance = (path.first_point() - paths.back().last_point()).cast<double>().norm();
|
||||
if (distance <= m.hex_width) {
|
||||
paths.back().points.insert(paths.back().points.end(), it_path->points.begin(), it_path->points.end());
|
||||
paths.back().points.insert(paths.back().points.end(), path.points.begin(), path.points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// Don't connect the paths.
|
||||
paths.push_back(*it_path);
|
||||
paths.push_back(std::move(path));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
#include "../ClipperUtils.hpp"
|
||||
#include "../PolylineCollection.hpp"
|
||||
#include "../Surface.hpp"
|
||||
|
||||
#include "FillPlanePath.hpp"
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
#include "../ClipperUtils.hpp"
|
||||
#include "../ExPolygon.hpp"
|
||||
#include "../PolylineCollection.hpp"
|
||||
#include "../ShortestPath.hpp"
|
||||
#include "../Surface.hpp"
|
||||
|
||||
#include "FillRectilinear.hpp"
|
||||
|
@ -92,15 +92,12 @@ void FillRectilinear::_fill_surface_single(
|
|||
std::swap(expolygon_off, expolygons_off.front());
|
||||
}
|
||||
}
|
||||
Polylines chained = PolylineCollection::chained_path_from(
|
||||
std::move(polylines),
|
||||
PolylineCollection::leftmost_point(polylines), false); // reverse allowed
|
||||
bool first = true;
|
||||
for (Polylines::iterator it_polyline = chained.begin(); it_polyline != chained.end(); ++ it_polyline) {
|
||||
for (Polyline &polyline : chain_polylines(std::move(polylines))) {
|
||||
if (! first) {
|
||||
// Try to connect the lines.
|
||||
Points &pts_end = polylines_out.back().points;
|
||||
const Point &first_point = it_polyline->points.front();
|
||||
const Point &first_point = polyline.points.front();
|
||||
const Point &last_point = pts_end.back();
|
||||
// Distance in X, Y.
|
||||
const Vector distance = last_point - first_point;
|
||||
|
@ -109,12 +106,12 @@ void FillRectilinear::_fill_surface_single(
|
|||
if (this->_can_connect(std::abs(distance(0)), std::abs(distance(1))) &&
|
||||
expolygon_off.contains(Line(last_point, first_point))) {
|
||||
// Append the polyline.
|
||||
pts_end.insert(pts_end.end(), it_polyline->points.begin(), it_polyline->points.end());
|
||||
pts_end.insert(pts_end.end(), polyline.points.begin(), polyline.points.end());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
// The lines cannot be connected.
|
||||
polylines_out.emplace_back(std::move(*it_polyline));
|
||||
polylines_out.emplace_back(std::move(polyline));
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,6 +3,9 @@
|
|||
#include "../Utils.hpp"
|
||||
#include "../GCode.hpp"
|
||||
#include "../Geometry.hpp"
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
#include "../GCode/ThumbnailData.hpp"
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
#include "../I18N.hpp"
|
||||
|
||||
|
@ -31,7 +34,8 @@ namespace pt = boost::property_tree;
|
|||
// VERSION NUMBERS
|
||||
// 0 : .3mf, files saved by older slic3r or other applications. No version definition in them.
|
||||
// 1 : Introduction of 3mf versioning. No other change in data saved into 3mf files.
|
||||
const unsigned int VERSION_3MF = 1;
|
||||
// 2 : Meshes saved in their local system; Volumes' matrices and source data added to Metadata/Slic3r_PE_model.config file.
|
||||
const unsigned int VERSION_3MF = 2;
|
||||
const char* SLIC3RPE_3MF_VERSION = "slic3rpe:Version3mf"; // definition of the metadata name saved into .model file
|
||||
|
||||
const std::string MODEL_FOLDER = "3D/";
|
||||
|
@ -39,6 +43,9 @@ const std::string MODEL_EXTENSION = ".model";
|
|||
const std::string MODEL_FILE = "3D/3dmodel.model"; // << this is the only format of the string which works with CURA
|
||||
const std::string CONTENT_TYPES_FILE = "[Content_Types].xml";
|
||||
const std::string RELATIONSHIPS_FILE = "_rels/.rels";
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
const std::string THUMBNAIL_FILE = "Metadata/thumbnail.png";
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
const std::string PRINT_CONFIG_FILE = "Metadata/Slic3r_PE.config";
|
||||
const std::string MODEL_CONFIG_FILE = "Metadata/Slic3r_PE_model.config";
|
||||
const std::string LAYER_HEIGHTS_PROFILE_FILE = "Metadata/Slic3r_PE_layer_heights_profile.txt";
|
||||
|
@ -87,6 +94,13 @@ const char* VOLUME_TYPE = "volume";
|
|||
const char* NAME_KEY = "name";
|
||||
const char* MODIFIER_KEY = "modifier";
|
||||
const char* VOLUME_TYPE_KEY = "volume_type";
|
||||
const char* MATRIX_KEY = "matrix";
|
||||
const char* SOURCE_FILE_KEY = "source_file";
|
||||
const char* SOURCE_OBJECT_ID_KEY = "source_object_id";
|
||||
const char* SOURCE_VOLUME_ID_KEY = "source_volume_id";
|
||||
const char* SOURCE_OFFSET_X_KEY = "source_offset_x";
|
||||
const char* SOURCE_OFFSET_Y_KEY = "source_offset_y";
|
||||
const char* SOURCE_OFFSET_Z_KEY = "source_offset_z";
|
||||
|
||||
const unsigned int VALID_OBJECT_TYPES_COUNT = 1;
|
||||
const char* VALID_OBJECT_TYPES[] =
|
||||
|
@ -148,11 +162,15 @@ bool get_attribute_value_bool(const char** attributes, unsigned int attributes_s
|
|||
return (text != nullptr) ? (bool)::atoi(text) : true;
|
||||
}
|
||||
|
||||
Slic3r::Transform3d get_transform_from_string(const std::string& mat_str)
|
||||
Slic3r::Transform3d get_transform_from_3mf_specs_string(const std::string& mat_str)
|
||||
{
|
||||
// check: https://3mf.io/3d-manufacturing-format/ or https://github.com/3MFConsortium/spec_core/blob/master/3MF%20Core%20Specification.md
|
||||
// to see how matrices are stored inside 3mf according to specifications
|
||||
Slic3r::Transform3d ret = Slic3r::Transform3d::Identity();
|
||||
|
||||
if (mat_str.empty())
|
||||
// empty string means default identity matrix
|
||||
return Slic3r::Transform3d::Identity();
|
||||
return ret;
|
||||
|
||||
std::vector<std::string> mat_elements_str;
|
||||
boost::split(mat_elements_str, mat_str, boost::is_any_of(" "), boost::token_compress_on);
|
||||
|
@ -160,9 +178,8 @@ Slic3r::Transform3d get_transform_from_string(const std::string& mat_str)
|
|||
unsigned int size = (unsigned int)mat_elements_str.size();
|
||||
if (size != 12)
|
||||
// invalid data, return identity matrix
|
||||
return Slic3r::Transform3d::Identity();
|
||||
return ret;
|
||||
|
||||
Slic3r::Transform3d ret = Slic3r::Transform3d::Identity();
|
||||
unsigned int i = 0;
|
||||
// matrices are stored into 3mf files as 4x3
|
||||
// we need to transpose them
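// The 3mf transform attribute lists 12 values "m00 m01 m02 m10 ... m30 m31 m32" in the row-vector convention:
// the first nine form the transposed 3x3 linear part, the last three are the translation.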
|
||||
|
@ -1375,7 +1392,7 @@ namespace Slic3r {
|
|||
bool _3MF_Importer::_handle_start_component(const char** attributes, unsigned int num_attributes)
|
||||
{
|
||||
int object_id = get_attribute_value_int(attributes, num_attributes, OBJECTID_ATTR);
|
||||
Transform3d transform = get_transform_from_string(get_attribute_value_string(attributes, num_attributes, TRANSFORM_ATTR));
|
||||
Transform3d transform = get_transform_from_3mf_specs_string(get_attribute_value_string(attributes, num_attributes, TRANSFORM_ATTR));
|
||||
|
||||
IdToModelObjectMap::iterator object_item = m_objects.find(object_id);
|
||||
if (object_item == m_objects.end())
|
||||
|
@ -1421,7 +1438,7 @@ namespace Slic3r {
|
|||
// see specifications
|
||||
|
||||
int object_id = get_attribute_value_int(attributes, num_attributes, OBJECTID_ATTR);
|
||||
Transform3d transform = get_transform_from_string(get_attribute_value_string(attributes, num_attributes, TRANSFORM_ATTR));
|
||||
Transform3d transform = get_transform_from_3mf_specs_string(get_attribute_value_string(attributes, num_attributes, TRANSFORM_ATTR));
|
||||
int printable = get_attribute_value_bool(attributes, num_attributes, PRINTABLE_ATTR);
|
||||
|
||||
return _create_object_instance(object_id, transform, printable, 1);
|
||||
|
@ -1634,6 +1651,21 @@ namespace Slic3r {
|
|||
return false;
|
||||
}
|
||||
|
||||
Slic3r::Geometry::Transformation transform;
|
||||
if (m_version > 1)
|
||||
{
|
||||
// extract the volume transformation from the volume's metadata, if present
|
||||
for (const Metadata& metadata : volume_data.metadata)
|
||||
{
|
||||
if (metadata.key == MATRIX_KEY)
|
||||
{
|
||||
transform.set_from_string(metadata.value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
Transform3d inv_matrix = transform.get_matrix().inverse();
|
||||
|
||||
// splits volume out of imported geometry
|
||||
TriangleMesh triangle_mesh;
|
||||
stl_file &stl = triangle_mesh.stl;
|
||||
|
@ -1651,7 +1683,12 @@ namespace Slic3r {
|
|||
stl_facet& facet = stl.facet_start[i];
|
||||
for (unsigned int v = 0; v < 3; ++v)
|
||||
{
|
||||
::memcpy(facet.vertex[v].data(), (const void*)&geometry.vertices[geometry.triangles[src_start_id + ii + v] * 3], 3 * sizeof(float));
|
||||
unsigned int tri_id = geometry.triangles[src_start_id + ii + v] * 3;
|
||||
Vec3f vertex(geometry.vertices[tri_id + 0], geometry.vertices[tri_id + 1], geometry.vertices[tri_id + 2]);
|
||||
if (m_version > 1)
|
||||
// revert the vertices to the original mesh reference system
|
||||
vertex = (inv_matrix * vertex.cast<double>()).cast<float>();
|
||||
::memcpy(facet.vertex[v].data(), (const void*)vertex.data(), 3 * sizeof(float));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1659,10 +1696,12 @@ namespace Slic3r {
|
|||
triangle_mesh.repair();
|
||||
|
||||
ModelVolume* volume = object.add_volume(std::move(triangle_mesh));
|
||||
volume->center_geometry_after_creation();
|
||||
// apply the volume matrix taken from the metadata, if present
|
||||
if (m_version > 1)
|
||||
volume->set_transformation(transform);
|
||||
volume->calculate_convex_hull();
|
||||
|
||||
// apply volume's name and config data
|
||||
// apply the remaining volume's metadata
|
||||
for (const Metadata& metadata : volume_data.metadata)
|
||||
{
|
||||
if (metadata.key == NAME_KEY)
|
||||
|
@ -1671,6 +1710,18 @@ namespace Slic3r {
|
|||
volume->set_type(ModelVolumeType::PARAMETER_MODIFIER);
|
||||
else if (metadata.key == VOLUME_TYPE_KEY)
|
||||
volume->set_type(ModelVolume::type_from_string(metadata.value));
|
||||
else if (metadata.key == SOURCE_FILE_KEY)
|
||||
volume->source.input_file = metadata.value;
|
||||
else if (metadata.key == SOURCE_OBJECT_ID_KEY)
|
||||
volume->source.object_idx = ::atoi(metadata.value.c_str());
|
||||
else if (metadata.key == SOURCE_VOLUME_ID_KEY)
|
||||
volume->source.volume_idx = ::atoi(metadata.value.c_str());
|
||||
else if (metadata.key == SOURCE_OFFSET_X_KEY)
|
||||
volume->source.mesh_offset(0) = ::atof(metadata.value.c_str());
|
||||
else if (metadata.key == SOURCE_OFFSET_Y_KEY)
|
||||
volume->source.mesh_offset(1) = ::atof(metadata.value.c_str());
|
||||
else if (metadata.key == SOURCE_OFFSET_Z_KEY)
|
||||
volume->source.mesh_offset(2) = ::atof(metadata.value.c_str());
|
||||
else
|
||||
volume->config.set_deserialize(metadata.key, metadata.value);
|
||||
}
|
||||
|
@ -1761,11 +1812,22 @@ namespace Slic3r {
|
|||
typedef std::map<int, ObjectData> IdToObjectDataMap;
|
||||
|
||||
public:
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data = nullptr);
|
||||
#else
|
||||
bool save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
private:
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data);
|
||||
#else
|
||||
bool _save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _add_content_types_file_to_archive(mz_zip_archive& archive);
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _add_thumbnail_file_to_archive(mz_zip_archive& archive, const ThumbnailData& thumbnail_data);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _add_relationships_file_to_archive(mz_zip_archive& archive);
|
||||
bool _add_model_file_to_archive(mz_zip_archive& archive, const Model& model, IdToObjectDataMap &objects_data);
|
||||
bool _add_object_to_model_stream(std::stringstream& stream, unsigned int& object_id, ModelObject& object, BuildItemsList& build_items, VolumeToOffsetsMap& volumes_offsets);
|
||||
|
@ -1778,13 +1840,25 @@ namespace Slic3r {
|
|||
bool _add_model_config_file_to_archive(mz_zip_archive& archive, const Model& model, const IdToObjectDataMap &objects_data);
|
||||
};
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _3MF_Exporter::save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data)
|
||||
{
|
||||
clear_errors();
|
||||
return _save_model_to_file(filename, model, config, thumbnail_data);
|
||||
}
|
||||
#else
|
||||
bool _3MF_Exporter::save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config)
|
||||
{
|
||||
clear_errors();
|
||||
return _save_model_to_file(filename, model, config);
|
||||
}
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _3MF_Exporter::_save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data)
|
||||
#else
|
||||
bool _3MF_Exporter::_save_model_to_file(const std::string& filename, Model& model, const DynamicPrintConfig* config)
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
{
|
||||
mz_zip_archive archive;
|
||||
mz_zip_zero_struct(&archive);
|
||||
|
@ -1803,6 +1877,19 @@ namespace Slic3r {
|
|||
return false;
|
||||
}
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
if ((thumbnail_data != nullptr) && thumbnail_data->is_valid())
|
||||
{
|
||||
// Adds the file Metadata/thumbnail.png.
|
||||
if (!_add_thumbnail_file_to_archive(archive, *thumbnail_data))
|
||||
{
|
||||
close_zip_writer(&archive);
|
||||
boost::filesystem::remove(filename);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
// Adds relationships file ("_rels/.rels").
|
||||
// The content of this file is the same for each PrusaSlicer 3mf.
|
||||
// The relationships file contains a reference to the geometry file "3D/3dmodel.model"; the name was chosen to be compatible with CURA.
|
||||
|
@ -1896,6 +1983,9 @@ namespace Slic3r {
|
|||
stream << "<Types xmlns=\"http://schemas.openxmlformats.org/package/2006/content-types\">\n";
|
||||
stream << " <Default Extension=\"rels\" ContentType=\"application/vnd.openxmlformats-package.relationships+xml\" />\n";
|
||||
stream << " <Default Extension=\"model\" ContentType=\"application/vnd.ms-package.3dmanufacturing-3dmodel+xml\" />\n";
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
stream << " <Default Extension=\"png\" ContentType=\"image/png\" />\n";
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
stream << "</Types>";
|
||||
|
||||
std::string out = stream.str();
|
||||
|
@ -1909,12 +1999,35 @@ namespace Slic3r {
|
|||
return true;
|
||||
}
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool _3MF_Exporter::_add_thumbnail_file_to_archive(mz_zip_archive& archive, const ThumbnailData& thumbnail_data)
|
||||
{
|
||||
bool res = false;
|
||||
|
||||
size_t png_size = 0;
|
||||
void* png_data = tdefl_write_image_to_png_file_in_memory_ex((const void*)thumbnail_data.pixels.data(), thumbnail_data.width, thumbnail_data.height, 4, &png_size, MZ_DEFAULT_LEVEL, 1);
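// The trailing 1 asks miniz to flip the image vertically; the thumbnail pixels are assumed to be stored bottom-up (as read back from the framebuffer).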
|
||||
if (png_data != nullptr)
|
||||
{
|
||||
res = mz_zip_writer_add_mem(&archive, THUMBNAIL_FILE.c_str(), (const void*)png_data, png_size, MZ_DEFAULT_COMPRESSION);
|
||||
mz_free(png_data);
|
||||
}
|
||||
|
||||
if (!res)
|
||||
add_error("Unable to add thumbnail file to archive");
|
||||
|
||||
return res;
|
||||
}
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
bool _3MF_Exporter::_add_relationships_file_to_archive(mz_zip_archive& archive)
|
||||
{
|
||||
std::stringstream stream;
|
||||
stream << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
|
||||
stream << "<Relationships xmlns=\"http://schemas.openxmlformats.org/package/2006/relationships\">\n";
|
||||
stream << " <Relationship Target=\"/" << MODEL_FILE << "\" Id=\"rel-1\" Type=\"http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel\" />\n";
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
stream << " <Relationship Target=\"/" << THUMBNAIL_FILE << "\" Id=\"rel-2\" Type=\"http://schemas.openxmlformats.org/package/2006/relationships/metadata/thumbnail\" />\n";
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
stream << "</Relationships>";
|
||||
|
||||
std::string out = stream.str();
|
||||
|
@ -2116,7 +2229,7 @@ namespace Slic3r {
|
|||
|
||||
for (const BuildItem& item : build_items)
|
||||
{
|
||||
stream << " <" << ITEM_TAG << " objectid=\"" << item.id << "\" transform =\"";
|
||||
stream << " <" << ITEM_TAG << " " << OBJECTID_ATTR << "=\"" << item.id << "\" " << TRANSFORM_ATTR << "=\"";
|
||||
for (unsigned c = 0; c < 4; ++c)
|
||||
{
|
||||
for (unsigned r = 0; r < 3; ++r)
|
||||
|
@ -2126,7 +2239,7 @@ namespace Slic3r {
|
|||
stream << " ";
|
||||
}
|
||||
}
|
||||
stream << "\" printable =\"" << item.printable << "\" />\n";
|
||||
stream << "\" " << PRINTABLE_ATTR << "=\"" << item.printable << "\" />\n";
|
||||
}
|
||||
|
||||
stream << " </" << BUILD_TAG << ">\n";
|
||||
|
@ -2344,6 +2457,31 @@ namespace Slic3r {
|
|||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << VOLUME_TYPE_KEY << "\" " <<
|
||||
VALUE_ATTR << "=\"" << ModelVolume::type_to_string(volume->type()) << "\"/>\n";
|
||||
|
||||
// stores volume's local matrix
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << MATRIX_KEY << "\" " << VALUE_ATTR << "=\"";
|
||||
const Transform3d& matrix = volume->get_matrix();
|
||||
for (int r = 0; r < 4; ++r)
|
||||
{
|
||||
for (int c = 0; c < 4; ++c)
|
||||
{
|
||||
stream << matrix(r, c);
|
||||
if ((r != 3) || (c != 3))
|
||||
stream << " ";
|
||||
}
|
||||
}
|
||||
stream << "\"/>\n";
|
||||
|
||||
// stores volume's source data
|
||||
if (!volume->source.input_file.empty())
|
||||
{
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_FILE_KEY << "\" " << VALUE_ATTR << "=\"" << xml_escape(volume->source.input_file) << "\"/>\n";
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_OBJECT_ID_KEY << "\" " << VALUE_ATTR << "=\"" << volume->source.object_idx << "\"/>\n";
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_VOLUME_ID_KEY << "\" " << VALUE_ATTR << "=\"" << volume->source.volume_idx << "\"/>\n";
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_OFFSET_X_KEY << "\" " << VALUE_ATTR << "=\"" << volume->source.mesh_offset(0) << "\"/>\n";
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_OFFSET_Y_KEY << "\" " << VALUE_ATTR << "=\"" << volume->source.mesh_offset(1) << "\"/>\n";
|
||||
stream << " <" << METADATA_TAG << " " << TYPE_ATTR << "=\"" << VOLUME_TYPE << "\" " << KEY_ATTR << "=\"" << SOURCE_OFFSET_Z_KEY << "\" " << VALUE_ATTR << "=\"" << volume->source.mesh_offset(2) << "\"/>\n";
|
||||
}
|
||||
|
||||
// stores volume's config data
|
||||
for (const std::string& key : volume->config.keys())
|
||||
{
|
||||
|
@ -2383,13 +2521,21 @@ namespace Slic3r {
|
|||
return res;
|
||||
}
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool store_3mf(const char* path, Model* model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data)
|
||||
#else
|
||||
bool store_3mf(const char* path, Model* model, const DynamicPrintConfig* config)
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
{
|
||||
if ((path == nullptr) || (model == nullptr))
|
||||
return false;
|
||||
|
||||
_3MF_Exporter exporter;
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
bool res = exporter.save_model_to_file(path, *model, config, thumbnail_data);
|
||||
#else
|
||||
bool res = exporter.save_model_to_file(path, *model, config);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
if (!res)
|
||||
exporter.log_errors();
|
||||
|
|
|
@ -22,13 +22,20 @@ namespace Slic3r {
|
|||
|
||||
class Model;
|
||||
class DynamicPrintConfig;
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
struct ThumbnailData;
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
// Load the content of a 3mf file into the given model and preset bundle.
|
||||
extern bool load_3mf(const char* path, DynamicPrintConfig* config, Model* model, bool check_version);
|
||||
|
||||
// Save the given model and the config data contained in the given Print into a 3mf file.
|
||||
// The model could be modified during the export process if meshes are not repaired or have no shared vertices
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
extern bool store_3mf(const char* path, Model* model, const DynamicPrintConfig* config, const ThumbnailData* thumbnail_data = nullptr);
|
||||
#else
|
||||
extern bool store_3mf(const char* path, Model* model, const DynamicPrintConfig* config);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
}; // namespace Slic3r
|
||||
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include "../PrintConfig.hpp"
|
||||
#include "../Utils.hpp"
|
||||
#include "../I18N.hpp"
|
||||
#include "../Geometry.hpp"
|
||||
|
||||
#include "AMF.hpp"
|
||||
|
||||
|
@ -36,7 +37,8 @@
|
|||
// Added x and y components of rotation
|
||||
// Added x, y and z components of scale
|
||||
// Added x, y and z components of mirror
|
||||
const unsigned int VERSION_AMF = 2;
|
||||
// 3 : Meshes saved in their local system; Added volumes' matrices and source data
|
||||
const unsigned int VERSION_AMF = 3;
|
||||
const char* SLIC3RPE_AMF_VERSION = "slic3rpe_amf_version";
|
||||
|
||||
const char* SLIC3R_CONFIG_TYPE = "slic3rpe_config";
|
||||
|
@ -560,15 +562,38 @@ void AMFParserContext::endElement(const char * /* name */)
|
|||
stl.stats.number_of_facets = int(m_volume_facets.size() / 3);
|
||||
stl.stats.original_num_facets = stl.stats.number_of_facets;
|
||||
stl_allocate(&stl);
|
||||
|
||||
Slic3r::Geometry::Transformation transform;
|
||||
if (m_version > 2)
|
||||
transform = m_volume->get_transformation();
|
||||
|
||||
Transform3d inv_matrix = transform.get_matrix().inverse();
|
||||
|
||||
for (size_t i = 0; i < m_volume_facets.size();) {
|
||||
stl_facet &facet = stl.facet_start[i/3];
|
||||
for (unsigned int v = 0; v < 3; ++ v)
|
||||
memcpy(facet.vertex[v].data(), &m_object_vertices[m_volume_facets[i ++] * 3], 3 * sizeof(float));
|
||||
for (unsigned int v = 0; v < 3; ++v)
|
||||
{
|
||||
unsigned int tri_id = m_volume_facets[i++] * 3;
|
||||
Vec3f vertex(m_object_vertices[tri_id + 0], m_object_vertices[tri_id + 1], m_object_vertices[tri_id + 2]);
|
||||
if (m_version > 2)
|
||||
// revert the vertices to the original mesh reference system
|
||||
vertex = (inv_matrix * vertex.cast<double>()).cast<float>();
|
||||
::memcpy((void*)facet.vertex[v].data(), (const void*)vertex.data(), 3 * sizeof(float));
|
||||
}
|
||||
}
|
||||
stl_get_size(&stl);
|
||||
mesh.repair();
|
||||
m_volume->set_mesh(std::move(mesh));
|
||||
m_volume->center_geometry_after_creation();
|
||||
if (m_volume->source.input_file.empty() && (m_volume->type() == ModelVolumeType::MODEL_PART))
|
||||
{
|
||||
m_volume->source.object_idx = (int)m_model.objects.size() - 1;
|
||||
m_volume->source.volume_idx = (int)m_model.objects.back()->volumes.size() - 1;
|
||||
m_volume->center_geometry_after_creation();
|
||||
}
|
||||
else
|
||||
// Pass false if the mesh offset has already been taken from the data.
|
||||
m_volume->center_geometry_after_creation(m_volume->source.input_file.empty());
|
||||
|
||||
m_volume->calculate_convex_hull();
|
||||
m_volume_facets.clear();
|
||||
m_volume = nullptr;
|
||||
|
@ -664,6 +689,29 @@ void AMFParserContext::endElement(const char * /* name */)
|
|||
} else if (strcmp(opt_key, "volume_type") == 0) {
|
||||
m_volume->set_type(ModelVolume::type_from_string(m_value[1]));
|
||||
}
|
||||
else if (strcmp(opt_key, "matrix") == 0) {
|
||||
Geometry::Transformation transform;
|
||||
transform.set_from_string(m_value[1]);
|
||||
m_volume->set_transformation(transform);
|
||||
}
|
||||
else if (strcmp(opt_key, "source_file") == 0) {
|
||||
m_volume->source.input_file = m_value[1];
|
||||
}
|
||||
else if (strcmp(opt_key, "source_object_id") == 0) {
|
||||
m_volume->source.object_idx = ::atoi(m_value[1].c_str());
|
||||
}
|
||||
else if (strcmp(opt_key, "source_volume_id") == 0) {
|
||||
m_volume->source.volume_idx = ::atoi(m_value[1].c_str());
|
||||
}
|
||||
else if (strcmp(opt_key, "source_offset_x") == 0) {
|
||||
m_volume->source.mesh_offset(0) = ::atof(m_value[1].c_str());
|
||||
}
|
||||
else if (strcmp(opt_key, "source_offset_y") == 0) {
|
||||
m_volume->source.mesh_offset(1) = ::atof(m_value[1].c_str());
|
||||
}
|
||||
else if (strcmp(opt_key, "source_offset_z") == 0) {
|
||||
m_volume->source.mesh_offset(2) = ::atof(m_value[1].c_str());
|
||||
}
|
||||
}
|
||||
} else if (m_path.size() == 3) {
|
||||
if (m_path[1] == NODE_TYPE_MATERIAL) {
|
||||
|
@ -759,6 +807,15 @@ bool load_amf_file(const char *path, DynamicPrintConfig *config, Model *model)
|
|||
if (result)
|
||||
ctx.endDocument();
|
||||
|
||||
for (ModelObject* o : model->objects)
|
||||
{
|
||||
for (ModelVolume* v : o->volumes)
|
||||
{
|
||||
if (v->source.input_file.empty() && (v->type() == ModelVolumeType::MODEL_PART))
|
||||
v->source.input_file = path;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -1057,7 +1114,28 @@ bool store_amf(const char *path, Model *model, const DynamicPrintConfig *config)
            if (volume->is_modifier())
                stream << " <metadata type=\"slic3r.modifier\">1</metadata>\n";
            stream << " <metadata type=\"slic3r.volume_type\">" << ModelVolume::type_to_string(volume->type()) << "</metadata>\n";
            const indexed_triangle_set &its = volume->mesh().its;
            stream << " <metadata type=\"slic3r.matrix\">";
            const Transform3d& matrix = volume->get_matrix();
            for (int r = 0; r < 4; ++r)
            {
                for (int c = 0; c < 4; ++c)
                {
                    stream << matrix(r, c);
                    if ((r != 3) || (c != 3))
                        stream << " ";
                }
            }
            stream << "</metadata>\n";
            if (!volume->source.input_file.empty())
            {
                stream << " <metadata type=\"slic3r.source_file\">" << xml_escape(volume->source.input_file) << "</metadata>\n";
                stream << " <metadata type=\"slic3r.source_object_id\">" << volume->source.object_idx << "</metadata>\n";
                stream << " <metadata type=\"slic3r.source_volume_id\">" << volume->source.volume_idx << "</metadata>\n";
                stream << " <metadata type=\"slic3r.source_offset_x\">" << volume->source.mesh_offset(0) << "</metadata>\n";
                stream << " <metadata type=\"slic3r.source_offset_y\">" << volume->source.mesh_offset(1) << "</metadata>\n";
                stream << " <metadata type=\"slic3r.source_offset_z\">" << volume->source.mesh_offset(2) << "</metadata>\n";
            }
            const indexed_triangle_set &its = volume->mesh().its;
            for (size_t i = 0; i < its.indices.size(); ++i) {
                stream << "      <triangle>\n";
                for (int j = 0; j < 3; ++j)
|
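For reference, the slic3r.matrix metadata emitted above is sixteen space-separated values in row-major order, and on import the string is presumably handed back to Geometry::Transformation::set_from_string. A minimal, self-contained sketch of that round trip, using hypothetical helper names and only the standard library rather than the Slic3r types:

    #include <array>
    #include <sstream>
    #include <string>

    // Hypothetical helpers mirroring the slic3r.matrix payload: sixteen
    // space-separated values, row-major, no trailing separator.
    static std::string matrix_to_string(const std::array<double, 16> &m)
    {
        std::ostringstream out;
        for (size_t i = 0; i < 16; ++i) {
            out << m[i];
            if (i + 1 < 16)
                out << " ";
        }
        return out.str();
    }

    static std::array<double, 16> matrix_from_string(const std::string &s)
    {
        std::array<double, 16> m{};
        std::istringstream in(s);
        for (size_t i = 0; i < 16; ++i)
            in >> m[i];
        return m;
    }

The source_offset_* and source_*_id metadata follow the same pattern: plain decimal text written by store_amf and parsed back with atof/atoi on load.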
@ -15,39 +15,41 @@
|
|||
|
||||
namespace Slic3r {
|
||||
|
||||
bool load_obj(const char *path, Model *model, const char *object_name_in)
|
||||
bool load_obj(const char *path, TriangleMesh *meshptr)
|
||||
{
|
||||
if(meshptr == nullptr) return false;
|
||||
|
||||
// Parse the OBJ file.
|
||||
ObjParser::ObjData data;
|
||||
if (! ObjParser::objparse(path, data)) {
|
||||
// die "Failed to parse $file\n" if !-e $path;
|
||||
// die "Failed to parse $file\n" if !-e $path;
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
// Count the faces and verify, that all faces are triangular.
|
||||
size_t num_faces = 0;
|
||||
size_t num_quads = 0;
|
||||
size_t num_quads = 0;
|
||||
for (size_t i = 0; i < data.vertices.size(); ) {
|
||||
size_t j = i;
|
||||
for (; j < data.vertices.size() && data.vertices[j].coordIdx != -1; ++ j) ;
|
||||
if (i == j)
|
||||
continue;
|
||||
size_t face_vertices = j - i;
|
||||
if (face_vertices != 3 && face_vertices != 4) {
|
||||
size_t face_vertices = j - i;
|
||||
if (face_vertices != 3 && face_vertices != 4) {
|
||||
// Non-triangular and non-quad faces are not supported as of now.
|
||||
return false;
|
||||
}
|
||||
if (face_vertices == 4)
|
||||
++ num_quads;
|
||||
++ num_faces;
|
||||
if (face_vertices == 4)
|
||||
++ num_quads;
|
||||
++ num_faces;
|
||||
i = j + 1;
|
||||
}
|
||||
|
||||
|
||||
// Convert ObjData into STL.
|
||||
TriangleMesh mesh;
|
||||
TriangleMesh &mesh = *meshptr;
|
||||
stl_file &stl = mesh.stl;
|
||||
stl.stats.type = inmemory;
|
||||
stl.stats.number_of_facets = int(num_faces + num_quads);
|
||||
stl.stats.number_of_facets = uint32_t(num_faces + num_quads);
|
||||
stl.stats.original_num_facets = int(num_faces + num_quads);
|
||||
// stl_allocate clears all the allocated data to zero, all normals are set to zeros as well.
|
||||
stl_allocate(&stl);
|
||||
|
@ -68,14 +70,14 @@ bool load_obj(const char *path, Model *model, const char *object_name_in)
|
|||
++ num_normals;
|
||||
}
|
||||
}
|
||||
if (data.vertices[i].coordIdx != -1) {
|
||||
// This is a quad. Produce the other triangle.
|
||||
stl_facet &facet2 = stl.facet_start[i_face++];
|
||||
if (data.vertices[i].coordIdx != -1) {
|
||||
// This is a quad. Produce the other triangle.
|
||||
stl_facet &facet2 = stl.facet_start[i_face++];
|
||||
facet2.vertex[0] = facet.vertex[0];
|
||||
facet2.vertex[1] = facet.vertex[2];
|
||||
const ObjParser::ObjVertex &vertex = data.vertices[i++];
|
||||
memcpy(facet2.vertex[2].data(), &data.coordinates[vertex.coordIdx * 4], 3 * sizeof(float));
|
||||
if (vertex.normalIdx != -1) {
|
||||
const ObjParser::ObjVertex &vertex = data.vertices[i++];
|
||||
memcpy(facet2.vertex[2].data(), &data.coordinates[vertex.coordIdx * 4], 3 * sizeof(float));
|
||||
if (vertex.normalIdx != -1) {
|
||||
normal(0) += data.normals[vertex.normalIdx*3];
|
||||
normal(1) += data.normals[vertex.normalIdx*3+1];
|
||||
normal(2) += data.normals[vertex.normalIdx*3+2];
|
||||
|
@ -96,25 +98,37 @@ bool load_obj(const char *path, Model *model, const char *object_name_in)
|
|||
if (len > EPSILON)
|
||||
facet.normal = normal / len;
|
||||
}
|
||||
}
|
||||
}
|
||||
stl_get_size(&stl);
|
||||
mesh.repair();
|
||||
if (mesh.facets_count() == 0) {
|
||||
// die "This STL file couldn't be read because it's empty.\n"
|
||||
// die "This OBJ file couldn't be read because it's empty.\n"
|
||||
return false;
|
||||
}
|
||||
|
||||
std::string object_name;
|
||||
if (object_name_in == nullptr) {
|
||||
const char *last_slash = strrchr(path, DIR_SEPARATOR);
|
||||
object_name.assign((last_slash == nullptr) ? path : last_slash + 1);
|
||||
} else
|
||||
object_name.assign(object_name_in);
|
||||
|
||||
model->add_object(object_name.c_str(), path, std::move(mesh));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool load_obj(const char *path, Model *model, const char *object_name_in)
|
||||
{
|
||||
TriangleMesh mesh;
|
||||
|
||||
bool ret = load_obj(path, &mesh);
|
||||
|
||||
if (ret) {
|
||||
std::string object_name;
|
||||
if (object_name_in == nullptr) {
|
||||
const char *last_slash = strrchr(path, DIR_SEPARATOR);
|
||||
object_name.assign((last_slash == nullptr) ? path : last_slash + 1);
|
||||
} else
|
||||
object_name.assign(object_name_in);
|
||||
|
||||
model->add_object(object_name.c_str(), path, std::move(mesh));
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
bool store_obj(const char *path, TriangleMesh *mesh)
|
||||
{
|
||||
//FIXME returning false even if write failed.
|
||||
|
|
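The refactor above splits OBJ import into a mesh-level load_obj(path, TriangleMesh*) plus a thin Model wrapper. A usage sketch under the assumption that the declarations shown in this diff are available; the include paths and the object name are illustrative only:

    #include "Model.hpp"        // include paths are illustrative
    #include "Format/OBJ.hpp"

    // Usage sketch only: parse the OBJ into a mesh first, then decide how to add it.
    bool import_obj_into_model(Slic3r::Model &model, const char *path)
    {
        Slic3r::TriangleMesh mesh;
        if (! Slic3r::load_obj(path, &mesh))
            return false;
        // Same two steps the convenience overload above performs; the object
        // name would normally be derived from the file name.
        model.add_object("object", path, std::move(mesh));
        return true;
    }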
|
@ -5,8 +5,10 @@ namespace Slic3r {

class TriangleMesh;
class Model;
class ModelObject;

// Load an OBJ file into a provided model.
extern bool load_obj(const char *path, TriangleMesh *mesh);
extern bool load_obj(const char *path, Model *model, const char *object_name = nullptr);

extern bool store_obj(const char *path, TriangleMesh *mesh);
@ -6,6 +6,10 @@
#include "Geometry.hpp"
#include "GCode/PrintExtents.hpp"
#include "GCode/WipeTower.hpp"
#if ENABLE_THUMBNAIL_GENERATOR
#include "GCode/ThumbnailData.hpp"
#endif // ENABLE_THUMBNAIL_GENERATOR
#include "ShortestPath.hpp"
#include "Utils.hpp"

#include <algorithm>

@ -17,6 +21,9 @@
#include <boost/foreach.hpp>
#include <boost/filesystem.hpp>
#include <boost/log/trivial.hpp>
#if ENABLE_THUMBNAIL_GENERATOR
#include <boost/beast/core/detail/base64.hpp>
#endif // ENABLE_THUMBNAIL_GENERATOR

#include <boost/nowide/iostream.hpp>
#include <boost/nowide/cstdio.hpp>

@ -28,6 +35,10 @@
#include <Shiny/Shiny.h>

#if ENABLE_THUMBNAIL_GENERATOR_PNG_TO_GCODE
#include "miniz_extension.hpp"
#endif // ENABLE_THUMBNAIL_GENERATOR_PNG_TO_GCODE

#if 0
// Enable debugging and asserts, even in the release build.
#define DEBUG
@ -116,11 +127,11 @@ Polygons AvoidCrossingPerimeters::collect_contours_all_layers(const PrintObjectP
        const Layer* layer1 = object->layers()[i * 2];
        const Layer* layer2 = object->layers()[i * 2 + 1];
        Polygons polys;
        polys.reserve(layer1->slices.expolygons.size() + layer2->slices.expolygons.size());
        for (const ExPolygon &expoly : layer1->slices.expolygons)
        polys.reserve(layer1->slices.size() + layer2->slices.size());
        for (const ExPolygon &expoly : layer1->slices)
            //FIXME no holes?
            polys.emplace_back(expoly.contour);
        for (const ExPolygon &expoly : layer2->slices.expolygons)
        for (const ExPolygon &expoly : layer2->slices)
            //FIXME no holes?
            polys.emplace_back(expoly.contour);
        polygons_per_layer[i] = union_(polys);

@ -129,8 +140,8 @@ Polygons AvoidCrossingPerimeters::collect_contours_all_layers(const PrintObjectP
    if (object->layers().size() & 1) {
        const Layer *layer = object->layers().back();
        Polygons polys;
        polys.reserve(layer->slices.expolygons.size());
        for (const ExPolygon &expoly : layer->slices.expolygons)
        polys.reserve(layer->slices.size());
        for (const ExPolygon &expoly : layer->slices)
            //FIXME no holes?
            polys.emplace_back(expoly.contour);
        polygons_per_layer.back() = union_(polys);
@ -506,7 +517,7 @@ std::string WipeTowerIntegration::prime(GCode &gcodegen)
|
|||
std::string WipeTowerIntegration::tool_change(GCode &gcodegen, int extruder_id, bool finish_layer)
|
||||
{
|
||||
std::string gcode;
|
||||
assert(m_layer_idx >= 0 && size_t(m_layer_idx) <= m_tool_changes.size());
|
||||
assert(m_layer_idx >= 0);
|
||||
if (! m_brim_done || gcodegen.writer().need_toolchange(extruder_id) || finish_layer) {
|
||||
if (m_layer_idx < (int)m_tool_changes.size()) {
|
||||
if (! (size_t(m_tool_change_idx) < m_tool_changes[m_layer_idx].size()))
|
||||
|
@ -542,7 +553,7 @@ std::vector<GCode::LayerToPrint> GCode::collect_layers_to_print(const PrintObjec
|
|||
//FIXME should we use the printing extruders instead?
|
||||
double gap_over_supports = object.config().support_material_contact_distance;
|
||||
// FIXME should we test object.config().support_material_synchronize_layers ? Currently the support layers are synchronized with object layers iff soluble supports.
|
||||
assert(gap_over_supports != 0. || object.config().support_material_synchronize_layers);
|
||||
assert(! object.config().support_material || gap_over_supports != 0. || object.config().support_material_synchronize_layers);
|
||||
if (gap_over_supports != 0.) {
|
||||
gap_over_supports = std::max(0., gap_over_supports);
|
||||
// Not a soluble support,
|
||||
|
@ -651,7 +662,11 @@ std::vector<std::pair<coordf_t, std::vector<GCode::LayerToPrint>>> GCode::collec
|
|||
return layers_to_print;
|
||||
}
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
void GCode::do_export(Print* print, const char* path, GCodePreviewData* preview_data, const std::vector<ThumbnailData>* thumbnail_data)
|
||||
#else
|
||||
void GCode::do_export(Print *print, const char *path, GCodePreviewData *preview_data)
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
{
|
||||
PROFILE_CLEAR();
|
||||
|
||||
|
@ -677,7 +692,11 @@ void GCode::do_export(Print *print, const char *path, GCodePreviewData *preview_
|
|||
|
||||
try {
|
||||
m_placeholder_parser_failed_templates.clear();
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
this->_do_export(*print, file, thumbnail_data);
|
||||
#else
|
||||
this->_do_export(*print, file);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
fflush(file);
|
||||
if (ferror(file)) {
|
||||
fclose(file);
|
||||
|
@ -741,7 +760,11 @@ void GCode::do_export(Print *print, const char *path, GCodePreviewData *preview_
|
|||
PROFILE_OUTPUT(debug_out_path("gcode-export-profile.txt").c_str());
|
||||
}
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
void GCode::_do_export(Print& print, FILE* file, const std::vector<ThumbnailData>* thumbnail_data)
|
||||
#else
|
||||
void GCode::_do_export(Print &print, FILE *file)
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
{
|
||||
PROFILE_FUNC();
|
||||
|
||||
|
@ -777,22 +800,26 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
{
|
||||
m_silent_time_estimator.reset();
|
||||
m_silent_time_estimator.set_dialect(print.config().gcode_flavor);
|
||||
m_silent_time_estimator.set_max_acceleration((float)print.config().machine_max_acceleration_extruding.values[1]);
|
||||
m_silent_time_estimator.set_retract_acceleration((float)print.config().machine_max_acceleration_retracting.values[1]);
|
||||
m_silent_time_estimator.set_minimum_feedrate((float)print.config().machine_min_extruding_rate.values[1]);
|
||||
m_silent_time_estimator.set_minimum_travel_feedrate((float)print.config().machine_min_travel_rate.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::X, (float)print.config().machine_max_acceleration_x.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::Y, (float)print.config().machine_max_acceleration_y.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::Z, (float)print.config().machine_max_acceleration_z.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::E, (float)print.config().machine_max_acceleration_e.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::X, (float)print.config().machine_max_feedrate_x.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::Y, (float)print.config().machine_max_feedrate_y.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::Z, (float)print.config().machine_max_feedrate_z.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::E, (float)print.config().machine_max_feedrate_e.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::X, (float)print.config().machine_max_jerk_x.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::Y, (float)print.config().machine_max_jerk_y.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::Z, (float)print.config().machine_max_jerk_z.values[1]);
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::E, (float)print.config().machine_max_jerk_e.values[1]);
|
||||
/* "Stealth mode" values can be just a copy of "normal mode" values
|
||||
* (when they aren't input for a printer preset).
|
||||
* Thus, use the last value from values instead of the second one, which could be absent
|
||||
*/
|
||||
m_silent_time_estimator.set_max_acceleration((float)print.config().machine_max_acceleration_extruding.values.back());
|
||||
m_silent_time_estimator.set_retract_acceleration((float)print.config().machine_max_acceleration_retracting.values.back());
|
||||
m_silent_time_estimator.set_minimum_feedrate((float)print.config().machine_min_extruding_rate.values.back());
|
||||
m_silent_time_estimator.set_minimum_travel_feedrate((float)print.config().machine_min_travel_rate.values.back());
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::X, (float)print.config().machine_max_acceleration_x.values.back());
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::Y, (float)print.config().machine_max_acceleration_y.values.back());
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::Z, (float)print.config().machine_max_acceleration_z.values.back());
|
||||
m_silent_time_estimator.set_axis_max_acceleration(GCodeTimeEstimator::E, (float)print.config().machine_max_acceleration_e.values.back());
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::X, (float)print.config().machine_max_feedrate_x.values.back());
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::Y, (float)print.config().machine_max_feedrate_y.values.back());
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::Z, (float)print.config().machine_max_feedrate_z.values.back());
|
||||
m_silent_time_estimator.set_axis_max_feedrate(GCodeTimeEstimator::E, (float)print.config().machine_max_feedrate_e.values.back());
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::X, (float)print.config().machine_max_jerk_x.values.back());
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::Y, (float)print.config().machine_max_jerk_y.values.back());
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::Z, (float)print.config().machine_max_jerk_z.values.back());
|
||||
m_silent_time_estimator.set_axis_max_jerk(GCodeTimeEstimator::E, (float)print.config().machine_max_jerk_e.values.back());
|
||||
if (print.config().single_extruder_multi_material) {
|
||||
// As of now the fields are shown at the UI dialog in the same combo box as the ramming values, so they
|
||||
// are considered to be active for the single extruder multi-material printers only.
|
||||
|
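Switching from values[1] to values.back() lets the "stealth mode" settings degrade gracefully when a profile only defines the normal-mode entry. A tiny standalone illustration with a plain std::vector (the config option classes are not reproduced here):

    #include <cassert>
    #include <vector>

    int main()
    {
        // Profile providing both normal-mode and stealth-mode values.
        std::vector<double> both   { 1000.0, 960.0 };
        // Profile where the stealth-mode value was never entered.
        std::vector<double> normal { 1000.0 };

        assert(both.back()   == 960.0);    // same as both[1]
        assert(normal.back() == 1000.0);   // normal[1] would be out of range
        return 0;
    }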
@ -929,6 +956,77 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
|
||||
// Write information on the generator.
|
||||
_write_format(file, "; %s\n\n", Slic3r::header_slic3r_generated().c_str());
|
||||
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
// Write thumbnails using base64 encoding
|
||||
if (thumbnail_data != nullptr)
|
||||
{
|
||||
const unsigned int max_row_length = 78;
|
||||
|
||||
for (const ThumbnailData& data : *thumbnail_data)
|
||||
{
|
||||
if (data.is_valid())
|
||||
{
|
||||
#if ENABLE_THUMBNAIL_GENERATOR_PNG_TO_GCODE
|
||||
size_t png_size = 0;
|
||||
void* png_data = tdefl_write_image_to_png_file_in_memory_ex((const void*)data.pixels.data(), data.width, data.height, 4, &png_size, MZ_DEFAULT_LEVEL, 1);
|
||||
if (png_data != nullptr)
|
||||
{
|
||||
_write_format(file, "\n;\n; thumbnail begin %dx%d\n", data.width, data.height);
|
||||
|
||||
std::string encoded = boost::beast::detail::base64_encode((const std::uint8_t*)png_data, png_size);
|
||||
|
||||
unsigned int row_count = 0;
|
||||
while (encoded.length() > max_row_length)
|
||||
{
|
||||
_write_format(file, "; %s\n", encoded.substr(0, max_row_length).c_str());
|
||||
encoded = encoded.substr(max_row_length);
|
||||
++row_count;
|
||||
}
|
||||
|
||||
if (encoded.length() > 0)
|
||||
_write_format(file, "; %s\n", encoded.c_str());
|
||||
|
||||
_write(file, "; thumbnail end\n;\n");
|
||||
|
||||
mz_free(png_data);
|
||||
}
|
||||
#else
|
||||
_write_format(file, "\n;\n; thumbnail begin %dx%d\n", data.width, data.height);
|
||||
|
||||
size_t row_size = 4 * data.width;
|
||||
for (int r = (int)data.height - 1; r >= 0; --r)
|
||||
{
|
||||
std::string encoded = boost::beast::detail::base64_encode((const std::uint8_t*)(data.pixels.data() + r * row_size), row_size);
|
||||
unsigned int row_count = 0;
|
||||
while (encoded.length() > max_row_length)
|
||||
{
|
||||
if (row_count == 0)
|
||||
_write_format(file, "; %s\n", encoded.substr(0, max_row_length).c_str());
|
||||
else
|
||||
_write_format(file, ";>%s\n", encoded.substr(0, max_row_length).c_str());
|
||||
|
||||
encoded = encoded.substr(max_row_length);
|
||||
++row_count;
|
||||
}
|
||||
|
||||
if (encoded.length() > 0)
|
||||
{
|
||||
if (row_count == 0)
|
||||
_write_format(file, "; %s\n", encoded.c_str());
|
||||
else
|
||||
_write_format(file, ";>%s\n", encoded.c_str());
|
||||
}
|
||||
}
|
||||
|
||||
_write(file, "; thumbnail end\n;\n");
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR_PNG_TO_GCODE
|
||||
}
|
||||
print.throw_if_canceled();
|
||||
}
|
||||
}
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
// Write notes (content of the Print Settings tab -> Notes)
|
||||
{
|
||||
std::list<std::string> lines;
|
||||
|
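The thumbnail block above writes base64 text as G-code comments wrapped at 78 characters. A reduced sketch of just the wrapping loop from the PNG branch, assuming the payload has already been encoded elsewhere (the encoder itself is not reproduced):

    #include <cstdio>
    #include <string>

    // Emit an already base64-encoded payload as G-code comment lines,
    // wrapped at max_row_length characters, as the thumbnail block does.
    static void write_commented_base64(FILE *file, std::string encoded,
                                       size_t max_row_length = 78)
    {
        while (encoded.length() > max_row_length) {
            std::fprintf(file, "; %s\n", encoded.substr(0, max_row_length).c_str());
            encoded = encoded.substr(max_row_length);
        }
        if (! encoded.empty())
            std::fprintf(file, "; %s\n", encoded.c_str());
    }

The raw-pixel fallback branch differs only in prefixing continuation rows with ";>" instead of "; ".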
@ -970,6 +1068,9 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
_writeln(file, GCodeTimeEstimator::Silent_First_M73_Output_Placeholder_Tag);
|
||||
}
|
||||
|
||||
// Hold total number of print toolchanges. Check for negative toolchanges (single extruder mode) and set to 0 (no tool change).
|
||||
int total_toolchanges = std::max(0, print.wipe_tower_data().number_of_toolchanges);
|
||||
|
||||
// Prepare the helper object for replacing placeholders in custom G-code and output filename.
|
||||
m_placeholder_parser = print.placeholder_parser();
|
||||
m_placeholder_parser.update_timestamp();
|
||||
|
@ -1032,6 +1133,7 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
// For the start / end G-code to do the priming and final filament pull in case there is no wipe tower provided.
|
||||
m_placeholder_parser.set("has_wipe_tower", has_wipe_tower);
|
||||
m_placeholder_parser.set("has_single_extruder_multi_material_priming", has_wipe_tower && print.config().single_extruder_multi_material_priming);
|
||||
m_placeholder_parser.set("total_toolchanges", total_toolchanges);
|
||||
std::string start_gcode = this->placeholder_parser_process("start_gcode", print.config().start_gcode.value, initial_extruder_id);
|
||||
// Set bed temperature if the start G-code does not contain any bed temp control G-codes.
|
||||
this->_print_first_layer_bed_temperature(file, print, start_gcode, initial_extruder_id, true);
|
||||
|
@ -1160,7 +1262,7 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
for (const LayerToPrint &ltp : layers_to_print) {
|
||||
std::vector<LayerToPrint> lrs;
|
||||
lrs.emplace_back(std::move(ltp));
|
||||
this->process_layer(file, print, lrs, tool_ordering.tools_for_layer(ltp.print_z()), &copy - object.copies().data());
|
||||
this->process_layer(file, print, lrs, tool_ordering.tools_for_layer(ltp.print_z()), nullptr, &copy - object.copies().data());
|
||||
print.throw_if_canceled();
|
||||
}
|
||||
#ifdef HAS_PRESSURE_EQUALIZER
|
||||
|
@ -1174,12 +1276,8 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
}
|
||||
}
|
||||
} else {
|
||||
// Order objects using a nearest neighbor search.
|
||||
std::vector<size_t> object_indices;
|
||||
Points object_reference_points;
|
||||
for (PrintObject *object : print.objects())
|
||||
object_reference_points.push_back(object->copies().front());
|
||||
Slic3r::Geometry::chained_path(object_reference_points, object_indices);
|
||||
// Order object instances using a nearest neighbor search.
|
||||
std::vector<std::pair<size_t, size_t>> print_object_instances_ordering = chain_print_object_instances(print);
|
||||
// Sort layers by Z.
|
||||
// All extrusion moves with the same top layer height are extruded uninterrupted.
|
||||
std::vector<std::pair<coordf_t, std::vector<LayerToPrint>>> layers_to_print = collect_layers_to_print(print);
|
||||
|
@ -1218,7 +1316,7 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
const LayerTools &layer_tools = tool_ordering.tools_for_layer(layer.first);
|
||||
if (m_wipe_tower && layer_tools.has_wipe_tower)
|
||||
m_wipe_tower->next_layer();
|
||||
this->process_layer(file, print, layer.second, layer_tools, size_t(-1));
|
||||
this->process_layer(file, print, layer.second, layer_tools, &print_object_instances_ordering, size_t(-1));
|
||||
print.throw_if_canceled();
|
||||
}
|
||||
#ifdef HAS_PRESSURE_EQUALIZER
|
||||
|
@ -1286,7 +1384,7 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
print.m_print_statistics.estimated_normal_color_print_times = m_normal_time_estimator.get_color_times_dhms(true);
|
||||
if (m_silent_time_estimator_enabled)
|
||||
print.m_print_statistics.estimated_silent_color_print_times = m_silent_time_estimator.get_color_times_dhms(true);
|
||||
|
||||
print.m_print_statistics.total_toolchanges = total_toolchanges;
|
||||
std::vector<Extruder> extruders = m_writer.extruders();
|
||||
if (! extruders.empty()) {
|
||||
std::pair<std::string, unsigned int> out_filament_used_mm ("; filament used [mm] = ", 0);
|
||||
|
@ -1336,6 +1434,8 @@ void GCode::_do_export(Print &print, FILE *file)
|
|||
}
|
||||
_write_format(file, "; total filament used [g] = %.1lf\n", print.m_print_statistics.total_weight);
|
||||
_write_format(file, "; total filament cost = %.1lf\n", print.m_print_statistics.total_cost);
|
||||
if (print.m_print_statistics.total_toolchanges > 0)
|
||||
_write_format(file, "; total toolchanges = %i\n", print.m_print_statistics.total_toolchanges);
|
||||
_write_format(file, "; estimated printing time (normal mode) = %s\n", m_normal_time_estimator.get_time_dhms().c_str());
|
||||
if (m_silent_time_estimator_enabled)
|
||||
_write_format(file, "; estimated printing time (silent mode) = %s\n", m_silent_time_estimator.get_time_dhms().c_str());
|
||||
|
@ -1529,8 +1629,54 @@ inline std::vector<GCode::ObjectByExtruder::Island>& object_islands_by_extruder(
|
|||
return islands;
|
||||
}
|
||||
|
||||
std::vector<GCode::InstanceToPrint> GCode::sort_print_object_instances(
|
||||
std::vector<GCode::ObjectByExtruder> &objects_by_extruder,
|
||||
const std::vector<LayerToPrint> &layers,
|
||||
// Ordering must be defined for normal (non-sequential print).
|
||||
const std::vector<std::pair<size_t, size_t>> *ordering,
|
||||
// For sequential print, the instance of the object to be printed has to be defined.
|
||||
const size_t single_object_instance_idx)
|
||||
{
|
||||
std::vector<InstanceToPrint> out;
|
||||
|
||||
if (ordering == nullptr) {
|
||||
// Sequential print, single object is being printed.
|
||||
for (ObjectByExtruder &object_by_extruder : objects_by_extruder) {
|
||||
const size_t layer_id = &object_by_extruder - objects_by_extruder.data();
|
||||
const PrintObject *print_object = layers[layer_id].object();
|
||||
if (print_object)
|
||||
out.emplace_back(object_by_extruder, layer_id, *print_object, single_object_instance_idx);
|
||||
}
|
||||
} else {
|
||||
// Create mapping from PrintObject* to ObjectByExtruder*.
|
||||
std::vector<std::pair<const PrintObject*, ObjectByExtruder*>> sorted;
|
||||
sorted.reserve(objects_by_extruder.size());
|
||||
for (ObjectByExtruder &object_by_extruder : objects_by_extruder) {
|
||||
const size_t layer_id = &object_by_extruder - objects_by_extruder.data();
|
||||
const PrintObject *print_object = layers[layer_id].object();
|
||||
if (print_object)
|
||||
sorted.emplace_back(print_object, &object_by_extruder);
|
||||
}
|
||||
std::sort(sorted.begin(), sorted.end());
|
||||
|
||||
if (! sorted.empty()) {
|
||||
const Print &print = *sorted.front().first->print();
|
||||
out.reserve(sorted.size());
|
||||
for (const std::pair<size_t, size_t> &instance_id : *ordering) {
|
||||
const PrintObject &print_object = *print.objects()[instance_id.first];
|
||||
std::pair<const PrintObject*, ObjectByExtruder*> key(&print_object, nullptr);
|
||||
auto it = std::lower_bound(sorted.begin(), sorted.end(), key);
|
||||
if (it != sorted.end() && it->first == &print_object)
|
||||
// ObjectByExtruder for this PrintObject was found.
|
||||
out.emplace_back(*it->second, it->second - objects_by_extruder.data(), print_object, instance_id.second);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// In sequential mode, process_layer is called once per each object and its copy,
|
||||
// therefore layers will contain a single entry and single_object_idx will point to the copy of the object.
|
||||
// therefore layers will contain a single entry and single_object_instance_idx will point to the copy of the object.
|
||||
// In non-sequential mode, process_layer is called per each print_z height with all object and support layers accumulated.
|
||||
// For multi-material prints, this routine minimizes extruder switches by gathering extruder specific extrusion paths
|
||||
// and performing the extruder specific extrusions together.
|
||||
|
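sort_print_object_instances builds a sorted vector of (PrintObject*, ObjectByExtruder*) pairs and queries it with std::lower_bound, i.e. a flat map keyed by pointer. The same idiom reduced to generic placeholder types (Key and Value are illustrative, not Slic3r classes):

    #include <algorithm>
    #include <utility>
    #include <vector>

    struct Key {};
    struct Value {};

    // Flat-map lookup: sort a vector of (key pointer, value pointer) pairs once,
    // then resolve keys with std::lower_bound, as sort_print_object_instances
    // does for PrintObject* -> ObjectByExtruder*.
    Value* find_value(std::vector<std::pair<const Key*, Value*>> entries, const Key *wanted)
    {
        std::sort(entries.begin(), entries.end());
        auto it = std::lower_bound(entries.begin(), entries.end(),
                                   std::make_pair(wanted, static_cast<Value*>(nullptr)));
        return (it != entries.end() && it->first == wanted) ? it->second : nullptr;
    }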
@ -1541,14 +1687,16 @@ void GCode::process_layer(
|
|||
// Set of object & print layers of the same PrintObject and with the same print_z.
|
||||
const std::vector<LayerToPrint> &layers,
|
||||
const LayerTools &layer_tools,
|
||||
// Pairs of PrintObject index and its instance index.
|
||||
const std::vector<std::pair<size_t, size_t>> *ordering,
|
||||
// If set to size_t(-1), then print all copies of all objects.
|
||||
// Otherwise print a single copy of a single object.
|
||||
const size_t single_object_idx)
|
||||
const size_t single_object_instance_idx)
|
||||
{
|
||||
assert(! layers.empty());
|
||||
// assert(! layer_tools.extruders.empty());
|
||||
// Either printing all copies of all objects, or just a single copy of a single object.
|
||||
assert(single_object_idx == size_t(-1) || layers.size() == 1);
|
||||
assert(single_object_instance_idx == size_t(-1) || layers.size() == 1);
|
||||
|
||||
if (layer_tools.extruders.empty())
|
||||
// Nothing to extrude.
|
||||
|
@ -1757,16 +1905,24 @@ void GCode::process_layer(
|
|||
// - for each island, we extrude perimeters first, unless user set the infill_first
|
||||
// option
|
||||
// (Still, we have to keep track of regions because we need to apply their config)
|
||||
size_t n_slices = layer.slices.expolygons.size();
|
||||
std::vector<BoundingBox> layer_surface_bboxes;
|
||||
layer_surface_bboxes.reserve(n_slices);
|
||||
for (const ExPolygon &expoly : layer.slices.expolygons)
|
||||
layer_surface_bboxes.push_back(get_extents(expoly.contour));
|
||||
size_t n_slices = layer.slices.size();
|
||||
const std::vector<BoundingBox> &layer_surface_bboxes = layer.slices_bboxes;
|
||||
// Traverse the slices in an increasing order of bounding box size, so that islands nested inside other islands are tested first,
|
||||
// so we can just test a point inside ExPolygon::contour and we may skip testing the holes.
|
||||
std::vector<size_t> slices_test_order;
|
||||
slices_test_order.reserve(n_slices);
|
||||
for (size_t i = 0; i < n_slices; ++ i)
|
||||
slices_test_order.emplace_back(i);
|
||||
std::sort(slices_test_order.begin(), slices_test_order.end(), [&layer_surface_bboxes](int i, int j) {
|
||||
const Vec2d s1 = layer_surface_bboxes[i].size().cast<double>();
|
||||
const Vec2d s2 = layer_surface_bboxes[j].size().cast<double>();
|
||||
return s1.x() * s1.y() < s2.x() * s2.y();
|
||||
});
|
||||
auto point_inside_surface = [&layer, &layer_surface_bboxes](const size_t i, const Point &point) {
|
||||
const BoundingBox &bbox = layer_surface_bboxes[i];
|
||||
return point(0) >= bbox.min(0) && point(0) < bbox.max(0) &&
|
||||
point(1) >= bbox.min(1) && point(1) < bbox.max(1) &&
|
||||
layer.slices.expolygons[i].contour.contains(point);
|
||||
layer.slices[i].contour.contains(point);
|
||||
};
|
||||
|
||||
for (size_t region_id = 0; region_id < print.regions().size(); ++ region_id) {
|
||||
|
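Testing slices in increasing bounding-box area means a nested island is matched before the island that encloses it, which is what allows testing only ExPolygon::contour and skipping the holes. A small self-contained illustration of that ordering:

    #include <algorithm>
    #include <cassert>
    #include <vector>

    int main()
    {
        // Bounding-box areas of three islands; island 2 is nested inside island 0.
        std::vector<double> bbox_area { 400.0, 90.0, 25.0 };
        std::vector<size_t> order { 0, 1, 2 };
        std::sort(order.begin(), order.end(),
                  [&bbox_area](size_t i, size_t j) { return bbox_area[i] < bbox_area[j]; });
        // The nested island (smallest bounding box) is tested first.
        assert(order.front() == 2 && order.back() == 0);
        return 0;
    }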
@ -1809,16 +1965,19 @@ void GCode::process_layer(
|
|||
extruder,
|
||||
&layer_to_print - layers.data(),
|
||||
layers.size(), n_slices+1);
|
||||
for (size_t i = 0; i <= n_slices; ++i)
|
||||
for (size_t i = 0; i <= n_slices; ++ i) {
|
||||
bool last = i == n_slices;
|
||||
size_t island_idx = last ? n_slices : slices_test_order[i];
|
||||
if (// fill->first_point does not fit inside any slice
|
||||
i == n_slices ||
|
||||
last ||
|
||||
// fill->first_point fits inside ith slice
|
||||
point_inside_surface(i, fill->first_point())) {
|
||||
if (islands[i].by_region.empty())
|
||||
islands[i].by_region.assign(print.regions().size(), ObjectByExtruder::Island::Region());
|
||||
islands[i].by_region[region_id].append(entity_type, fill, entity_overrides, layer_to_print.object()->copies().size());
|
||||
point_inside_surface(island_idx, fill->first_point())) {
|
||||
if (islands[island_idx].by_region.empty())
|
||||
islands[island_idx].by_region.assign(print.regions().size(), ObjectByExtruder::Island::Region());
|
||||
islands[island_idx].by_region[region_id].append(entity_type, fill, entity_overrides, layer_to_print.object()->copies().size());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1883,62 +2042,49 @@ void GCode::process_layer(
|
|||
if (objects_by_extruder_it == by_extruder.end())
|
||||
continue;
|
||||
|
||||
std::vector<InstanceToPrint> instances_to_print = sort_print_object_instances(objects_by_extruder_it->second, layers, ordering, single_object_instance_idx);
|
||||
|
||||
// We are almost ready to print. However, we must go through all the objects twice to print the overridden extrusions first (infill/perimeter wiping feature):
|
||||
bool is_anything_overridden = const_cast<LayerTools&>(layer_tools).wiping_extrusions().is_anything_overridden();
|
||||
for (int print_wipe_extrusions = is_anything_overridden; print_wipe_extrusions>=0; --print_wipe_extrusions) {
|
||||
if (is_anything_overridden && print_wipe_extrusions == 0)
|
||||
gcode+="; PURGING FINISHED\n";
|
||||
|
||||
for (ObjectByExtruder &object_by_extruder : objects_by_extruder_it->second) {
|
||||
const size_t layer_id = &object_by_extruder - objects_by_extruder_it->second.data();
|
||||
const PrintObject *print_object = layers[layer_id].object();
|
||||
if (print_object == nullptr)
|
||||
// This layer is empty for this particular object, it has neither object extrusions nor support extrusions at this print_z.
|
||||
continue;
|
||||
|
||||
m_config.apply(print_object->config(), true);
|
||||
m_layer = layers[layer_id].layer();
|
||||
for (InstanceToPrint &instance_to_print : instances_to_print) {
|
||||
m_config.apply(instance_to_print.print_object.config(), true);
|
||||
m_layer = layers[instance_to_print.layer_id].layer();
|
||||
if (m_config.avoid_crossing_perimeters)
|
||||
m_avoid_crossing_perimeters.init_layer_mp(union_ex(m_layer->slices, true));
|
||||
Points copies;
|
||||
if (single_object_idx == size_t(-1))
|
||||
copies = print_object->copies();
|
||||
else
|
||||
copies.push_back(print_object->copies()[single_object_idx]);
|
||||
// Sort the copies by the closest point starting with the current print position.
|
||||
|
||||
unsigned int copy_id = 0;
|
||||
for (const Point &copy : copies) {
|
||||
if (this->config().gcode_label_objects)
|
||||
gcode += std::string("; printing object ") + print_object->model_object()->name + " id:" + std::to_string(layer_id) + " copy " + std::to_string(copy_id) + "\n";
|
||||
// When starting a new object, use the external motion planner for the first travel move.
|
||||
std::pair<const PrintObject*, Point> this_object_copy(print_object, copy);
|
||||
if (m_last_obj_copy != this_object_copy)
|
||||
m_avoid_crossing_perimeters.use_external_mp_once = true;
|
||||
m_last_obj_copy = this_object_copy;
|
||||
this->set_origin(unscale(copy));
|
||||
if (object_by_extruder.support != nullptr && !print_wipe_extrusions) {
|
||||
m_layer = layers[layer_id].support_layer;
|
||||
gcode += this->extrude_support(
|
||||
// support_extrusion_role is erSupportMaterial, erSupportMaterialInterface or erMixed for all extrusion paths.
|
||||
object_by_extruder.support->chained_path_from(m_last_pos, false, object_by_extruder.support_extrusion_role));
|
||||
m_layer = layers[layer_id].layer();
|
||||
}
|
||||
for (ObjectByExtruder::Island &island : object_by_extruder.islands) {
|
||||
const auto& by_region_specific = is_anything_overridden ? island.by_region_per_copy(copy_id, extruder_id, print_wipe_extrusions) : island.by_region;
|
||||
|
||||
if (print.config().infill_first) {
|
||||
gcode += this->extrude_infill(print, by_region_specific);
|
||||
gcode += this->extrude_perimeters(print, by_region_specific, lower_layer_edge_grids[layer_id]);
|
||||
} else {
|
||||
gcode += this->extrude_perimeters(print, by_region_specific, lower_layer_edge_grids[layer_id]);
|
||||
gcode += this->extrude_infill(print,by_region_specific);
|
||||
}
|
||||
}
|
||||
if (this->config().gcode_label_objects)
|
||||
gcode += std::string("; stop printing object ") + print_object->model_object()->name + " id:" + std::to_string(layer_id) + " copy " + std::to_string(copy_id) + "\n";
|
||||
++ copy_id;
|
||||
if (this->config().gcode_label_objects)
|
||||
gcode += std::string("; printing object ") + instance_to_print.print_object.model_object()->name + " id:" + std::to_string(instance_to_print.layer_id) + " copy " + std::to_string(instance_to_print.instance_id) + "\n";
|
||||
// When starting a new object, use the external motion planner for the first travel move.
|
||||
const Point &offset = instance_to_print.print_object.copies()[instance_to_print.instance_id];
|
||||
std::pair<const PrintObject*, Point> this_object_copy(&instance_to_print.print_object, offset);
|
||||
if (m_last_obj_copy != this_object_copy)
|
||||
m_avoid_crossing_perimeters.use_external_mp_once = true;
|
||||
m_last_obj_copy = this_object_copy;
|
||||
this->set_origin(unscale(offset));
|
||||
if (instance_to_print.object_by_extruder.support != nullptr && !print_wipe_extrusions) {
|
||||
m_layer = layers[instance_to_print.layer_id].support_layer;
|
||||
gcode += this->extrude_support(
|
||||
// support_extrusion_role is erSupportMaterial, erSupportMaterialInterface or erMixed for all extrusion paths.
|
||||
instance_to_print.object_by_extruder.support->chained_path_from(m_last_pos, instance_to_print.object_by_extruder.support_extrusion_role));
|
||||
m_layer = layers[instance_to_print.layer_id].layer();
|
||||
}
|
||||
for (ObjectByExtruder::Island &island : instance_to_print.object_by_extruder.islands) {
|
||||
const auto& by_region_specific = is_anything_overridden ? island.by_region_per_copy(instance_to_print.instance_id, extruder_id, print_wipe_extrusions) : island.by_region;
|
||||
|
||||
if (print.config().infill_first) {
|
||||
gcode += this->extrude_infill(print, by_region_specific);
|
||||
gcode += this->extrude_perimeters(print, by_region_specific, lower_layer_edge_grids[instance_to_print.layer_id]);
|
||||
} else {
|
||||
gcode += this->extrude_perimeters(print, by_region_specific, lower_layer_edge_grids[instance_to_print.layer_id]);
|
||||
gcode += this->extrude_infill(print,by_region_specific);
|
||||
}
|
||||
}
|
||||
if (this->config().gcode_label_objects)
|
||||
gcode += std::string("; stop printing object ") + instance_to_print.print_object.model_object()->name + " id:" + std::to_string(instance_to_print.layer_id) + " copy " + std::to_string(instance_to_print.instance_id) + "\n";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
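The print_wipe_extrusions loop above is a compact two-pass idiom: when any extrusion is overridden it first runs a purging pass (1) and then the regular pass (0); otherwise only the regular pass runs. Stripped to a runnable skeleton, with the printf calls standing in for the real G-code emission:

    #include <cstdio>

    // Skeleton of the two-pass loop: purging pass first (only when something
    // is overridden), regular extrusions second.
    void run_passes(bool is_anything_overridden)
    {
        for (int print_wipe_extrusions = is_anything_overridden ? 1 : 0;
             print_wipe_extrusions >= 0; --print_wipe_extrusions) {
            if (is_anything_overridden && print_wipe_extrusions == 0)
                std::puts("; PURGING FINISHED");
            std::printf("pass: %s\n", print_wipe_extrusions ? "overridden (purging)" : "regular");
        }
    }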
@ -2372,7 +2518,7 @@ std::string GCode::extrude_loop(ExtrusionLoop loop, std::string description, dou
        static int iRun = 0;
        SVG svg(debug_out_path("GCode_extrude_loop-%d.svg", iRun ++));
        if (m_layer->lower_layer != NULL)
            svg.draw(m_layer->lower_layer->slices.expolygons);
            svg.draw(m_layer->lower_layer->slices);
        for (size_t i = 0; i < loop.paths.size(); ++ i)
            svg.draw(loop.paths[i].as_polyline(), "red");
        Polylines polylines;

@ -2542,12 +2688,10 @@ std::string GCode::extrude_infill(const Print &print, const std::vector<ObjectBy
    std::string gcode;
    for (const ObjectByExtruder::Island::Region &region : by_region) {
        m_config.apply(print.regions()[&region - &by_region.front()]->config());
        ExtrusionEntityCollection chained = region.infills.chained_path_from(m_last_pos, false);
        for (ExtrusionEntity *fill : chained.entities) {
        for (ExtrusionEntity *fill : region.infills.chained_path_from(m_last_pos).entities) {
            auto *eec = dynamic_cast<ExtrusionEntityCollection*>(fill);
            if (eec) {
                ExtrusionEntityCollection chained2 = eec->chained_path_from(m_last_pos, false);
                for (ExtrusionEntity *ee : chained2.entities)
                for (ExtrusionEntity *ee : eec->chained_path_from(m_last_pos).entities)
                    gcode += this->extrude_entity(*ee, "infill");
            } else
                gcode += this->extrude_entity(*fill, "infill");
|
@ -30,6 +30,9 @@ namespace Slic3r {
|
|||
// Forward declarations.
|
||||
class GCode;
|
||||
class GCodePreviewData;
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
struct ThumbnailData;
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
class AvoidCrossingPerimeters {
|
||||
public:
|
||||
|
@ -162,7 +165,11 @@ public:
|
|||
|
||||
// throws std::runtime_exception on error,
|
||||
// throws CanceledException through print->throw_if_canceled().
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
void do_export(Print* print, const char* path, GCodePreviewData* preview_data = nullptr, const std::vector<ThumbnailData>* thumbnail_data = nullptr);
|
||||
#else
|
||||
void do_export(Print *print, const char *path, GCodePreviewData *preview_data = nullptr);
|
||||
#endif // ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
// Exported for the helper classes (OozePrevention, Wipe) and for the Perl binding for unit tests.
|
||||
const Vec2d& origin() const { return m_origin; }
|
||||
|
@ -190,7 +197,11 @@ public:
|
|||
static void append_full_config(const Print& print, std::string& str);
|
||||
|
||||
protected:
|
||||
#if ENABLE_THUMBNAIL_GENERATOR
|
||||
void _do_export(Print& print, FILE* file, const std::vector<ThumbnailData>* thumbnail_data);
|
||||
#else
|
||||
void _do_export(Print &print, FILE *file);
|
||||
#endif //ENABLE_THUMBNAIL_GENERATOR
|
||||
|
||||
// Object and support extrusions of the same PrintObject at the same print_z.
|
||||
struct LayerToPrint
|
||||
|
@ -202,7 +213,7 @@ protected:
|
|||
const PrintObject* object() const { return (this->layer() != nullptr) ? this->layer()->object() : nullptr; }
|
||||
coordf_t print_z() const { return (object_layer != nullptr && support_layer != nullptr) ? 0.5 * (object_layer->print_z + support_layer->print_z) : this->layer()->print_z; }
|
||||
};
|
||||
static std::vector<GCode::LayerToPrint> collect_layers_to_print(const PrintObject &object);
|
||||
static std::vector<LayerToPrint> collect_layers_to_print(const PrintObject &object);
|
||||
static std::vector<std::pair<coordf_t, std::vector<LayerToPrint>>> collect_layers_to_print(const Print &print);
|
||||
void process_layer(
|
||||
// Write into the output file.
|
||||
|
@ -210,7 +221,9 @@ protected:
|
|||
const Print &print,
|
||||
// Set of object & print layers of the same PrintObject and with the same print_z.
|
||||
const std::vector<LayerToPrint> &layers,
|
||||
const LayerTools &layer_tools,
|
||||
const LayerTools &layer_tools,
|
||||
// Pairs of PrintObject index and its instance index.
|
||||
const std::vector<std::pair<size_t, size_t>> *ordering,
|
||||
// If set to size_t(-1), then print all copies of all objects.
|
||||
// Otherwise print a single copy of a single object.
|
||||
const size_t single_object_idx = size_t(-1));
|
||||
|
@ -258,6 +271,25 @@ protected:
|
|||
std::vector<Island> islands;
|
||||
};
|
||||
|
||||
struct InstanceToPrint
|
||||
{
|
||||
InstanceToPrint(ObjectByExtruder &object_by_extruder, size_t layer_id, const PrintObject &print_object, size_t instance_id) :
|
||||
object_by_extruder(object_by_extruder), layer_id(layer_id), print_object(print_object), instance_id(instance_id) {}
|
||||
|
||||
ObjectByExtruder &object_by_extruder;
|
||||
const size_t layer_id;
|
||||
const PrintObject &print_object;
|
||||
// Instance idx of the copy of a print object.
|
||||
const size_t instance_id;
|
||||
};
|
||||
|
||||
std::vector<InstanceToPrint> sort_print_object_instances(
|
||||
std::vector<ObjectByExtruder> &objects_by_extruder,
|
||||
const std::vector<LayerToPrint> &layers,
|
||||
// Ordering must be defined for normal (non-sequential print).
|
||||
const std::vector<std::pair<size_t, size_t>> *ordering,
|
||||
// For sequential print, the instance of the object to be printed has to be defined.
|
||||
const size_t single_object_instance_idx);
|
||||
|
||||
std::string extrude_perimeters(const Print &print, const std::vector<ObjectByExtruder::Island::Region> &by_region, std::unique_ptr<EdgeGrid::Grid> &lower_layer_edge_grid);
|
||||
std::string extrude_infill(const Print &print, const std::vector<ObjectByExtruder::Island::Region> &by_region);
|
||||
|
|
|
@ -20,6 +20,7 @@ static const unsigned int DEFAULT_EXTRUDER_ID = 0;
static const unsigned int DEFAULT_COLOR_PRINT_ID = 0;
static const Slic3r::Vec3d DEFAULT_START_POSITION = Slic3r::Vec3d(0.0f, 0.0f, 0.0f);
static const float DEFAULT_START_EXTRUSION = 0.0f;
static const float DEFAULT_FAN_SPEED = 0.0f;

namespace Slic3r {
@ -36,21 +37,23 @@ const float GCodeAnalyzer::Default_Height = 0.0f;
|
|||
GCodeAnalyzer::Metadata::Metadata()
|
||||
: extrusion_role(erNone)
|
||||
, extruder_id(DEFAULT_EXTRUDER_ID)
|
||||
, cp_color_id(DEFAULT_COLOR_PRINT_ID)
|
||||
, mm3_per_mm(GCodeAnalyzer::Default_mm3_per_mm)
|
||||
, width(GCodeAnalyzer::Default_Width)
|
||||
, height(GCodeAnalyzer::Default_Height)
|
||||
, feedrate(DEFAULT_FEEDRATE)
|
||||
, fan_speed(DEFAULT_FAN_SPEED)
|
||||
, cp_color_id(DEFAULT_COLOR_PRINT_ID)
|
||||
{
|
||||
}
|
||||
|
||||
GCodeAnalyzer::Metadata::Metadata(ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, unsigned int cp_color_id/* = 0*/)
|
||||
GCodeAnalyzer::Metadata::Metadata(ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, float fan_speed, unsigned int cp_color_id/* = 0*/)
|
||||
: extrusion_role(extrusion_role)
|
||||
, extruder_id(extruder_id)
|
||||
, mm3_per_mm(mm3_per_mm)
|
||||
, width(width)
|
||||
, height(height)
|
||||
, feedrate(feedrate)
|
||||
, fan_speed(fan_speed)
|
||||
, cp_color_id(cp_color_id)
|
||||
{
|
||||
}
|
||||
|
@ -75,15 +78,18 @@ bool GCodeAnalyzer::Metadata::operator != (const GCodeAnalyzer::Metadata& other)
|
|||
if (feedrate != other.feedrate)
|
||||
return true;
|
||||
|
||||
if (fan_speed != other.fan_speed)
|
||||
return true;
|
||||
|
||||
if (cp_color_id != other.cp_color_id)
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
GCodeAnalyzer::GCodeMove::GCodeMove(GCodeMove::EType type, ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, const Vec3d& start_position, const Vec3d& end_position, float delta_extruder, unsigned int cp_color_id/* = 0*/)
|
||||
GCodeAnalyzer::GCodeMove::GCodeMove(GCodeMove::EType type, ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, const Vec3d& start_position, const Vec3d& end_position, float delta_extruder, float fan_speed, unsigned int cp_color_id/* = 0*/)
|
||||
: type(type)
|
||||
, data(extrusion_role, extruder_id, mm3_per_mm, width, height, feedrate, cp_color_id)
|
||||
, data(extrusion_role, extruder_id, mm3_per_mm, width, height, feedrate, fan_speed, cp_color_id)
|
||||
, start_position(start_position)
|
||||
, end_position(end_position)
|
||||
, delta_extruder(delta_extruder)
|
||||
|
@ -133,7 +139,9 @@ void GCodeAnalyzer::reset()
    _set_feedrate(DEFAULT_FEEDRATE);
    _set_start_position(DEFAULT_START_POSITION);
    _set_start_extrusion(DEFAULT_START_EXTRUSION);
    _set_fan_speed(DEFAULT_FAN_SPEED);
    _reset_axes_position();
    _reset_axes_origin();
    _reset_cached_position();

    m_moves_map.clear();
@ -259,6 +267,16 @@ void GCodeAnalyzer::_process_gcode_line(GCodeReader&, const GCodeReader::GCodeLi
                _processM83(line);
                break;
            }
        case 106: // Set fan speed
            {
                _processM106(line);
                break;
            }
        case 107: // Disable fan
            {
                _processM107(line);
                break;
            }
        case 108:
        case 135:
            {

@ -267,6 +285,11 @@ void GCodeAnalyzer::_process_gcode_line(GCodeReader&, const GCodeReader::GCodeLi
                _processM108orM135(line);
                break;
            }
        case 132: // Recall stored home offsets
            {
                _processM132(line);
                break;
            }
        case 401: // Repetier: Store x, y and z position
            {
                _processM401(line);
@ -293,31 +316,32 @@ void GCodeAnalyzer::_process_gcode_line(GCodeReader&, const GCodeReader::GCodeLi
|
|||
m_process_output += line.raw() + "\n";
|
||||
}
|
||||
|
||||
// Returns the new absolute position on the given axis in dependence of the given parameters
|
||||
float axis_absolute_position_from_G1_line(GCodeAnalyzer::EAxis axis, const GCodeReader::GCodeLine& lineG1, GCodeAnalyzer::EUnits units, bool is_relative, float current_absolute_position)
|
||||
{
|
||||
float lengthsScaleFactor = (units == GCodeAnalyzer::Inches) ? INCHES_TO_MM : 1.0f;
|
||||
if (lineG1.has(Slic3r::Axis(axis)))
|
||||
{
|
||||
float ret = lineG1.value(Slic3r::Axis(axis)) * lengthsScaleFactor;
|
||||
return is_relative ? current_absolute_position + ret : ret;
|
||||
}
|
||||
else
|
||||
return current_absolute_position;
|
||||
}
|
||||
|
||||
void GCodeAnalyzer::_processG1(const GCodeReader::GCodeLine& line)
|
||||
{
|
||||
auto axis_absolute_position = [this](GCodeAnalyzer::EAxis axis, const GCodeReader::GCodeLine& lineG1) -> float
|
||||
{
|
||||
float current_absolute_position = _get_axis_position(axis);
|
||||
float current_origin = _get_axis_origin(axis);
|
||||
float lengthsScaleFactor = (_get_units() == GCodeAnalyzer::Inches) ? INCHES_TO_MM : 1.0f;
|
||||
|
||||
bool is_relative = (_get_global_positioning_type() == Relative);
|
||||
if (axis == E)
|
||||
is_relative |= (_get_e_local_positioning_type() == Relative);
|
||||
|
||||
if (lineG1.has(Slic3r::Axis(axis)))
|
||||
{
|
||||
float ret = lineG1.value(Slic3r::Axis(axis)) * lengthsScaleFactor;
|
||||
return is_relative ? current_absolute_position + ret : ret + current_origin;
|
||||
}
|
||||
else
|
||||
return current_absolute_position;
|
||||
};
|
||||
|
||||
// updates axes positions from line
|
||||
EUnits units = _get_units();
|
||||
float new_pos[Num_Axis];
|
||||
for (unsigned char a = X; a < Num_Axis; ++a)
|
||||
{
|
||||
bool is_relative = (_get_global_positioning_type() == Relative);
|
||||
if (a == E)
|
||||
is_relative |= (_get_e_local_positioning_type() == Relative);
|
||||
|
||||
new_pos[a] = axis_absolute_position_from_G1_line((EAxis)a, line, units, is_relative, _get_axis_position((EAxis)a));
|
||||
new_pos[a] = axis_absolute_position((EAxis)a, line);
|
||||
}
|
||||
|
||||
// updates feedrate from line, if present
|
||||
|
@ -407,25 +431,25 @@ void GCodeAnalyzer::_processG92(const GCodeReader::GCodeLine& line)

    if (line.has_x())
    {
        _set_axis_position(X, line.x() * lengthsScaleFactor);
        _set_axis_origin(X, _get_axis_position(X) - line.x() * lengthsScaleFactor);
        anyFound = true;
    }

    if (line.has_y())
    {
        _set_axis_position(Y, line.y() * lengthsScaleFactor);
        _set_axis_origin(Y, _get_axis_position(Y) - line.y() * lengthsScaleFactor);
        anyFound = true;
    }

    if (line.has_z())
    {
        _set_axis_position(Z, line.z() * lengthsScaleFactor);
        _set_axis_origin(Z, _get_axis_position(Z) - line.z() * lengthsScaleFactor);
        anyFound = true;
    }

    if (line.has_e())
    {
        _set_axis_position(E, line.e() * lengthsScaleFactor);
        _set_axis_origin(E, _get_axis_position(E) - line.e() * lengthsScaleFactor);
        anyFound = true;
    }

@ -433,7 +457,7 @@ void GCodeAnalyzer::_processG92(const GCodeReader::GCodeLine& line)
    {
        for (unsigned char a = X; a < Num_Axis; ++a)
        {
            _set_axis_position((EAxis)a, 0.0f);
            _set_axis_origin((EAxis)a, _get_axis_position((EAxis)a));
        }
    }
}
|
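With the origin bookkeeping introduced here, G92 records origin = current position - programmed value, and G1 adds that origin back when resolving absolute coordinates, so re-zeroing no longer shifts the analyzer's picture of the machine. A worked example of the arithmetic, assuming the axis physically sits at 50 mm when G92 X10 arrives:

    #include <cassert>

    int main()
    {
        float position = 50.0f, origin = 0.0f;

        // G92 X10: re-zero the logical coordinate without moving the carriage.
        float g92_value = 10.0f;
        origin = position - g92_value;      // 40

        // G1 X20 (absolute): the analyzer maps it back into machine space.
        float g1_target = 20.0f;
        position = g1_target + origin;      // 60

        assert(origin == 40.0f && position == 60.0f);
        return 0;
    }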
@ -448,6 +472,24 @@ void GCodeAnalyzer::_processM83(const GCodeReader::GCodeLine& line)
        _set_e_local_positioning_type(Relative);
}

void GCodeAnalyzer::_processM106(const GCodeReader::GCodeLine& line)
{
    if (!line.has('P'))
    {
        // The absence of P means the print cooling fan, so ignore anything else.
        float new_fan_speed;
        if (line.has_value('S', new_fan_speed))
            _set_fan_speed((100.0f / 256.0f) * new_fan_speed);
        else
            _set_fan_speed(100.0f);
    }
}

void GCodeAnalyzer::_processM107(const GCodeReader::GCodeLine& line)
{
    _set_fan_speed(0.0f);
}

void GCodeAnalyzer::_processM108orM135(const GCodeReader::GCodeLine& line)
{
    // These M-codes are used by MakerWare and Sailfish to change active tool.
|
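The M106 handler above scales the S argument by 100/256, so M106 S128 is recorded as exactly 50% and a bare M106 as 100% (M107 resets it to 0). A minimal check of that mapping:

    #include <cassert>

    // Same scaling the analyzer applies to M106 S<value>.
    static float fan_speed_percentage(float s_value)
    {
        return (100.0f / 256.0f) * s_value;
    }

    int main()
    {
        assert(fan_speed_percentage(128.0f) == 50.0f);
        assert(fan_speed_percentage(255.0f) < 100.0f);   // S255 maps to ~99.6%, not 100%
        return 0;
    }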
@ -467,6 +509,25 @@ void GCodeAnalyzer::_processM108orM135(const GCodeReader::GCodeLine& line)
    }
}

void GCodeAnalyzer::_processM132(const GCodeReader::GCodeLine& line)
{
    // This command is used by Makerbot to load the current home position from EEPROM
    // see: https://github.com/makerbot/s3g/blob/master/doc/GCodeProtocol.md
    // Using this command to reset the axis origin to zero helps in fixing: https://github.com/prusa3d/PrusaSlicer/issues/3082

    if (line.has_x())
        _set_axis_origin(X, 0.0f);

    if (line.has_y())
        _set_axis_origin(Y, 0.0f);

    if (line.has_z())
        _set_axis_origin(Z, 0.0f);

    if (line.has_e())
        _set_axis_origin(E, 0.0f);
}

void GCodeAnalyzer::_processM401(const GCodeReader::GCodeLine& line)
{
    if (m_gcode_flavor != gcfRepetier)
@ -726,6 +787,16 @@ float GCodeAnalyzer::_get_feedrate() const
    return m_state.data.feedrate;
}

void GCodeAnalyzer::_set_fan_speed(float fan_speed_percentage)
{
    m_state.data.fan_speed = fan_speed_percentage;
}

float GCodeAnalyzer::_get_fan_speed() const
{
    return m_state.data.fan_speed;
}

void GCodeAnalyzer::_set_axis_position(EAxis axis, float position)
{
    m_state.position[axis] = position;
@ -736,11 +807,26 @@ float GCodeAnalyzer::_get_axis_position(EAxis axis) const
|
|||
return m_state.position[axis];
|
||||
}
|
||||
|
||||
void GCodeAnalyzer::_set_axis_origin(EAxis axis, float position)
|
||||
{
|
||||
m_state.origin[axis] = position;
|
||||
}
|
||||
|
||||
float GCodeAnalyzer::_get_axis_origin(EAxis axis) const
|
||||
{
|
||||
return m_state.origin[axis];
|
||||
}
|
||||
|
||||
void GCodeAnalyzer::_reset_axes_position()
|
||||
{
|
||||
::memset((void*)m_state.position, 0, Num_Axis * sizeof(float));
|
||||
}
|
||||
|
||||
void GCodeAnalyzer::_reset_axes_origin()
|
||||
{
|
||||
::memset((void*)m_state.origin, 0, Num_Axis * sizeof(float));
|
||||
}
|
||||
|
||||
void GCodeAnalyzer::_set_start_position(const Vec3d& position)
|
||||
{
|
||||
m_state.start_position = position;
|
||||
|
@ -798,7 +884,7 @@ void GCodeAnalyzer::_store_move(GCodeAnalyzer::GCodeMove::EType type)
|
|||
|
||||
Vec3d start_position = _get_start_position() + extruder_offset;
|
||||
Vec3d end_position = _get_end_position() + extruder_offset;
|
||||
it->second.emplace_back(type, _get_extrusion_role(), extruder_id, _get_mm3_per_mm(), _get_width(), _get_height(), _get_feedrate(), start_position, end_position, _get_delta_extrusion(), _get_cp_color_id());
|
||||
it->second.emplace_back(type, _get_extrusion_role(), extruder_id, _get_mm3_per_mm(), _get_width(), _get_height(), _get_feedrate(), start_position, end_position, _get_delta_extrusion(), _get_fan_speed(), _get_cp_color_id());
|
||||
}
|
||||
|
||||
bool GCodeAnalyzer::_is_valid_extrusion_role(int value) const
|
||||
|
@ -821,7 +907,7 @@ void GCodeAnalyzer::_calc_gcode_preview_extrusion_layers(GCodePreviewData& previ
|
|||
}
|
||||
|
||||
// if layer not found, create and return it
|
||||
layers.emplace_back(z, ExtrusionPaths());
|
||||
layers.emplace_back(z, GCodePreviewData::Extrusion::Paths());
|
||||
return layers.back();
|
||||
}
|
||||
|
||||
|
@ -830,13 +916,18 @@ void GCodeAnalyzer::_calc_gcode_preview_extrusion_layers(GCodePreviewData& previ
|
|||
// if the polyline is valid, create the extrusion path from it and store it
|
||||
if (polyline.is_valid())
|
||||
{
|
||||
ExtrusionPath path(data.extrusion_role, data.mm3_per_mm, data.width, data.height);
|
||||
auto& paths = get_layer_at_z(preview_data.extrusion.layers, z).paths;
|
||||
paths.emplace_back(GCodePreviewData::Extrusion::Path());
|
||||
GCodePreviewData::Extrusion::Path &path = paths.back();
|
||||
path.polyline = polyline;
|
||||
path.extrusion_role = data.extrusion_role;
|
||||
path.mm3_per_mm = data.mm3_per_mm;
|
||||
path.width = data.width;
|
||||
path.height = data.height;
|
||||
path.feedrate = data.feedrate;
|
||||
path.extruder_id = data.extruder_id;
|
||||
path.cp_color_id = data.cp_color_id;
|
||||
|
||||
get_layer_at_z(preview_data.extrusion.layers, z).paths.push_back(path);
|
||||
path.fan_speed = data.fan_speed;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -854,6 +945,7 @@ void GCodeAnalyzer::_calc_gcode_preview_extrusion_layers(GCodePreviewData& previ
|
|||
GCodePreviewData::Range width_range;
|
||||
GCodePreviewData::Range feedrate_range;
|
||||
GCodePreviewData::Range volumetric_rate_range;
|
||||
GCodePreviewData::Range fan_speed_range;
|
||||
|
||||
// to avoid to call the callback too often
|
||||
unsigned int cancel_callback_threshold = (unsigned int)std::max((int)extrude_moves->second.size() / 25, 1);
|
||||
|
@ -888,6 +980,7 @@ void GCodeAnalyzer::_calc_gcode_preview_extrusion_layers(GCodePreviewData& previ
|
|||
width_range.update_from(move.data.width);
|
||||
feedrate_range.update_from(move.data.feedrate);
|
||||
volumetric_rate_range.update_from(volumetric_rate);
|
||||
fan_speed_range.update_from(move.data.fan_speed);
|
||||
}
|
||||
else
|
||||
// append end vertex of the move to current polyline
|
||||
|
@ -906,6 +999,7 @@ void GCodeAnalyzer::_calc_gcode_preview_extrusion_layers(GCodePreviewData& previ
|
|||
preview_data.ranges.width.update_from(width_range);
|
||||
preview_data.ranges.feedrate.update_from(feedrate_range);
|
||||
preview_data.ranges.volumetric_rate.update_from(volumetric_rate_range);
|
||||
preview_data.ranges.fan_speed.update_from(fan_speed_range);
|
||||
|
||||
// we need to sort the layers by their z as they can be shuffled in case of sequential prints
|
||||
std::sort(preview_data.extrusion.layers.begin(), preview_data.extrusion.layers.end(), [](const GCodePreviewData::Extrusion::Layer& l1, const GCodePreviewData::Extrusion::Layer& l2)->bool { return l1.z < l2.z; });
|
||||
|
|
|
@ -54,10 +54,11 @@ public:
|
|||
float width; // mm
|
||||
float height; // mm
|
||||
float feedrate; // mm/s
|
||||
float fan_speed; // percentage
|
||||
unsigned int cp_color_id;
|
||||
|
||||
Metadata();
|
||||
Metadata(ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, unsigned int cp_color_id = 0);
|
||||
Metadata(ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, float fan_speed, unsigned int cp_color_id = 0);
|
||||
|
||||
bool operator != (const Metadata& other) const;
|
||||
};
|
||||
|
@ -81,7 +82,7 @@ public:
|
|||
Vec3d end_position;
|
||||
float delta_extruder;
|
||||
|
||||
GCodeMove(EType type, ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, const Vec3d& start_position, const Vec3d& end_position, float delta_extruder, unsigned int cp_color_id = 0);
|
||||
GCodeMove(EType type, ExtrusionRole extrusion_role, unsigned int extruder_id, double mm3_per_mm, float width, float height, float feedrate, const Vec3d& start_position, const Vec3d& end_position, float delta_extruder, float fan_speed, unsigned int cp_color_id = 0);
|
||||
GCodeMove(EType type, const Metadata& data, const Vec3d& start_position, const Vec3d& end_position, float delta_extruder);
|
||||
};
|
||||
|
||||
|
@ -100,6 +101,7 @@ private:
|
|||
float cached_position[5];
|
||||
float start_extrusion;
|
||||
float position[Num_Axis];
|
||||
float origin[Num_Axis];
|
||||
unsigned int cur_cp_color_id = 0;
|
||||
};
|
||||
|
||||
|
@ -171,9 +173,18 @@ private:
|
|||
// Set extruder to relative mode
|
||||
void _processM83(const GCodeReader::GCodeLine& line);
|
||||
|
||||
// Set fan speed
|
||||
void _processM106(const GCodeReader::GCodeLine& line);
|
||||
|
||||
// Disable fan
|
||||
void _processM107(const GCodeReader::GCodeLine& line);
|
||||
|
||||
// Set tool (MakerWare and Sailfish flavor)
|
||||
void _processM108orM135(const GCodeReader::GCodeLine& line);
|
||||
|
||||
// Recall stored home offsets
|
||||
void _processM132(const GCodeReader::GCodeLine& line);
|
||||
|
||||
// Repetier: Store x, y and z position
|
||||
void _processM401(const GCodeReader::GCodeLine& line);
|
||||
|
||||
|
@ -233,11 +244,19 @@ private:
|
|||
void _set_feedrate(float feedrate_mm_sec);
|
||||
float _get_feedrate() const;
|
||||
|
||||
void _set_fan_speed(float fan_speed_percentage);
|
||||
float _get_fan_speed() const;
|
||||
|
||||
void _set_axis_position(EAxis axis, float position);
|
||||
float _get_axis_position(EAxis axis) const;
|
||||
|
||||
void _set_axis_origin(EAxis axis, float position);
|
||||
float _get_axis_origin(EAxis axis) const;
|
||||
|
||||
// Sets axes position to zero
|
||||
void _reset_axes_position();
|
||||
// Sets origin position to zero
|
||||
void _reset_axes_origin();
|
||||
|
||||
void _set_start_position(const Vec3d& position);
|
||||
const Vec3d& _get_start_position() const;
|
||||
|
|
|
@ -1,7 +1,7 @@
#ifndef slic3r_CoolingBuffer_hpp_
#define slic3r_CoolingBuffer_hpp_

#include "libslic3r.h"
#include "../libslic3r.h"
#include <map>
#include <string>
|
|
@ -23,7 +23,7 @@ std::vector<unsigned char> GCodePreviewData::Color::as_bytes() const
|
|||
return ret;
|
||||
}
|
||||
|
||||
GCodePreviewData::Extrusion::Layer::Layer(float z, const ExtrusionPaths& paths)
|
||||
GCodePreviewData::Extrusion::Layer::Layer(float z, const Paths& paths)
|
||||
: z(z)
|
||||
, paths(paths)
|
||||
{
|
||||
|
@ -171,8 +171,8 @@ size_t GCodePreviewData::Extrusion::memory_used() const
|
|||
size_t out = sizeof(*this);
|
||||
out += SLIC3R_STDVEC_MEMSIZE(this->layers, Layer);
|
||||
for (const Layer &layer : this->layers) {
|
||||
out += SLIC3R_STDVEC_MEMSIZE(layer.paths, ExtrusionPath);
|
||||
for (const ExtrusionPath &path : layer.paths)
|
||||
out += SLIC3R_STDVEC_MEMSIZE(layer.paths, Path);
|
||||
for (const Path &path : layer.paths)
|
||||
out += SLIC3R_STDVEC_MEMSIZE(path.polyline.points, Point);
|
||||
}
|
||||
return out;
|
||||
|
@ -241,6 +241,7 @@ void GCodePreviewData::set_default()
|
|||
::memcpy((void*)ranges.height.colors, (const void*)Range::Default_Colors, Range::Colors_Count * sizeof(Color));
|
||||
::memcpy((void*)ranges.width.colors, (const void*)Range::Default_Colors, Range::Colors_Count * sizeof(Color));
|
||||
::memcpy((void*)ranges.feedrate.colors, (const void*)Range::Default_Colors, Range::Colors_Count * sizeof(Color));
|
||||
::memcpy((void*)ranges.fan_speed.colors, (const void*)Range::Default_Colors, Range::Colors_Count * sizeof(Color));
|
||||
::memcpy((void*)ranges.volumetric_rate.colors, (const void*)Range::Default_Colors, Range::Colors_Count * sizeof(Color));
|
||||
|
||||
extrusion.set_default();
|
||||
|
@ -287,6 +288,11 @@ GCodePreviewData::Color GCodePreviewData::get_feedrate_color(float feedrate) con
|
|||
return ranges.feedrate.get_color_at(feedrate);
|
||||
}
|
||||
|
||||
GCodePreviewData::Color GCodePreviewData::get_fan_speed_color(float fan_speed) const
|
||||
{
|
||||
return ranges.fan_speed.get_color_at(fan_speed);
|
||||
}
|
||||
|
||||
GCodePreviewData::Color GCodePreviewData::get_volumetric_rate_color(float rate) const
|
||||
{
|
||||
return ranges.volumetric_rate.get_color_at(rate);
|
||||
|
@ -358,6 +364,8 @@ std::string GCodePreviewData::get_legend_title() const
|
|||
return L("Width (mm)");
|
||||
case Extrusion::Feedrate:
|
||||
return L("Speed (mm/s)");
|
||||
case Extrusion::FanSpeed:
|
||||
return L("Fan Speed (%)");
|
||||
case Extrusion::VolumetricRate:
|
||||
return L("Volumetric flow rate (mm³/s)");
|
||||
case Extrusion::Tool:
|
||||
|
@ -421,6 +429,11 @@ GCodePreviewData::LegendItemsList GCodePreviewData::get_legend_items(const std::
|
|||
Helper::FillListFromRange(items, ranges.feedrate, 1, 1.0f);
|
||||
break;
|
||||
}
|
||||
case Extrusion::FanSpeed:
|
||||
{
|
||||
Helper::FillListFromRange(items, ranges.fan_speed, 0, 1.0f);
|
||||
break;
|
||||
}
|
||||
case Extrusion::VolumetricRate:
|
||||
{
|
||||
Helper::FillListFromRange(items, ranges.volumetric_rate, 3, 1.0f);
|
||||
|
|
|
@ -52,6 +52,8 @@ public:
|
|||
Range width;
|
||||
// Color mapping by feedrate.
|
||||
Range feedrate;
|
||||
// Color mapping by fan speed.
|
||||
Range fan_speed;
|
||||
// Color mapping by volumetric extrusion rate.
|
||||
Range volumetric_rate;
|
||||
};
|
||||
|
@ -74,6 +76,7 @@ public:
|
|||
Height,
|
||||
Width,
|
||||
Feedrate,
|
||||
FanSpeed,
|
||||
VolumetricRate,
|
||||
Tool,
|
||||
ColorPrint,
|
||||
|
@ -84,12 +87,34 @@ public:
|
|||
static const std::string Default_Extrusion_Role_Names[erCount];
|
||||
static const EViewType Default_View_Type;
|
||||
|
||||
class Path
|
||||
{
|
||||
public:
|
||||
Polyline polyline;
|
||||
ExtrusionRole extrusion_role;
|
||||
// Volumetric velocity. mm^3 of plastic per mm of linear head motion. Used by the G-code generator.
|
||||
float mm3_per_mm;
|
||||
// Width of the extrusion, used for visualization purposes.
|
||||
float width;
|
||||
// Height of the extrusion, used for visualization purposes.
|
||||
float height;
|
||||
// Feedrate of the extrusion, used for visualization purposes.
|
||||
float feedrate;
|
||||
// Id of the extruder, used for visualization purposes.
|
||||
uint32_t extruder_id;
|
||||
// Id of the color, used for visualization purposes in the color printing case.
|
||||
uint32_t cp_color_id;
|
||||
// Fan speed for the extrusion, used for visualization purposes.
|
||||
float fan_speed;
|
||||
};
|
||||
using Paths = std::vector<Path>;
|
||||
|
||||
struct Layer
|
||||
{
|
||||
float z;
|
||||
ExtrusionPaths paths;
|
||||
Paths paths;
|
||||
|
||||
Layer(float z, const ExtrusionPaths& paths);
|
||||
Layer(float z, const Paths& paths);
|
||||
};
|
||||
|
||||
typedef std::vector<Layer> LayersList;
|
||||
|
@ -205,6 +230,7 @@ public:
|
|||
Color get_height_color(float height) const;
|
||||
Color get_width_color(float width) const;
|
||||
Color get_feedrate_color(float feedrate) const;
|
||||
Color get_fan_speed_color(float fan_speed) const;
|
||||
Color get_volumetric_rate_color(float rate) const;
|
||||
|
||||
void set_extrusion_role_color(const std::string& role_name, float red, float green, float blue, float alpha);
|
||||
|
|
|
@ -138,7 +138,7 @@ BoundingBoxf get_wipe_tower_extrusions_extents(const Print &print, const coordf_
// We need to get position and angle of the wipe tower to transform them to actual position.
Transform2d trafo =
Eigen::Translation2d(print.config().wipe_tower_x.value, print.config().wipe_tower_y.value) *
Eigen::Rotation2Dd(print.config().wipe_tower_rotation_angle.value);
Eigen::Rotation2Dd(Geometry::deg2rad(print.config().wipe_tower_rotation_angle.value));

BoundingBoxf bbox;
for (const std::vector<WipeTower::ToolChangeResult> &tool_changes : print.wipe_tower_data().tool_changes) {

@ -1,8 +1,8 @@
#ifndef slic3r_SpiralVase_hpp_
#define slic3r_SpiralVase_hpp_

#include "libslic3r.h"
#include "GCodeReader.hpp"
#include "../libslic3r.h"
#include "../GCodeReader.hpp"

namespace Slic3r {
36 src/libslic3r/GCode/ThumbnailData.cpp Normal file

@ -0,0 +1,36 @@
#include "libslic3r/libslic3r.h"
#include "ThumbnailData.hpp"

#if ENABLE_THUMBNAIL_GENERATOR

namespace Slic3r {

void ThumbnailData::set(unsigned int w, unsigned int h)
{
if ((w == 0) || (h == 0))
return;

if ((width != w) || (height != h))
{
width = w;
height = h;
// defaults to white texture
pixels = std::vector<unsigned char>(width * height * 4, 255);
}
}

void ThumbnailData::reset()
{
width = 0;
height = 0;
pixels.clear();
}

bool ThumbnailData::is_valid() const
{
return (width != 0) && (height != 0) && ((unsigned int)pixels.size() == 4 * width * height);
}

} // namespace Slic3r

#endif // ENABLE_THUMBNAIL_GENERATOR
27 src/libslic3r/GCode/ThumbnailData.hpp Normal file

@ -0,0 +1,27 @@
#ifndef slic3r_ThumbnailData_hpp_
#define slic3r_ThumbnailData_hpp_

#if ENABLE_THUMBNAIL_GENERATOR

#include <vector>

namespace Slic3r {

struct ThumbnailData
{
unsigned int width;
unsigned int height;
std::vector<unsigned char> pixels;

ThumbnailData() { reset(); }
void set(unsigned int w, unsigned int h);
void reset();

bool is_valid() const;
};

} // namespace Slic3r

#endif // ENABLE_THUMBNAIL_GENERATOR

#endif // slic3r_ThumbnailData_hpp_
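Aside (not part of this changeset): a minimal sketch of how this struct is meant to be driven, assuming ENABLE_THUMBNAIL_GENERATOR is enabled. The call sequence and resolution below are assumptions for illustration; only set()/reset()/is_valid() and the RGBA pixel layout come from the new files.

#include "libslic3r/GCode/ThumbnailData.hpp"

// Hypothetical caller: allocate a 256x192 thumbnail, render into it, then release it.
void make_thumbnail_sketch()
{
    Slic3r::ThumbnailData thumbnail;
    thumbnail.set(256, 192);   // pixels becomes 256 * 192 * 4 bytes, initialized to white
    if (thumbnail.is_valid()) {
        // ... fill thumbnail.pixels (4 bytes per pixel, RGBA) from an offscreen render ...
    }
    thumbnail.reset();         // width/height back to 0, buffer cleared
}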
|
|
@ -331,15 +331,18 @@ public:

// Let the firmware back up the active speed override value.
WipeTowerWriter& speed_override_backup()
{
m_gcode += "M220 B\n";
{
// This is only supported by Prusa at this point (https://github.com/prusa3d/PrusaSlicer/issues/3114)
if (m_gcode_flavor == gcfMarlin)
m_gcode += "M220 B\n";
return *this;
}

// Let the firmware restore the active speed override value.
WipeTowerWriter& speed_override_restore()
{
m_gcode += "M220 R\n";
if (m_gcode_flavor == gcfMarlin)
m_gcode += "M220 R\n";
return *this;
}

@ -787,8 +790,10 @@ WipeTower::ToolChangeResult WipeTower::toolchange_Brim(bool sideOnly, float y_of
// The tool is supposed to be active and primed at the time when the wipe tower brim is extruded.
// Extrude 4 rounds of a brim around the future wipe tower.
box_coordinates box(wipeTower_box);
// the brim shall have 'normal' spacing with no extra void space
float spacing = m_perimeter_width - m_layer_height*float(1.-M_PI_4);
for (size_t i = 0; i < 4; ++ i) {
box.expand(m_perimeter_width - m_layer_height*float(1.-M_PI_4)); // the brim shall have 'normal' spacing with no extra void space
box.expand(spacing);
writer.travel (box.ld, 7000)
.extrude(box.lu, 2100).extrude(box.ru)
.extrude(box.rd ).extrude(box.ld);

@ -800,6 +805,10 @@ WipeTower::ToolChangeResult WipeTower::toolchange_Brim(bool sideOnly, float y_of
writer.append("; CP WIPE TOWER FIRST LAYER BRIM END\n"
";-----------------------------------\n");

// Save actual brim width to be later passed to the Print object, which will use it
// for skirt calculation and pass it to GLCanvas for precise preview box
m_wipe_tower_brim_width = wipeTower_box.ld.x() - box.ld.x() + spacing/2.f;

m_print_brim = false; // Mark the brim as extruded

// Ask our writer about how much material was consumed:
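A quick worked example of the stored brim width (the numbers are illustrative, not from this changeset): each of the four loops expands the box by spacing before extruding, so box.ld.x() ends up 4*spacing to the left of wipeTower_box.ld.x(); adding spacing/2 for the outer half-width of the last loop gives m_wipe_tower_brim_width = 4.5*spacing. With a 0.5 mm perimeter width and a 0.2 mm layer height, spacing = 0.5 - 0.2*(1 - PI/4) ≈ 0.457 mm, so the brim width reported to the Print object is roughly 2.06 mm.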

@ -92,6 +92,7 @@ public:
void generate(std::vector<std::vector<ToolChangeResult>> &result);

float get_depth() const { return m_wipe_tower_depth; }
float get_brim_width() const { return m_wipe_tower_brim_width; }

@ -203,6 +204,7 @@ private:
Vec2f m_wipe_tower_pos; // Left front corner of the wipe tower in mm.
float m_wipe_tower_width; // Width of the wipe tower.
float m_wipe_tower_depth = 0.f; // Depth of the wipe tower
float m_wipe_tower_brim_width = 0.f; // Width of brim (mm)
float m_wipe_tower_rotation_angle = 0.f; // Wipe tower rotation angle in degrees (with respect to x axis)
float m_internal_rotation = 0.f;
float m_y_shift = 0.f; // y shift passed to writer
@ -29,6 +29,8 @@ public:
float value(Axis axis) const { return m_axis[axis]; }
bool has(char axis) const;
bool has_value(char axis, float &value) const;
float new_X(const GCodeReader &reader) const { return this->has(X) ? this->x() : reader.x(); }
float new_Y(const GCodeReader &reader) const { return this->has(Y) ? this->y() : reader.y(); }
float new_Z(const GCodeReader &reader) const { return this->has(Z) ? this->z() : reader.z(); }
float new_E(const GCodeReader &reader) const { return this->has(E) ? this->e() : reader.e(); }
float new_F(const GCodeReader &reader) const { return this->has(F) ? this->f() : reader.f(); }
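Aside (illustrative, not from this changeset): the new has_value() accessor is what a fan-speed handler such as GCodeAnalyzer::_processM106 can use to read an optional parameter. The handler below is a hypothetical sketch; only GCodeLine::has_value() comes from the header above.

// Hypothetical M106 handler: read the optional S parameter (0-255 PWM value)
// and convert it to a percentage; M106 without S conventionally means full speed.
void process_M106_sketch(const Slic3r::GCodeReader::GCodeLine &line, float &fan_speed_percent)
{
    float value;
    if (line.has_value('S', value))
        fan_speed_percent = 100.0f * value / 255.0f;
    else
        fan_speed_percent = 100.0f;
}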
|
||||
|
|
|
@ -318,12 +318,15 @@ namespace Slic3r {
|
|||
|
||||
assert((g1_line_id >= (int)data->g1_line_ids.size()) || (data->g1_line_ids[g1_line_id].first >= g1_lines_count));
|
||||
const Block* block = nullptr;
|
||||
const G1LineIdToBlockId& map_item = data->g1_line_ids[g1_line_id];
|
||||
if ((g1_line_id < (int)data->g1_line_ids.size()) && (map_item.first == g1_lines_count))
|
||||
if (g1_line_id < (int)data->g1_line_ids.size())
|
||||
{
|
||||
if (line.has_e() && (map_item.second < (unsigned int)data->blocks.size()))
|
||||
block = &data->blocks[map_item.second];
|
||||
++g1_line_id;
|
||||
const G1LineIdToBlockId& map_item = data->g1_line_ids[g1_line_id];
|
||||
if (map_item.first == g1_lines_count)
|
||||
{
|
||||
if (line.has_e() && (map_item.second < (unsigned int)data->blocks.size()))
|
||||
block = &data->blocks[map_item.second];
|
||||
++g1_line_id;
|
||||
}
|
||||
}
|
||||
|
||||
if ((block != nullptr) && (block->elapsed_time != -1.0f))
|
||||
|
@ -412,6 +415,11 @@ namespace Slic3r {
|
|||
m_state.axis[axis].position = position;
|
||||
}
|
||||
|
||||
void GCodeTimeEstimator::set_axis_origin(EAxis axis, float position)
|
||||
{
|
||||
m_state.axis[axis].origin = position;
|
||||
}
|
||||
|
||||
void GCodeTimeEstimator::set_axis_max_feedrate(EAxis axis, float feedrate_mm_sec)
|
||||
{
|
||||
m_state.axis[axis].max_feedrate = feedrate_mm_sec;
|
||||
|
@ -432,6 +440,11 @@ namespace Slic3r {
|
|||
return m_state.axis[axis].position;
|
||||
}
|
||||
|
||||
float GCodeTimeEstimator::get_axis_origin(EAxis axis) const
|
||||
{
|
||||
return m_state.axis[axis].origin;
|
||||
}
|
||||
|
||||
float GCodeTimeEstimator::get_axis_max_feedrate(EAxis axis) const
|
||||
{
|
||||
return m_state.axis[axis].max_feedrate;
|
||||
|
@ -758,6 +771,10 @@ namespace Slic3r {
|
|||
set_axis_position(X, 0.0f);
|
||||
set_axis_position(Y, 0.0f);
|
||||
set_axis_position(Z, 0.0f);
|
||||
set_axis_origin(X, 0.0f);
|
||||
set_axis_origin(Y, 0.0f);
|
||||
set_axis_origin(Z, 0.0f);
|
||||
|
||||
if (get_e_local_positioning_type() == Absolute)
|
||||
set_axis_position(E, 0.0f);
|
||||
|
||||
|
@ -954,34 +971,35 @@ namespace Slic3r {
|
|||
}
|
||||
}
|
||||
|
||||
// Returns the new absolute position on the given axis in dependence of the given parameters
|
||||
float axis_absolute_position_from_G1_line(GCodeTimeEstimator::EAxis axis, const GCodeReader::GCodeLine& lineG1, GCodeTimeEstimator::EUnits units, bool is_relative, float current_absolute_position)
|
||||
{
|
||||
float lengthsScaleFactor = (units == GCodeTimeEstimator::Inches) ? INCHES_TO_MM : 1.0f;
|
||||
if (lineG1.has(Slic3r::Axis(axis)))
|
||||
{
|
||||
float ret = lineG1.value(Slic3r::Axis(axis)) * lengthsScaleFactor;
|
||||
return is_relative ? current_absolute_position + ret : ret;
|
||||
}
|
||||
else
|
||||
return current_absolute_position;
|
||||
}
|
||||
|
||||
void GCodeTimeEstimator::_processG1(const GCodeReader::GCodeLine& line)
|
||||
{
|
||||
auto axis_absolute_position = [this](GCodeTimeEstimator::EAxis axis, const GCodeReader::GCodeLine& lineG1) -> float
|
||||
{
|
||||
float current_absolute_position = get_axis_position(axis);
|
||||
float current_origin = get_axis_origin(axis);
|
||||
float lengthsScaleFactor = (get_units() == GCodeTimeEstimator::Inches) ? INCHES_TO_MM : 1.0f;
|
||||
|
||||
bool is_relative = (get_global_positioning_type() == Relative);
|
||||
if (axis == E)
|
||||
is_relative |= (get_e_local_positioning_type() == Relative);
|
||||
|
||||
if (lineG1.has(Slic3r::Axis(axis)))
|
||||
{
|
||||
float ret = lineG1.value(Slic3r::Axis(axis)) * lengthsScaleFactor;
|
||||
return is_relative ? current_absolute_position + ret : ret + current_origin;
|
||||
}
|
||||
else
|
||||
return current_absolute_position;
|
||||
};
|
||||
|
||||
PROFILE_FUNC();
|
||||
increment_g1_line_id();
|
||||
|
||||
// updates axes positions from line
|
||||
EUnits units = get_units();
|
||||
float new_pos[Num_Axis];
|
||||
for (unsigned char a = X; a < Num_Axis; ++a)
|
||||
{
|
||||
bool is_relative = (get_global_positioning_type() == Relative);
|
||||
if (a == E)
|
||||
is_relative |= (get_e_local_positioning_type() == Relative);
|
||||
|
||||
new_pos[a] = axis_absolute_position_from_G1_line((EAxis)a, line, units, is_relative, get_axis_position((EAxis)a));
|
||||
new_pos[a] = axis_absolute_position((EAxis)a, line);
|
||||
}
|
||||
|
||||
// updates feedrate from line, if present
|
||||
|
@ -1225,25 +1243,25 @@ namespace Slic3r {
|
|||
|
||||
if (line.has_x())
|
||||
{
|
||||
set_axis_position(X, line.x() * lengthsScaleFactor);
|
||||
set_axis_origin(X, get_axis_position(X) - line.x() * lengthsScaleFactor);
|
||||
anyFound = true;
|
||||
}
|
||||
|
||||
if (line.has_y())
|
||||
{
|
||||
set_axis_position(Y, line.y() * lengthsScaleFactor);
|
||||
set_axis_origin(Y, get_axis_position(Y) - line.y() * lengthsScaleFactor);
|
||||
anyFound = true;
|
||||
}
|
||||
|
||||
if (line.has_z())
|
||||
{
|
||||
set_axis_position(Z, line.z() * lengthsScaleFactor);
|
||||
set_axis_origin(Z, get_axis_position(Z) - line.z() * lengthsScaleFactor);
|
||||
anyFound = true;
|
||||
}
|
||||
|
||||
if (line.has_e())
|
||||
{
|
||||
set_axis_position(E, line.e() * lengthsScaleFactor);
|
||||
set_axis_origin(E, get_axis_position(E) - line.e() * lengthsScaleFactor);
|
||||
anyFound = true;
|
||||
}
|
||||
else
|
||||
|
@ -1253,7 +1271,7 @@ namespace Slic3r {
|
|||
{
|
||||
for (unsigned char a = X; a < Num_Axis; ++a)
|
||||
{
|
||||
set_axis_position((EAxis)a, 0.0f);
|
||||
set_axis_origin((EAxis)a, get_axis_position((EAxis)a));
|
||||
}
|
||||
}
|
||||
}
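To make the new origin bookkeeping concrete (numbers are illustrative): suppose the estimator's absolute E position is 12.5 mm when the file issues G92 E0. Instead of overwriting the position, _processG92 now stores origin_E = 12.5 - 0 = 12.5. A following absolute G1 E1.0 resolves through axis_absolute_position() to 1.0 + 12.5 = 13.5 mm, so the move extrudes 13.5 - 12.5 = 1.0 mm, exactly what the firmware will do, while the estimator keeps a single continuous coordinate system.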
|
||||
|
|
|
@ -55,6 +55,7 @@ namespace Slic3r {
|
|||
struct Axis
|
||||
{
|
||||
float position; // mm
|
||||
float origin; // mm
|
||||
float max_feedrate; // mm/s
|
||||
float max_acceleration; // mm/s^2
|
||||
float max_jerk; // mm/s
|
||||
|
@ -282,6 +283,8 @@ namespace Slic3r {
|
|||
|
||||
// Set current position on the given axis with the given value
|
||||
void set_axis_position(EAxis axis, float position);
|
||||
// Set current origin on the given axis with the given value
|
||||
void set_axis_origin(EAxis axis, float position);
|
||||
|
||||
void set_axis_max_feedrate(EAxis axis, float feedrate_mm_sec);
|
||||
void set_axis_max_acceleration(EAxis axis, float acceleration);
|
||||
|
@ -289,6 +292,8 @@ namespace Slic3r {
|
|||
|
||||
// Returns current position on the given axis
|
||||
float get_axis_position(EAxis axis) const;
|
||||
// Returns current origin on the given axis
|
||||
float get_axis_origin(EAxis axis) const;
|
||||
|
||||
float get_axis_max_feedrate(EAxis axis) const;
|
||||
float get_axis_max_acceleration(EAxis axis) const;
|
||||
|
|
|
@ -269,7 +269,7 @@ std::string GCodeWriter::set_speed(double F, const std::string &comment, const s
assert(F > 0.);
assert(F < 100000.);
std::ostringstream gcode;
gcode << "G1 F" << F;
gcode << "G1 F" << XYZF_NUM(F);
COMMENT(comment);
gcode << cooling_marker;
gcode << "\n";
|
||||
|
|
|
@ -3,18 +3,22 @@
|
|||
#include "ClipperUtils.hpp"
|
||||
#include "ExPolygon.hpp"
|
||||
#include "Line.hpp"
|
||||
#include "PolylineCollection.hpp"
|
||||
#include "clipper.hpp"
|
||||
#include <algorithm>
|
||||
#include <cassert>
|
||||
#include <cmath>
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <numeric>
|
||||
#include <set>
|
||||
#include <utility>
|
||||
#include <stack>
|
||||
#include <vector>
|
||||
|
||||
#include <boost/algorithm/string/classification.hpp>
|
||||
#include <boost/algorithm/string/split.hpp>
|
||||
#include <boost/log/trivial.hpp>
|
||||
|
||||
#ifdef SLIC3R_DEBUG
|
||||
#include "SVG.hpp"
|
||||
#endif
|
||||
|
@ -309,49 +313,7 @@ convex_hull(const Polygons &polygons)
|
|||
return convex_hull(std::move(pp));
|
||||
}
|
||||
|
||||
/* accepts an arrayref of points and returns a list of indices
|
||||
according to a nearest-neighbor walk */
|
||||
void
|
||||
chained_path(const Points &points, std::vector<Points::size_type> &retval, Point start_near)
|
||||
{
|
||||
PointConstPtrs my_points;
|
||||
std::map<const Point*,Points::size_type> indices;
|
||||
my_points.reserve(points.size());
|
||||
for (Points::const_iterator it = points.begin(); it != points.end(); ++it) {
|
||||
my_points.push_back(&*it);
|
||||
indices[&*it] = it - points.begin();
|
||||
}
|
||||
|
||||
retval.reserve(points.size());
|
||||
while (!my_points.empty()) {
|
||||
Points::size_type idx = start_near.nearest_point_index(my_points);
|
||||
start_near = *my_points[idx];
|
||||
retval.push_back(indices[ my_points[idx] ]);
|
||||
my_points.erase(my_points.begin() + idx);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
chained_path(const Points &points, std::vector<Points::size_type> &retval)
|
||||
{
|
||||
if (points.empty()) return; // can't call front() on empty vector
|
||||
chained_path(points, retval, points.front());
|
||||
}
|
||||
|
||||
/* retval and items must be different containers */
|
||||
template<class T>
|
||||
void
|
||||
chained_path_items(Points &points, T &items, T &retval)
|
||||
{
|
||||
std::vector<Points::size_type> indices;
|
||||
chained_path(points, indices);
|
||||
for (std::vector<Points::size_type>::const_iterator it = indices.begin(); it != indices.end(); ++it)
|
||||
retval.push_back(items[*it]);
|
||||
}
|
||||
template void chained_path_items(Points &points, ClipperLib::PolyNodes &items, ClipperLib::PolyNodes &retval);
|
||||
|
||||
bool
|
||||
directions_parallel(double angle1, double angle2, double max_diff)
|
||||
bool directions_parallel(double angle1, double angle2, double max_diff)
|
||||
{
|
||||
double diff = fabs(angle1 - angle2);
|
||||
max_diff += EPSILON;
|
||||
|
@ -359,8 +321,7 @@ directions_parallel(double angle1, double angle2, double max_diff)
|
|||
}
|
||||
|
||||
template<class T>
|
||||
bool
|
||||
contains(const std::vector<T> &vector, const Point &point)
|
||||
bool contains(const std::vector<T> &vector, const Point &point)
|
||||
{
|
||||
for (typename std::vector<T>::const_iterator it = vector.begin(); it != vector.end(); ++it) {
|
||||
if (it->contains(point)) return true;
|
||||
|
@ -369,16 +330,101 @@ contains(const std::vector<T> &vector, const Point &point)
|
|||
}
|
||||
template bool contains(const ExPolygons &vector, const Point &point);
|
||||
|
||||
double
|
||||
rad2deg_dir(double angle)
|
||||
double rad2deg_dir(double angle)
|
||||
{
|
||||
angle = (angle < PI) ? (-angle + PI/2.0) : (angle + PI/2.0);
|
||||
if (angle < 0) angle += PI;
|
||||
return rad2deg(angle);
|
||||
}
|
||||
|
||||
void
|
||||
simplify_polygons(const Polygons &polygons, double tolerance, Polygons* retval)
|
||||
Point circle_taubin_newton(const Points::const_iterator& input_begin, const Points::const_iterator& input_end, size_t cycles)
|
||||
{
|
||||
Vec2ds tmp;
|
||||
tmp.reserve(std::distance(input_begin, input_end));
|
||||
std::transform(input_begin, input_end, std::back_inserter(tmp), [] (const Point& in) { return unscale(in); } );
|
||||
Vec2d center = circle_taubin_newton(tmp.cbegin(), tmp.end(), cycles);
|
||||
return Point::new_scale(center.x(), center.y());
|
||||
}
|
||||
|
||||
/// Adapted from work in "Circular and Linear Regression: Fitting circles and lines by least squares", pg 126
|
||||
/// Returns a point corresponding to the center of a circle for which all of the points from input_begin to input_end
|
||||
/// lie on.
|
||||
Vec2d circle_taubin_newton(const Vec2ds::const_iterator& input_begin, const Vec2ds::const_iterator& input_end, size_t cycles)
|
||||
{
|
||||
// calculate the centroid of the data set
|
||||
const Vec2d sum = std::accumulate(input_begin, input_end, Vec2d(0,0));
|
||||
const size_t n = std::distance(input_begin, input_end);
|
||||
const double n_flt = static_cast<double>(n);
|
||||
const Vec2d centroid { sum / n_flt };
|
||||
|
||||
// Compute the normalized moments of the data set.
|
||||
double Mxx = 0, Myy = 0, Mxy = 0, Mxz = 0, Myz = 0, Mzz = 0;
|
||||
for (auto it = input_begin; it < input_end; ++it) {
|
||||
// center/normalize the data.
|
||||
double Xi {it->x() - centroid.x()};
|
||||
double Yi {it->y() - centroid.y()};
|
||||
double Zi {Xi*Xi + Yi*Yi};
|
||||
Mxy += (Xi*Yi);
|
||||
Mxx += (Xi*Xi);
|
||||
Myy += (Yi*Yi);
|
||||
Mxz += (Xi*Zi);
|
||||
Myz += (Yi*Zi);
|
||||
Mzz += (Zi*Zi);
|
||||
}
|
||||
|
||||
// divide by number of points to get the moments
|
||||
Mxx /= n_flt;
|
||||
Myy /= n_flt;
|
||||
Mxy /= n_flt;
|
||||
Mxz /= n_flt;
|
||||
Myz /= n_flt;
|
||||
Mzz /= n_flt;
|
||||
|
||||
// Compute the coefficients of the characteristic polynomial for the circle
|
||||
// eq 5.60
|
||||
const double Mz {Mxx + Myy}; // xx + yy = z
|
||||
const double Cov_xy {Mxx*Myy - Mxy*Mxy}; // this shows up a couple times so cache it here.
|
||||
const double C3 {4.0*Mz};
|
||||
const double C2 {-3.0*(Mz*Mz) - Mzz};
|
||||
const double C1 {Mz*(Mzz - (Mz*Mz)) + 4.0*Mz*Cov_xy - (Mxz*Mxz) - (Myz*Myz)};
|
||||
const double C0 {(Mxz*Mxz)*Myy + (Myz*Myz)*Mxx - 2.0*Mxz*Myz*Mxy - Cov_xy*(Mzz - (Mz*Mz))};
|
||||
|
||||
const double C22 = {C2 + C2};
|
||||
const double C33 = {C3 + C3 + C3};
|
||||
|
||||
// solve the characteristic polynomial with Newton's method.
|
||||
double xnew = 0.0;
|
||||
double ynew = 1e20;
|
||||
|
||||
for (size_t i = 0; i < cycles; ++i) {
|
||||
const double yold {ynew};
|
||||
ynew = C0 + xnew * (C1 + xnew*(C2 + xnew * C3));
|
||||
if (std::abs(ynew) > std::abs(yold)) {
|
||||
BOOST_LOG_TRIVIAL(error) << "Geometry: Fit is going in the wrong direction.\n";
|
||||
return Vec2d(std::nan(""), std::nan(""));
|
||||
}
|
||||
const double Dy {C1 + xnew*(C22 + xnew*C33)};
|
||||
|
||||
const double xold {xnew};
|
||||
xnew = xold - (ynew / Dy);
|
||||
|
||||
if (std::abs((xnew-xold) / xnew) < 1e-12) i = cycles; // converged, we're done here
|
||||
|
||||
if (xnew < 0) {
|
||||
// reset, we went negative
|
||||
xnew = 0.0;
|
||||
}
|
||||
}
|
||||
|
||||
// compute the determinant and the circle's parameters now that we've solved.
|
||||
double DET = xnew*xnew - xnew*Mz + Cov_xy;
|
||||
|
||||
Vec2d center(Mxz * (Myy - xnew) - Myz * Mxy, Myz * (Mxx - xnew) - Mxz*Mxy);
|
||||
center /= (DET * 2.);
|
||||
return center + centroid;
|
||||
}
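Aside (illustrative, not from this changeset): a minimal call of the circle fit through three exact points. The sample coordinates are made up; only the Vec2ds overload declared in Geometry.hpp is assumed.

// Three points on a circle of radius 5 centered at (10, -3).
Slic3r::Vec2ds pts;
pts.emplace_back(15.0, -3.0);
pts.emplace_back(10.0,  2.0);
pts.emplace_back( 5.0, -3.0);
// For exact points the Taubin/Newton fit returns the circumcenter, here ~(10, -3);
// with noisy input it returns the least-squares center.
Slic3r::Vec2d center = Slic3r::Geometry::circle_taubin_newton(pts);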
|
||||
|
||||
void simplify_polygons(const Polygons &polygons, double tolerance, Polygons* retval)
|
||||
{
|
||||
Polygons pp;
|
||||
for (Polygons::const_iterator it = polygons.begin(); it != polygons.end(); ++it) {
|
||||
|
@ -391,8 +437,7 @@ simplify_polygons(const Polygons &polygons, double tolerance, Polygons* retval)
|
|||
*retval = Slic3r::simplify_polygons(pp);
|
||||
}
|
||||
|
||||
double
|
||||
linint(double value, double oldmin, double oldmax, double newmin, double newmax)
|
||||
double linint(double value, double oldmin, double oldmax, double newmin, double newmax)
|
||||
{
|
||||
return (value - oldmin) * (newmax - newmin) / (oldmax - oldmin) + newmin;
|
||||
}
|
||||
|
@ -618,7 +663,6 @@ namespace Voronoi { namespace Internal {
|
|||
typedef boost::polygon::point_data<coordinate_type> point_type;
|
||||
typedef boost::polygon::segment_data<coordinate_type> segment_type;
|
||||
typedef boost::polygon::rectangle_data<coordinate_type> rect_type;
|
||||
// typedef voronoi_builder<int> VB;
|
||||
typedef boost::polygon::voronoi_diagram<coordinate_type> VD;
|
||||
typedef VD::cell_type cell_type;
|
||||
typedef VD::cell_type::source_index_type source_index_type;
|
||||
|
@ -665,15 +709,15 @@ namespace Voronoi { namespace Internal {
|
|||
if (cell1.contains_point() && cell2.contains_point()) {
|
||||
point_type p1 = retrieve_point(segments, cell1);
|
||||
point_type p2 = retrieve_point(segments, cell2);
|
||||
origin.x((p1(0) + p2(0)) * 0.5);
|
||||
origin.y((p1(1) + p2(1)) * 0.5);
|
||||
direction.x(p1(1) - p2(1));
|
||||
direction.y(p2(0) - p1(0));
|
||||
origin.x((p1.x() + p2.x()) * 0.5);
|
||||
origin.y((p1.y() + p2.y()) * 0.5);
|
||||
direction.x(p1.y() - p2.y());
|
||||
direction.y(p2.x() - p1.x());
|
||||
} else {
|
||||
origin = cell1.contains_segment() ? retrieve_point(segments, cell2) : retrieve_point(segments, cell1);
|
||||
segment_type segment = cell1.contains_segment() ? segments[cell1.source_index()] : segments[cell2.source_index()];
|
||||
coordinate_type dx = high(segment)(0) - low(segment)(0);
|
||||
coordinate_type dy = high(segment)(1) - low(segment)(1);
|
||||
coordinate_type dx = high(segment).x() - low(segment).x();
|
||||
coordinate_type dy = high(segment).y() - low(segment).y();
|
||||
if ((low(segment) == origin) ^ cell1.contains_point()) {
|
||||
direction.x(dy);
|
||||
direction.y(-dx);
|
||||
|
@ -682,19 +726,19 @@ namespace Voronoi { namespace Internal {
|
|||
direction.y(dx);
|
||||
}
|
||||
}
|
||||
coordinate_type koef = bbox_max_size / (std::max)(fabs(direction(0)), fabs(direction(1)));
|
||||
coordinate_type koef = bbox_max_size / (std::max)(fabs(direction.x()), fabs(direction.y()));
|
||||
if (edge.vertex0() == NULL) {
|
||||
clipped_edge->push_back(point_type(
|
||||
origin(0) - direction(0) * koef,
|
||||
origin(1) - direction(1) * koef));
|
||||
origin.x() - direction.x() * koef,
|
||||
origin.y() - direction.y() * koef));
|
||||
} else {
|
||||
clipped_edge->push_back(
|
||||
point_type(edge.vertex0()->x(), edge.vertex0()->y()));
|
||||
}
|
||||
if (edge.vertex1() == NULL) {
|
||||
clipped_edge->push_back(point_type(
|
||||
origin(0) + direction(0) * koef,
|
||||
origin(1) + direction(1) * koef));
|
||||
origin.x() + direction.x() * koef,
|
||||
origin.y() + direction.y() * koef));
|
||||
} else {
|
||||
clipped_edge->push_back(
|
||||
point_type(edge.vertex1()->x(), edge.vertex1()->y()));
|
||||
|
@ -714,7 +758,7 @@ namespace Voronoi { namespace Internal {
|
|||
|
||||
} /* namespace Internal */ } // namespace Voronoi
|
||||
|
||||
static inline void dump_voronoi_to_svg(const Lines &lines, /* const */ voronoi_diagram<double> &vd, const ThickPolylines *polylines, const char *path)
|
||||
static inline void dump_voronoi_to_svg(const Lines &lines, /* const */ boost::polygon::voronoi_diagram<double> &vd, const ThickPolylines *polylines, const char *path)
|
||||
{
|
||||
const double scale = 0.2;
|
||||
const std::string inputSegmentPointColor = "lightseagreen";
|
||||
|
@ -758,7 +802,7 @@ static inline void dump_voronoi_to_svg(const Lines &lines, /* const */ voronoi_d
|
|||
Voronoi::Internal::point_type(double(it->b(0)), double(it->b(1)))));
|
||||
|
||||
// Color exterior edges.
|
||||
for (voronoi_diagram<double>::const_edge_iterator it = vd.edges().begin(); it != vd.edges().end(); ++it)
|
||||
for (boost::polygon::voronoi_diagram<double>::const_edge_iterator it = vd.edges().begin(); it != vd.edges().end(); ++it)
|
||||
if (!it->is_finite())
|
||||
Voronoi::Internal::color_exterior(&(*it));
|
||||
|
||||
|
@ -773,11 +817,11 @@ static inline void dump_voronoi_to_svg(const Lines &lines, /* const */ voronoi_d
|
|||
|
||||
#if 1
|
||||
// Draw voronoi vertices.
|
||||
for (voronoi_diagram<double>::const_vertex_iterator it = vd.vertices().begin(); it != vd.vertices().end(); ++it)
|
||||
for (boost::polygon::voronoi_diagram<double>::const_vertex_iterator it = vd.vertices().begin(); it != vd.vertices().end(); ++it)
|
||||
if (! internalEdgesOnly || it->color() != Voronoi::Internal::EXTERNAL_COLOR)
|
||||
svg.draw(Point(coord_t((*it)(0)), coord_t((*it)(1))), voronoiPointColor, voronoiPointRadius);
|
||||
svg.draw(Point(coord_t(it->x()), coord_t(it->y())), voronoiPointColor, voronoiPointRadius);
|
||||
|
||||
for (voronoi_diagram<double>::const_edge_iterator it = vd.edges().begin(); it != vd.edges().end(); ++it) {
|
||||
for (boost::polygon::voronoi_diagram<double>::const_edge_iterator it = vd.edges().begin(); it != vd.edges().end(); ++it) {
|
||||
if (primaryEdgesOnly && !it->is_primary())
|
||||
continue;
|
||||
if (internalEdgesOnly && (it->color() == Voronoi::Internal::EXTERNAL_COLOR))
|
||||
|
@ -800,7 +844,7 @@ static inline void dump_voronoi_to_svg(const Lines &lines, /* const */ voronoi_d
|
|||
color = voronoiLineColorSecondary;
|
||||
}
|
||||
for (std::size_t i = 0; i + 1 < samples.size(); ++i)
|
||||
svg.draw(Line(Point(coord_t(samples[i](0)), coord_t(samples[i](1))), Point(coord_t(samples[i+1](0)), coord_t(samples[i+1](1)))), color, voronoiLineWidth);
|
||||
svg.draw(Line(Point(coord_t(samples[i].x()), coord_t(samples[i].y())), Point(coord_t(samples[i+1].x()), coord_t(samples[i+1].y()))), color, voronoiLineWidth);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
@ -1376,6 +1420,32 @@ void Transformation::set_from_transform(const Transform3d& transform)
|
|||
// std::cout << "something went wrong in extracting data from matrix" << std::endl;
|
||||
}
|
||||
|
||||
void Transformation::set_from_string(const std::string& transform_str)
|
||||
{
|
||||
Transform3d transform = Transform3d::Identity();
|
||||
|
||||
if (!transform_str.empty())
|
||||
{
|
||||
std::vector<std::string> mat_elements_str;
|
||||
boost::split(mat_elements_str, transform_str, boost::is_any_of(" "), boost::token_compress_on);
|
||||
|
||||
unsigned int size = (unsigned int)mat_elements_str.size();
|
||||
if (size == 16)
|
||||
{
|
||||
unsigned int i = 0;
|
||||
for (unsigned int r = 0; r < 4; ++r)
|
||||
{
|
||||
for (unsigned int c = 0; c < 4; ++c)
|
||||
{
|
||||
transform(r, c) = ::atof(mat_elements_str[i++].c_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
set_from_transform(transform);
|
||||
}
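Aside (illustrative): the string holds the 16 elements of a 4x4 matrix in row-major order, exactly as the nested loop above consumes them. For example, this call (values made up) sets a pure translation of +10 mm along X:

Slic3r::Geometry::Transformation trafo;
trafo.set_from_string("1 0 0 10 0 1 0 0 0 0 1 0 0 0 0 1");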
|
||||
|
||||
void Transformation::reset()
|
||||
{
|
||||
m_offset = Vec3d::Zero();
|
||||
|
|
|
@ -11,8 +11,11 @@
|
|||
#include <cereal/access.hpp>
|
||||
|
||||
#include "boost/polygon/voronoi.hpp"
|
||||
using boost::polygon::voronoi_builder;
|
||||
using boost::polygon::voronoi_diagram;
|
||||
|
||||
namespace ClipperLib {
|
||||
class PolyNode;
|
||||
using PolyNodes = std::vector<PolyNode*>;
|
||||
}
|
||||
|
||||
namespace Slic3r { namespace Geometry {
|
||||
|
||||
|
@ -138,9 +141,6 @@ Pointf3s convex_hull(Pointf3s points);
|
|||
Polygon convex_hull(Points points);
|
||||
Polygon convex_hull(const Polygons &polygons);
|
||||
|
||||
void chained_path(const Points &points, std::vector<Points::size_type> &retval, Point start_near);
|
||||
void chained_path(const Points &points, std::vector<Points::size_type> &retval);
|
||||
template<class T> void chained_path_items(Points &points, T &items, T &retval);
|
||||
bool directions_parallel(double angle1, double angle2, double max_diff = 0);
|
||||
template<class T> bool contains(const std::vector<T> &vector, const Point &point);
|
||||
template<typename T> T rad2deg(T angle) { return T(180.0) * angle / T(PI); }
|
||||
|
@ -160,6 +160,15 @@ template<typename T> T angle_to_0_2PI(T angle)
|
|||
|
||||
return angle;
|
||||
}
|
||||
|
||||
/// Find the center of the circle corresponding to the vector of Points as an arc.
|
||||
Point circle_taubin_newton(const Points::const_iterator& input_start, const Points::const_iterator& input_end, size_t cycles = 20);
|
||||
inline Point circle_taubin_newton(const Points& input, size_t cycles = 20) { return circle_taubin_newton(input.cbegin(), input.cend(), cycles); }
|
||||
|
||||
/// Find the center of the circle corresponding to the vector of Pointfs as an arc.
|
||||
Vec2d circle_taubin_newton(const Vec2ds::const_iterator& input_start, const Vec2ds::const_iterator& input_end, size_t cycles = 20);
|
||||
inline Vec2d circle_taubin_newton(const Vec2ds& input, size_t cycles = 20) { return circle_taubin_newton(input.cbegin(), input.cend(), cycles); }
|
||||
|
||||
void simplify_polygons(const Polygons &polygons, double tolerance, Polygons* retval);
|
||||
|
||||
double linint(double value, double oldmin, double oldmax, double newmin, double newmax);
|
||||
|
@ -181,7 +190,7 @@ class MedialAxis {
|
|||
void build(Polylines* polylines);
|
||||
|
||||
private:
|
||||
class VD : public voronoi_diagram<double> {
|
||||
class VD : public boost::polygon::voronoi_diagram<double> {
|
||||
public:
|
||||
typedef double coord_type;
|
||||
typedef boost::polygon::point_data<coordinate_type> point_type;
|
||||
|
@ -278,6 +287,7 @@ public:
|
|||
void set_mirror(Axis axis, double mirror);
|
||||
|
||||
void set_from_transform(const Transform3d& transform);
|
||||
void set_from_string(const std::string& transform_str);
|
||||
|
||||
void reset();
|
||||
|
||||
|
|
233 src/libslic3r/KDTreeIndirect.hpp Normal file
|
@ -0,0 +1,233 @@
|
|||
// KD tree built upon external data set, referencing the external data by integer indices.
|
||||
|
||||
#ifndef slic3r_KDTreeIndirect_hpp_
|
||||
#define slic3r_KDTreeIndirect_hpp_
|
||||
|
||||
#include <algorithm>
|
||||
#include <limits>
|
||||
#include <vector>
|
||||
|
||||
#include "Utils.hpp" // for next_highest_power_of_2()
|
||||
|
||||
namespace Slic3r {
|
||||
|
||||
// KD tree for N-dimensional closest point search.
|
||||
template<size_t ANumDimensions, typename ACoordType, typename ACoordinateFn>
|
||||
class KDTreeIndirect
|
||||
{
|
||||
public:
|
||||
static constexpr size_t NumDimensions = ANumDimensions;
|
||||
using CoordinateFn = ACoordinateFn;
|
||||
using CoordType = ACoordType;
|
||||
// Following could be static constexpr size_t, but that would not link in C++11
|
||||
enum : size_t {
|
||||
npos = size_t(-1)
|
||||
};
|
||||
|
||||
KDTreeIndirect(CoordinateFn coordinate) : coordinate(coordinate) {}
|
||||
KDTreeIndirect(CoordinateFn coordinate, std::vector<size_t> indices) : coordinate(coordinate) { this->build(std::move(indices)); }
|
||||
KDTreeIndirect(CoordinateFn coordinate, std::vector<size_t> &&indices) : coordinate(coordinate) { this->build(std::move(indices)); }
|
||||
KDTreeIndirect(CoordinateFn coordinate, size_t num_indices) : coordinate(coordinate) { this->build(num_indices); }
|
||||
KDTreeIndirect(KDTreeIndirect &&rhs) : m_nodes(std::move(rhs.m_nodes)), coordinate(std::move(rhs.coordinate)) {}
|
||||
KDTreeIndirect& operator=(KDTreeIndirect &&rhs) { m_nodes = std::move(rhs.m_nodes); coordinate = std::move(rhs.coordinate); return *this; }
|
||||
void clear() { m_nodes.clear(); }
|
||||
|
||||
void build(size_t num_indices)
|
||||
{
|
||||
std::vector<size_t> indices;
|
||||
indices.reserve(num_indices);
|
||||
for (size_t i = 0; i < num_indices; ++ i)
|
||||
indices.emplace_back(i);
|
||||
this->build(std::move(indices));
|
||||
}
|
||||
|
||||
void build(std::vector<size_t> &&indices)
|
||||
{
|
||||
if (indices.empty())
|
||||
clear();
|
||||
else {
|
||||
// Allocate a next highest power of 2 nodes, because the incomplete binary tree will not have the leaves filled strictly from the left.
|
||||
m_nodes.assign(next_highest_power_of_2(indices.size() + 1), npos);
|
||||
build_recursive(indices, 0, 0, 0, (int)(indices.size() - 1));
|
||||
}
|
||||
indices.clear();
|
||||
}
|
||||
|
||||
enum class VisitorReturnMask : unsigned int
|
||||
{
|
||||
CONTINUE_LEFT = 1,
|
||||
CONTINUE_RIGHT = 2,
|
||||
STOP = 4,
|
||||
};
|
||||
template<typename CoordType>
|
||||
unsigned int descent_mask(const CoordType &point_coord, const CoordType &search_radius, size_t idx, size_t dimension) const
|
||||
{
|
||||
CoordType dist = point_coord - this->coordinate(idx, dimension);
|
||||
return (dist * dist < search_radius + CoordType(EPSILON)) ?
|
||||
// The plane intersects a hypersphere centered at point_coord of search_radius.
|
||||
((unsigned int)(VisitorReturnMask::CONTINUE_LEFT) | (unsigned int)(VisitorReturnMask::CONTINUE_RIGHT)) :
|
||||
// The plane does not intersect the hypersphere.
|
||||
(dist > CoordType(0)) ? (unsigned int)(VisitorReturnMask::CONTINUE_RIGHT) : (unsigned int)(VisitorReturnMask::CONTINUE_LEFT);
|
||||
}
|
||||
|
||||
// Visitor is supposed to return a bit mask of VisitorReturnMask.
|
||||
template<typename Visitor>
|
||||
void visit(Visitor &visitor) const
|
||||
{
|
||||
visit_recursive(0, 0, visitor);
|
||||
}
|
||||
|
||||
CoordinateFn coordinate;
|
||||
|
||||
private:
|
||||
// Build a balanced tree by splitting the input sequence by an axis aligned plane at a dimension.
|
||||
void build_recursive(std::vector<size_t> &input, size_t node, int dimension, int left, int right)
|
||||
{
|
||||
if (left > right)
|
||||
return;
|
||||
|
||||
assert(node < m_nodes.size());
|
||||
|
||||
if (left == right) {
|
||||
// Insert a node into the balanced tree.
|
||||
m_nodes[node] = input[left];
|
||||
return;
|
||||
}
|
||||
|
||||
// Partition the input sequence to two equal halves.
|
||||
int center = (left + right) >> 1;
|
||||
partition_input(input, dimension, left, right, center);
|
||||
// Insert a node into the tree.
|
||||
m_nodes[node] = input[center];
|
||||
// Partition the left and right subtrees.
|
||||
size_t next_dimension = (++ dimension == NumDimensions) ? 0 : dimension;
|
||||
build_recursive(input, (node << 1) + 1, next_dimension, left, center - 1);
|
||||
build_recursive(input, (node << 1) + 2, next_dimension, center + 1, right);
|
||||
}
|
||||
|
||||
// Partition the input m_nodes <left, right> at k using QuickSelect method.
|
||||
// https://en.wikipedia.org/wiki/Quickselect
|
||||
void partition_input(std::vector<size_t> &input, int dimension, int left, int right, int k) const
|
||||
{
|
||||
while (left < right) {
|
||||
// Guess the k'th element.
|
||||
// Pick the pivot as a median of first, center and last value.
|
||||
// Sort first, center and last values.
|
||||
int center = (left + right) >> 1;
|
||||
auto left_value = this->coordinate(input[left], dimension);
|
||||
auto center_value = this->coordinate(input[center], dimension);
|
||||
auto right_value = this->coordinate(input[right], dimension);
|
||||
if (center_value < left_value) {
|
||||
std::swap(input[left], input[center]);
|
||||
std::swap(left_value, center_value);
|
||||
}
|
||||
if (right_value < left_value) {
|
||||
std::swap(input[left], input[right]);
|
||||
std::swap(left_value, right_value);
|
||||
}
|
||||
if (right_value < center_value) {
|
||||
std::swap(input[center], input[right]);
|
||||
// No need to do that, result is not used.
|
||||
// std::swap(center_value, right_value);
|
||||
}
|
||||
// Only two or three values are left and those are sorted already.
|
||||
if (left + 3 > right)
|
||||
break;
|
||||
// left and right items are already at their correct positions.
|
||||
// input[left].point[dimension] <= input[center].point[dimension] <= input[right].point[dimension]
|
||||
// Move the pivot to the (right - 1) position.
|
||||
std::swap(input[center], input[right - 1]);
|
||||
// Pivot value.
|
||||
double pivot = this->coordinate(input[right - 1], dimension);
|
||||
// Partition the set based on the pivot.
|
||||
int i = left;
|
||||
int j = right - 1;
|
||||
for (;;) {
|
||||
// Skip left points that are already at correct positions.
|
||||
// Search will certainly stop at position (right - 1), which stores the pivot.
|
||||
while (this->coordinate(input[++ i], dimension) < pivot) ;
|
||||
// Skip right points that are already at correct positions.
|
||||
while (this->coordinate(input[-- j], dimension) > pivot && i < j) ;
|
||||
if (i >= j)
|
||||
break;
|
||||
std::swap(input[i], input[j]);
|
||||
}
|
||||
// Restore pivot to the center of the sequence.
|
||||
std::swap(input[i], input[right]);
|
||||
// Which side the kth element is in?
|
||||
if (k < i)
|
||||
right = i - 1;
|
||||
else if (k == i)
|
||||
// Sequence is partitioned, kth element is at its place.
|
||||
break;
|
||||
else
|
||||
left = i + 1;
|
||||
}
|
||||
}
|
||||
|
||||
template<typename Visitor>
|
||||
void visit_recursive(size_t node, size_t dimension, Visitor &visitor) const
|
||||
{
|
||||
assert(! m_nodes.empty());
|
||||
if (node >= m_nodes.size() || m_nodes[node] == npos)
|
||||
return;
|
||||
|
||||
// Left / right child node index.
|
||||
size_t left = (node << 1) + 1;
|
||||
size_t right = left + 1;
|
||||
unsigned int mask = visitor(m_nodes[node], dimension);
|
||||
if ((mask & (unsigned int)VisitorReturnMask::STOP) == 0) {
|
||||
size_t next_dimension = (++ dimension == NumDimensions) ? 0 : dimension;
|
||||
if (mask & (unsigned int)VisitorReturnMask::CONTINUE_LEFT)
|
||||
visit_recursive(left, next_dimension, visitor);
|
||||
if (mask & (unsigned int)VisitorReturnMask::CONTINUE_RIGHT)
|
||||
visit_recursive(right, next_dimension, visitor);
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<size_t> m_nodes;
|
||||
};
|
||||
|
||||
// Find a closest point using Euclidian metrics.
|
||||
// Returns npos if not found.
|
||||
template<typename KDTreeIndirectType, typename PointType, typename FilterFn>
|
||||
size_t find_closest_point(const KDTreeIndirectType &kdtree, const PointType &point, FilterFn filter)
|
||||
{
|
||||
struct Visitor {
|
||||
using CoordType = typename KDTreeIndirectType::CoordType;
|
||||
const KDTreeIndirectType &kdtree;
|
||||
const PointType &point;
|
||||
const FilterFn filter;
|
||||
size_t min_idx = KDTreeIndirectType::npos;
|
||||
CoordType min_dist = std::numeric_limits<CoordType>::max();
|
||||
|
||||
Visitor(const KDTreeIndirectType &kdtree, const PointType &point, FilterFn filter) : kdtree(kdtree), point(point), filter(filter) {}
|
||||
unsigned int operator()(size_t idx, size_t dimension) {
|
||||
if (this->filter(idx)) {
|
||||
auto dist = CoordType(0);
|
||||
for (size_t i = 0; i < KDTreeIndirectType::NumDimensions; ++ i) {
|
||||
CoordType d = point[i] - kdtree.coordinate(idx, i);
|
||||
dist += d * d;
|
||||
}
|
||||
if (dist < min_dist) {
|
||||
min_dist = dist;
|
||||
min_idx = idx;
|
||||
}
|
||||
}
|
||||
return kdtree.descent_mask(point[dimension], min_dist, idx, dimension);
|
||||
}
|
||||
} visitor(kdtree, point, filter);
|
||||
|
||||
kdtree.visit(visitor);
|
||||
return visitor.min_idx;
|
||||
}
|
||||
|
||||
template<typename KDTreeIndirectType, typename PointType>
|
||||
size_t find_closest_point(const KDTreeIndirectType& kdtree, const PointType& point)
|
||||
{
|
||||
return find_closest_point(kdtree, point, [](size_t) { return true; });
|
||||
}
|
||||
|
||||
} // namespace Slic3r
|
||||
|
||||
#endif /* slic3r_KDTreeIndirect_hpp_ */
|
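Aside (illustrative, not from this changeset): a minimal sketch of indexing a point cloud with the new KD tree and querying the nearest neighbour. The point container and the coordinate lambda are assumptions; only KDTreeIndirect and find_closest_point() come from the header above.

#include <array>
#include <vector>
#include "libslic3r/KDTreeIndirect.hpp"

using Point3f = std::array<float, 3>;

// Returns the index of the point closest to `query` (npos if nothing qualifies).
size_t nearest_point_sketch(const std::vector<Point3f> &pts, const Point3f &query)
{
    // The tree stores only indices; this lambda tells it how to read coordinate `dim` of point `idx`.
    auto coord = [&pts](size_t idx, size_t dim) { return pts[idx][dim]; };
    Slic3r::KDTreeIndirect<3, float, decltype(coord)> tree(coord, pts.size());
    return Slic3r::find_closest_point(tree, query);
}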
Some files were not shown because too many files have changed in this diff.