diff --git a/gpb/CMakeLists.txt b/gpb/CMakeLists.txt
new file mode 100644
index 0000000..19ce125
--- /dev/null
+++ b/gpb/CMakeLists.txt
@@ -0,0 +1,37 @@
+cmake_minimum_required(VERSION 2.6)
+project(gpb)
+set(PROJECT_NAME gpb)
+
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
+
+# Boost
+set(Boost_USE_STATIC_LIBS ON)
+set(Boost_USE_MULTITHREADED ON)
+find_package(Boost COMPONENTS python REQUIRED)
+
+find_package(OpenCV REQUIRED)
+find_package(PythonLibs REQUIRED)
+find_package(NumPy REQUIRED)
+
+set(CMAKE_CXX_FLAGS "-std=c++11 -g")
+
+include_directories(
+    include
+    ${PYTHON_INCLUDE_DIRS}
+    ${NUMPY_INCLUDE_DIRS}
+)
+
+add_library(gpb_selection MODULE src/main.cpp src/conversion.cpp)
+set_target_properties(gpb_selection PROPERTIES PREFIX "")
+target_link_libraries(gpb_selection
+    ${Boost_LIBRARIES}
+    ${OpenCV_LIBRARIES}
+    ${PYTHON_LIBRARIES}
+)
+
+add_executable(gpb src/main.cpp src/conversion.cpp)
+target_link_libraries(gpb
+    ${Boost_LIBRARIES}
+    ${OpenCV_LIBRARIES}
+    ${PYTHON_LIBRARIES}
+)
diff --git a/gpb/cmake/FindNumPy.cmake b/gpb/cmake/FindNumPy.cmake
new file mode 100644
index 0000000..eafed16
--- /dev/null
+++ b/gpb/cmake/FindNumPy.cmake
@@ -0,0 +1,102 @@
+# - Find the NumPy libraries
+# This module finds if NumPy is installed, and sets the following variables
+# indicating where it is.
+#
+# TODO: Update to provide the libraries and paths for linking npymath lib.
+#
+#  NUMPY_FOUND            - was NumPy found
+#  NUMPY_VERSION          - the version of NumPy found as a string
+#  NUMPY_VERSION_MAJOR    - the major version number of NumPy
+#  NUMPY_VERSION_MINOR    - the minor version number of NumPy
+#  NUMPY_VERSION_PATCH    - the patch version number of NumPy
+#  NUMPY_VERSION_DECIMAL  - e.g. version 1.6.1 is 10601
+#  NUMPY_INCLUDE_DIRS     - path to the NumPy include files
+
+#============================================================================
+# Copyright 2012 Continuum Analytics, Inc.
+#
+# MIT License
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files
+# (the "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+#============================================================================
+
+# Finding NumPy involves calling the Python interpreter
+if(NumPy_FIND_REQUIRED)
+    find_package(PythonInterp REQUIRED)
+else()
+    find_package(PythonInterp)
+endif()
+
+if(NOT PYTHONINTERP_FOUND)
+    set(NUMPY_FOUND FALSE)
+    return()
+endif()
+
+execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c"
+    "import numpy as n; print(n.__version__); print(n.get_include());"
+    RESULT_VARIABLE _NUMPY_SEARCH_SUCCESS
+    OUTPUT_VARIABLE _NUMPY_VALUES_OUTPUT
+    ERROR_VARIABLE _NUMPY_ERROR_VALUE
+    OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+if(NOT _NUMPY_SEARCH_SUCCESS MATCHES 0)
+    if(NumPy_FIND_REQUIRED)
+        message(FATAL_ERROR
+            "NumPy import failure:\n${_NUMPY_ERROR_VALUE}")
+    endif()
+    set(NUMPY_FOUND FALSE)
+    return()
+endif()
+
+# Convert the process output into a list
+string(REGEX REPLACE ";" "\\\\;" _NUMPY_VALUES ${_NUMPY_VALUES_OUTPUT})
+string(REGEX REPLACE "\n" ";" _NUMPY_VALUES ${_NUMPY_VALUES})
+# Just in case there is unexpected output from the Python command.
+list(GET _NUMPY_VALUES -2 NUMPY_VERSION)
+list(GET _NUMPY_VALUES -1 NUMPY_INCLUDE_DIRS)
+
+string(REGEX MATCH "^[0-9]+\\.[0-9]+\\.[0-9]+" _VER_CHECK "${NUMPY_VERSION}")
+if("${_VER_CHECK}" STREQUAL "")
+    # The output from Python was unexpected. Raise an error always
+    # here, because we found NumPy, but it appears to be corrupted somehow.
+    message(FATAL_ERROR
+        "Requested version and include path from NumPy, got instead:\n${_NUMPY_VALUES_OUTPUT}\n")
+    return()
+endif()
+
+# Make sure all directory separators are '/'
+string(REGEX REPLACE "\\\\" "/" NUMPY_INCLUDE_DIRS ${NUMPY_INCLUDE_DIRS})
+
+# Get the major and minor version numbers
+string(REGEX REPLACE "\\." ";" _NUMPY_VERSION_LIST ${NUMPY_VERSION})
+list(GET _NUMPY_VERSION_LIST 0 NUMPY_VERSION_MAJOR)
+list(GET _NUMPY_VERSION_LIST 1 NUMPY_VERSION_MINOR)
+list(GET _NUMPY_VERSION_LIST 2 NUMPY_VERSION_PATCH)
+string(REGEX MATCH "[0-9]*" NUMPY_VERSION_PATCH ${NUMPY_VERSION_PATCH})
+math(EXPR NUMPY_VERSION_DECIMAL
+    "(${NUMPY_VERSION_MAJOR} * 10000) + (${NUMPY_VERSION_MINOR} * 100) + ${NUMPY_VERSION_PATCH}")
+
+find_package_message(NUMPY
+    "Found NumPy: version \"${NUMPY_VERSION}\" ${NUMPY_INCLUDE_DIRS}"
+    "${NUMPY_INCLUDE_DIRS}${NUMPY_VERSION}")
+
+set(NUMPY_FOUND TRUE)
+
diff --git a/gpb/src/adjacency.h b/gpb/src/adjacency.h
new file mode 100644
index 0000000..dd4e3ee
--- /dev/null
+++ b/gpb/src/adjacency.h
@@ -0,0 +1,26 @@
+#pragma once
+
+#include <vector>
+
+class AdjacencyMatrix : public std::vector<bool> {
+public:
+    AdjacencyMatrix(int n) {
+        this->n = n;
+        assign((n + 1)*n/2, false);
+    }
+
+    reference get(int i, int j) {
+        return at(index(i, j));
+    }
+
+protected:
+    int n;
+
+    int index(int i, int j) {
+        int tmp = i > j ? j : i;
+        j = i > j ? i : j;
+        i = tmp;
+        return (n * i) + j - ((i * (i+1)) / 2);
+    }
+};
+
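AdjacencyMatrix stores a symmetric boolean matrix as the upper triangle of an n x n matrix flattened into a `std::vector<bool>`; `index()` reorders the pair so the smaller id is the row before computing the triangular offset. A minimal standalone sketch (not part of the patch) of how it is meant to be used to deduplicate segment pairs:

```cpp
// Illustration only: symmetric pair lookup via AdjacencyMatrix (adjacency.h above).
#include <cassert>
#include "adjacency.h"

int main() {
    AdjacencyMatrix adjacency(4);          // 4 segments -> (4+1)*4/2 = 10 triangle entries
    assert(adjacency.get(1, 3) == false);  // pair not seen yet
    adjacency.get(1, 3) = true;            // mark the pair as processed
    assert(adjacency.get(3, 1) == true);   // symmetric: (3,1) maps to the same entry
    return 0;
}
```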
diff --git a/gpb/src/connection.h b/gpb/src/connection.h
new file mode 100644
index 0000000..bb2f4a1
--- /dev/null
+++ b/gpb/src/connection.h
@@ -0,0 +1,23 @@
+#pragma once
+
+class Connection
+{
+public:
+    Connection(int a_, int b_, float sim) :
+        a(a_), b(b_), similarity(sim)
+    {
+    }
+
+    friend std::ostream& operator<<(std::ostream &out, Connection & c) {
+        return out << "";
+    }
+
+    bool operator<(const Connection & rhs) const {
+        return rhs.similarity > similarity;
+    }
+
+    int a;
+    int b;
+    float similarity;
+};
+
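`Connection::operator<` orders connections by ascending `similarity`, so after `std::sort` the front of the vector is the pair with the lowest median edge strength, which is what `get_bboxes_()` in main.cpp merges first. A small standalone sketch (not part of the patch) of that ordering:

```cpp
// Illustration only: sorting Connections puts the weakest boundary first.
#include <algorithm>
#include <cassert>
#include <iostream>
#include <vector>
#include "connection.h"

int main() {
    std::vector<Connection> connections = {
        Connection(0, 1, 0.9f),   // strong boundary between segments 0 and 1
        Connection(1, 2, 0.1f),   // weak boundary between segments 1 and 2
    };
    std::sort(connections.begin(), connections.end());
    assert(connections.front().similarity == 0.1f);  // this pair would be merged first
    return 0;
}
```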
diff --git a/gpb/src/conversion.cpp b/gpb/src/conversion.cpp
new file mode 100644
index 0000000..475c670
--- /dev/null
+++ b/gpb/src/conversion.cpp
@@ -0,0 +1,231 @@
+# include "conversion.h"
+/*
+ * The following conversion functions are taken/adapted from OpenCV's cv2.cpp file
+ * inside modules/python/src2 folder.
+ */
+
+static void init()
+{
+    import_array();
+}
+
+static int failmsg(const char *fmt, ...)
+{
+    char str[1000];
+
+    va_list ap;
+    va_start(ap, fmt);
+    vsnprintf(str, sizeof(str), fmt, ap);
+    va_end(ap);
+
+    PyErr_SetString(PyExc_TypeError, str);
+    return 0;
+}
+
+class PyAllowThreads
+{
+public:
+    PyAllowThreads() : _state(PyEval_SaveThread()) {}
+    ~PyAllowThreads()
+    {
+        PyEval_RestoreThread(_state);
+    }
+private:
+    PyThreadState* _state;
+};
+
+class PyEnsureGIL
+{
+public:
+    PyEnsureGIL() : _state(PyGILState_Ensure()) {}
+    ~PyEnsureGIL()
+    {
+        PyGILState_Release(_state);
+    }
+private:
+    PyGILState_STATE _state;
+};
+
+using namespace cv;
+
+static PyObject* failmsgp(const char *fmt, ...)
+{
+    char str[1000];
+
+    va_list ap;
+    va_start(ap, fmt);
+    vsnprintf(str, sizeof(str), fmt, ap);
+    va_end(ap);
+
+    PyErr_SetString(PyExc_TypeError, str);
+    return 0;
+}
+
+class NumpyAllocator : public MatAllocator
+{
+public:
+    NumpyAllocator() {}
+    ~NumpyAllocator() {}
+
+    void allocate(int dims, const int* sizes, int type, int*& refcount,
+                  uchar*& datastart, uchar*& data, size_t* step)
+    {
+        PyEnsureGIL gil;
+
+        int depth = CV_MAT_DEPTH(type);
+        int cn = CV_MAT_CN(type);
+        const int f = (int)(sizeof(size_t)/8);
+        int typenum = depth == CV_8U ? NPY_UBYTE : depth == CV_8S ? NPY_BYTE :
+                      depth == CV_16U ? NPY_USHORT : depth == CV_16S ? NPY_SHORT :
+                      depth == CV_32S ? NPY_INT : depth == CV_32F ? NPY_FLOAT :
+                      depth == CV_64F ? NPY_DOUBLE : f*NPY_ULONGLONG + (f^1)*NPY_UINT;
+        int i;
+        npy_intp _sizes[CV_MAX_DIM+1];
+        for( i = 0; i < dims; i++ )
+        {
+            _sizes[i] = sizes[i];
+        }
+
+        if( cn > 1 )
+        {
+            _sizes[dims++] = cn;
+        }
+
+        PyObject* o = PyArray_SimpleNew(dims, _sizes, typenum);
+
+        if(!o)
+        {
+            CV_Error_(CV_StsError, ("The numpy array of typenum=%d, ndims=%d can not be created", typenum, dims));
+        }
+        refcount = refcountFromPyObject(o);
+
+        npy_intp* _strides = PyArray_STRIDES(o);
+        for( i = 0; i < dims - (cn > 1); i++ )
+            step[i] = (size_t)_strides[i];
+        datastart = data = (uchar*)PyArray_DATA(o);
+    }
+
+    void deallocate(int* refcount, uchar*, uchar*)
+    {
+        PyEnsureGIL gil;
+        if( !refcount )
+            return;
+        PyObject* o = pyObjectFromRefcount(refcount);
+        Py_INCREF(o);
+        Py_DECREF(o);
+    }
+};
+
+NumpyAllocator g_numpyAllocator;
+
+NDArrayConverter::NDArrayConverter() { init(); }
+
+void NDArrayConverter::init()
+{
+    import_array();
+}
+
+cv::Mat NDArrayConverter::toMat(const PyObject *o)
+{
+    cv::Mat m;
+
+    if(!o || o == Py_None)
+    {
+        if( !m.data )
+            m.allocator = &g_numpyAllocator;
+    }
+
+    if( !PyArray_Check(o) )
+    {
+        failmsg("toMat: Object is not a numpy array");
+    }
+
+    int typenum = PyArray_TYPE(o);
+    int type = typenum == NPY_UBYTE ? CV_8U : typenum == NPY_BYTE ? CV_8S :
+               typenum == NPY_USHORT ? CV_16U : typenum == NPY_SHORT ? CV_16S :
+               typenum == NPY_INT || typenum == NPY_LONG ? CV_32S :
+               typenum == NPY_FLOAT ? CV_32F :
+               typenum == NPY_DOUBLE ? CV_64F : -1;
+
+    if( type < 0 )
+    {
+        failmsg("toMat: Data type = %d is not supported", typenum);
+    }
+
+    int ndims = PyArray_NDIM(o);
+
+    if(ndims >= CV_MAX_DIM)
+    {
+        failmsg("toMat: Dimensionality (=%d) is too high", ndims);
+    }
+
+    int size[CV_MAX_DIM+1];
+    size_t step[CV_MAX_DIM+1], elemsize = CV_ELEM_SIZE1(type);
+    const npy_intp* _sizes = PyArray_DIMS(o);
+    const npy_intp* _strides = PyArray_STRIDES(o);
+    bool transposed = false;
+
+    for(int i = 0; i < ndims; i++)
+    {
+        size[i] = (int)_sizes[i];
+        step[i] = (size_t)_strides[i];
+    }
+
+    if( ndims == 0 || step[ndims-1] > elemsize ) {
+        size[ndims] = 1;
+        step[ndims] = elemsize;
+        ndims++;
+    }
+
+    if( ndims >= 2 && step[0] < step[1] )
+    {
+        std::swap(size[0], size[1]);
+        std::swap(step[0], step[1]);
+        transposed = true;
+    }
+
+    if( ndims == 3 && size[2] <= CV_CN_MAX && step[1] == elemsize*size[2] )
+    {
+        ndims--;
+        type |= CV_MAKETYPE(0, size[2]);
+    }
+
+    if( ndims > 2)
+    {
+        failmsg("toMat: Object has more than 2 dimensions");
+    }
+
+    m = Mat(ndims, size, type, PyArray_DATA(o), step);
+
+    if( m.data )
+    {
+        m.refcount = refcountFromPyObject(o);
+        m.addref(); // protect the original numpy array from deallocation
+                    // (since Mat destructor will decrement the reference counter)
+    };
+    m.allocator = &g_numpyAllocator;
+
+    if( transposed )
+    {
+        Mat tmp;
+        tmp.allocator = &g_numpyAllocator;
+        transpose(m, tmp);
+        m = tmp;
+    }
+    return m;
+}
+
+PyObject* NDArrayConverter::toNDArray(const cv::Mat& m)
+{
+    if( !m.data )
+        Py_RETURN_NONE;
+    Mat temp, *p = (Mat*)&m;
+    if(!p->refcount || p->allocator != &g_numpyAllocator)
+    {
+        temp.allocator = &g_numpyAllocator;
+        m.copyTo(temp);
+        p = &temp;
+    }
+    p->addref();
+    return pyObjectFromRefcount(p->refcount);
+}
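NDArrayConverter (declared in conversion.h below) converts between cv::Mat and NumPy arrays: toNDArray copies the Mat into a NumPy-backed buffer when it was not already allocated by g_numpyAllocator, while toMat builds a Mat header over the ndarray's data and takes a reference on the Python object. A minimal sketch (not part of the patch; `roundtrip_example` is a hypothetical helper and assumes Python/NumPy are initialized, as init_ar() does in main.cpp):

```cpp
// Illustration only: round-tripping a cv::Mat through the converter.
#include <opencv2/opencv.hpp>
#include "conversion.h"

void roundtrip_example() {
    NDArrayConverter cvt;
    cv::Mat m = cv::Mat::eye(3, 3, CV_32F);
    PyObject * array = cvt.toNDArray(m);   // copies m into a numpy-backed buffer
    cv::Mat back = cvt.toMat(array);       // header over the ndarray data, no copy
    Py_DECREF(array);                      // 'back' holds its own reference to the ndarray
}
```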
diff --git a/gpb/src/conversion.h b/gpb/src/conversion.h
new file mode 100644
index 0000000..3975e17
--- /dev/null
+++ b/gpb/src/conversion.h
@@ -0,0 +1,60 @@
+# ifndef __COVERSION_OPENCV_H__
+# define __COVERSION_OPENCV_H__
+
+#include <Python.h>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/core/core.hpp>
+#include "numpy/ndarrayobject.h"
+
+static PyObject* opencv_error = 0;
+
+static int failmsg(const char *fmt, ...);
+
+class PyAllowThreads;
+
+class PyEnsureGIL;
+
+#define ERRWRAP2(expr) \
+try \
+{ \
+    PyAllowThreads allowThreads; \
+    expr; \
+} \
+catch (const cv::Exception &e) \
+{ \
+    PyErr_SetString(opencv_error, e.what()); \
+    return 0; \
+}
+
+static PyObject* failmsgp(const char *fmt, ...);
+
+static size_t REFCOUNT_OFFSET = (size_t)&(((PyObject*)0)->ob_refcnt) +
+    (0x12345678 != *(const size_t*)"\x78\x56\x34\x12\0\0\0\0\0")*sizeof(int);
+
+static inline PyObject* pyObjectFromRefcount(const int* refcount)
+{
+    return (PyObject*)((size_t)refcount - REFCOUNT_OFFSET);
+}
+
+static inline int* refcountFromPyObject(const PyObject* obj)
+{
+    return (int*)((size_t)obj + REFCOUNT_OFFSET);
+}
+
+
+class NumpyAllocator;
+
+enum { ARG_NONE = 0, ARG_MAT = 1, ARG_SCALAR = 2 };
+
+class NDArrayConverter
+{
+private:
+    void init();
+public:
+    NDArrayConverter();
+    cv::Mat toMat(const PyObject* o);
+    PyObject* toNDArray(const cv::Mat& mat);
+};
+
+# endif
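The two inline helpers above translate between a pointer to an object's reference count and the PyObject itself, using REFCOUNT_OFFSET to locate ob_refcnt; they are exact inverses. A tiny sketch (not part of the patch; `refcount_helpers_example` is a hypothetical name):

```cpp
// Illustration only: the refcount <-> PyObject helpers invert each other.
#include <cassert>
#include "conversion.h"

void refcount_helpers_example(PyObject * obj) {
    int * refcount = refcountFromPyObject(obj);
    assert(pyObjectFromRefcount(refcount) == obj);
}
```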
diff --git a/gpb/src/main.cpp b/gpb/src/main.cpp
new file mode 100644
index 0000000..e918d53
--- /dev/null
+++ b/gpb/src/main.cpp
@@ -0,0 +1,166 @@
+//#define DEBUG 1
+
+//#include "CVBoostConverter.hpp"
+#include <boost/python.hpp>
+#include <iostream>
+#include <ctime>
+#include "conversion.h"
+#include <opencv2/opencv.hpp>
+
+#include "segment.h"
+#include "connection.h"
+#include "adjacency.h"
+
+using namespace boost::python;
+
+cv::Mat get_bboxes_(const cv::Mat & seg, const cv::Mat & edge) {
+    double max_id_;
+    cv::minMaxIdx(seg, nullptr, &max_id_);
+    int max_id = max_id_;
+
+    std::vector<std::shared_ptr<Segment>> segments;
+    std::vector<Connection> connections;
+    segments.reserve(max_id);
+    cv::Size size = seg.size();
+    for (int i = 0; i <= max_id; i++) {
+        segments.push_back(std::make_shared<Segment>(i, size));
+    }
+
+    {
+        //AdjacencyMatrix adjacency(max_id + 1);
+        for (int i = 0; i < seg.rows; i++) {
+            for (int j = 0; j < seg.cols; j++) {
+                cv::Point p(j, i);
+                uint16_t id = seg.at<uint16_t>(p);
+                segments[id]->addPoint(p);
+
+                if (i < seg.rows - 1) {
+                    uint16_t n = seg.at<uint16_t>(i+1, j);
+                    if (n != id) {// && adjacency.get(id, n) == false) {
+                        //adjacency.get(id, n) = true;
+                        segments[id]->addNeighbour(n, edge.at<uint8_t>(i+1, j));
+                        segments[n]->addNeighbour(id, edge.at<uint8_t>(i+1, j));
+                    }
+                }
+
+                if (j < seg.cols - 1) {
+                    uint16_t n = seg.at<uint16_t>(i, j+1);
+                    if (n != id) { // && adjacency.get(id, n) == false) {
+                        //adjacency.get(id, n) = true;
+                        segments[id]->addNeighbour(n, edge.at<uint8_t>(i, j+1));
+                        segments[n]->addNeighbour(id, edge.at<uint8_t>(i, j+1));
+                    }
+                }
+            }
+        }
+    }
+
+    cv::Mat bboxes;
+    {
+        AdjacencyMatrix adjacency(max_id + 1);
+        for (auto & s: segments) {
+            if (s->empty())
+                continue;
+
+            cv::Mat bbox = cv::Mat(1, 4, CV_32SC1);
+            bbox.at<int32_t>(0) = s->min_p.x;
+            bbox.at<int32_t>(1) = s->min_p.y;
+            bbox.at<int32_t>(2) = s->max_p.x;
+            bbox.at<int32_t>(3) = s->max_p.y;
+            if (bboxes.empty())
+                bboxes = bbox;
+            else
+                cv::vconcat(bboxes, bbox, bboxes);
+
+            for (auto & n: s->neighbours) {
+                if (adjacency.get(s->id, n.first) == false) {
+                    adjacency.get(s->id, n.first) = true;
+                    connections.push_back(Connection(s->id, segments[n.first]->id, s->computeSimilarity(segments[n.first].get())));
+                }
+            }
+        }
+    }
+
+#ifdef DEBUG
+    cv::namedWindow("Segment", cv::WINDOW_NORMAL);
+#endif
+
+    while (connections.size() != 0) {
+        std::sort(connections.begin(), connections.end());
+        Connection c = *connections.begin();
+        connections.erase(connections.begin());
+        std::shared_ptr<Segment> s = segments[c.a]->merge(connections, segments, segments[c.b].get());
+
+#ifdef DEBUG
+        cv::Mat draw = cv::Mat::zeros(seg.size(), CV_8UC1);
+        draw += segments[c.a]->mask * 127;
+        draw += segments[c.b]->mask * 255;
+        cv::imshow("Segment", draw);
+        cv::waitKey();
+#endif
+
+        cv::Mat bbox = cv::Mat(1, 4, CV_32SC1);
+        bbox.at<int32_t>(0) = s->min_p.x;
+        bbox.at<int32_t>(1) = s->min_p.y;
+        bbox.at<int32_t>(2) = s->max_p.x;
+        bbox.at<int32_t>(3) = s->max_p.y;
+        if (bboxes.empty())
+            bboxes = bbox;
+        else
+            cv::vconcat(bboxes, bbox, bboxes);
+    }
+
+    return bboxes;
+}
+
+PyObject * get_bboxes(PyObject * seg_, PyObject * edge_) {
+    NDArrayConverter cvt;
+    cv::Mat seg = cvt.toMat(seg_);
+    cv::Mat edge = cvt.toMat(edge_);
+    return cvt.toNDArray(get_bboxes_(seg, edge));
+}
+
+static void init_ar() {
+    Py_Initialize();
+    import_array();
+}
+
+BOOST_PYTHON_MODULE(gpb_selection) {
+    init_ar();
+
+    def("get_bboxes", get_bboxes);
+}
+
+int main(int argc, char * argv[]) {
+    if (argc != 3) {
+        std::cout << "Usage: " << argv[0] << " <segmentation image> <edge image>" << std::endl;
+        return 0;
+    }
+
+    cv::Mat seg = cv::imread(argv[1], cv::IMREAD_UNCHANGED);
+    cv::Mat edge = cv::imread(argv[2], cv::IMREAD_UNCHANGED);
+
+//    cv::namedWindow("Image", cv::WINDOW_NORMAL);
+//    cv::imshow("Image", (seg == 338) * 255);
+//    cv::waitKey();
+
+//    cv::namedWindow("Image", cv::WINDOW_NORMAL);
+//    while (cv::waitKey() != 'q') {
+    std::clock_t begin = std::clock();
+    cv::Mat bboxes = get_bboxes_(seg, edge);
+    std::clock_t end = std::clock();
+    double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
+    std::cout << "Time passed in seconds: " << elapsed_secs << std::endl;
+//    }
+
+//    for (int i = 0; i < bboxes.rows; i++) {
+//        cv::Point p1(bboxes.at<int32_t>(i,0), bboxes.at<int32_t>(i,1));
+//        cv::Point p2(bboxes.at<int32_t>(i,2), bboxes.at<int32_t>(i,3));
+//        cv::Mat tmp;
+//        image.copyTo(tmp);
+//        cv::rectangle(tmp, p1, p2, cv::Scalar(255, 255, 0));
+//        cv::imshow("Image", tmp);
+//        cv::waitKey();
+//    }
+    return 0;
+}
diff --git a/gpb/src/segment.h b/gpb/src/segment.h
new file mode 100644
index 0000000..0da7c91
--- /dev/null
+++ b/gpb/src/segment.h
@@ -0,0 +1,129 @@
+#pragma once
+
+#include <opencv2/opencv.hpp>
+#include <unordered_map>
+#include "connection.h"
+
+class Segment
+{
+public:
+    Segment(int id, cv::Size s) :
+#ifdef DEBUG
+        mask(cv::Mat::zeros(s, CV_8UC1)),
+#endif
+        id(id),
+        min_p(s.width, s.height),
+        max_p(0, 0),
+        im_size(s),
+        size(0)
+    {
+    }
+
+    virtual void addPoint(cv::Point point) {
+#ifdef DEBUG
+        mask.at<uint8_t>(point) = 1;
+#endif
+
+        min_p = cv::Point(std::min(point.x, min_p.x), std::min(point.y, min_p.y));
+        max_p = cv::Point(std::max(point.x, max_p.x), std::max(point.y, max_p.y));
+
+        size++;
+    }
+
+    void addNeighbour(int n, uint8_t texture) {
+        if (neighbours.find(n) == neighbours.end())
+            neighbours.insert({n, { texture }});
+        else
+            neighbours[n].push_back(texture);
+    }
+
+    float computeSimilarity(const Segment * b_) {
+        std::vector<uint8_t> & edge = neighbours[b_->id];
+        if (edge.size() == 0) {
+            return 0.f;
+        }
+
+        std::sort(edge.begin(), edge.end());
+        return edge[edge.size() / 2];
+    }
+
+    std::shared_ptr<Segment> merge(std::vector<Connection> & connections, std::vector<std::shared_ptr<Segment>> & segments, const Segment * b) {
+        std::shared_ptr<Segment> s = std::make_shared<Segment>(segments.size(), im_size);
+#ifdef DEBUG
+        cv::bitwise_or(mask, b->mask, s->mask);
+#endif
+        s->min_p = cv::Point(std::min(min_p.x, b->min_p.x), std::min(min_p.y, b->min_p.y));
+        s->max_p = cv::Point(std::max(max_p.x, b->max_p.x), std::max(max_p.y, b->max_p.y));
+
+        s->neighbours = neighbours;
+        s->neighbours.erase(b->id);
+        for (auto & n: b->neighbours) {
+            if (n.first == id)
+                continue;
+
+            if (s->neighbours.find(n.first) == s->neighbours.end())
+                s->neighbours.insert(n);
+            else
+                s->neighbours[n.first].insert(s->neighbours[n.first].end(), n.second.begin(), n.second.end());
+        }
+
+        for (auto it = connections.begin(); it != connections.end(); ) {
+            if (it->a == id || it->a == b->id || it->b == id || it->b == b->id) {
+                it = connections.erase(it);
+            }
+            else
+                it++;
+        }
+
+        for (auto & neighbour : s->neighbours) {
+            std::shared_ptr<Segment> & n = segments[neighbour.first];
+            auto it = n->neighbours.find(id);
+            if (it != n->neighbours.end()) {
+                n->neighbours.insert({ s->id, n->neighbours[id] });
+                n->neighbours.erase(it);
+
+                if (n->neighbours.find(b->id) != n->neighbours.end()) {
+                    n->neighbours[s->id].insert(n->neighbours[s->id].end(), n->neighbours[b->id].begin(), n->neighbours[b->id].end());
+                    n->neighbours.erase(b->id);
+                }
+            } else if (n->neighbours.find(b->id) != n->neighbours.end()) {
+                n->neighbours.insert({ s->id, n->neighbours[b->id] });
+                n->neighbours.erase(b->id);
+            }
+
+            connections.push_back(Connection(s->id, n->id, s->computeSimilarity(n.get())));
+        }
+
+        segments.push_back(s);
+//        std::cout << "connection size: " << connections.size() << std::endl;
+
+        return s;
+    }
+
+    std::ostream & output(std::ostream & out) const {
+        return out;
+    }
+
+    inline cv::Rect bbox() const {
+        return cv::Rect(min_p, max_p);
+    }
+
+    inline bool empty() {
+        return size == 0;
+    }
+
+    std::unordered_map<int, std::vector<uint8_t>> neighbours;
+#ifdef DEBUG
+    cv::Mat mask;
+#endif
+    int id;
+    cv::Point min_p;
+    cv::Point max_p;
+    cv::Size im_size;
+    int size;
+};
+
+std::ostream & operator<<(std::ostream & os, const Segment & b) {
+    return b.output(os);
+}
+
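Segment::computeSimilarity sorts the edge strengths recorded along the boundary shared with the other segment and returns the element at `size()/2`, i.e. the upper median. A small standalone sketch (not part of the patch) of that behaviour:

```cpp
// Illustration only: computeSimilarity returns the upper median of the
// boundary edge strengths recorded via addNeighbour.
#include <cassert>
#include <opencv2/opencv.hpp>
#include "segment.h"

int main() {
    Segment a(0, cv::Size(10, 10));
    Segment b(1, cv::Size(10, 10));
    a.addNeighbour(b.id, 10);
    a.addNeighbour(b.id, 200);
    a.addNeighbour(b.id, 30);
    // Sorted strengths are {10, 30, 200}; the element at size()/2 is 30.
    assert(a.computeSimilarity(&b) == 30.f);
    return 0;
}
```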