@article{3256,
  abstract     = {We use a distortion to define the dual complex of a cubical subdivision of ℝ^n as an n-dimensional subcomplex of the nerve of the set of n-cubes. Motivated by the topological analysis of high-dimensional digital image data, we consider such subdivisions defined by generalizations of quad- and oct-trees to n dimensions. Assuming the subdivision is balanced, we show that mapping each vertex to the center of the corresponding n-cube gives a geometric realization of the dual complex in ℝ^n.},
  author       = {Edelsbrunner, Herbert and Kerber, Michael},
  journal      = {Discrete \& Computational Geometry},
  number       = {2},
  pages        = {393 -- 414},
  publisher    = {Springer},
  title        = {{Dual complexes of cubical subdivisions of ℝ^n}},
  doi          = {10.1007/s00454-011-9382-4},
  volume       = {47},
  year         = {2012},
}

@inproceedings{3265,
  abstract     = {We propose a mid-level statistical model for image segmentation that composes multiple figure-ground hypotheses (FG) obtained by applying constraints at different locations and scales, into larger interpretations (tilings) of the entire image. Inference is cast as optimization over sets of maximal cliques sampled from a graph connecting all non-overlapping figure-ground segment hypotheses. Potential functions over cliques combine unary, Gestalt-based figure qualities, and pairwise compatibilities among spatially neighboring segments, constrained by T-junctions and the boundary interface statistics of real scenes. Learning the model parameters is based on maximum likelihood, alternating between sampling image tilings and optimizing their potential function parameters. State-of-the-art results are reported on the Berkeley and Stanford segmentation datasets, as well as VOC2009, where a 28% improvement was achieved.},
  author       = {Ion, Adrian and Carreira, Joao and Sminchisescu, Cristian},
  booktitle    = {Proceedings of the IEEE International Conference on Computer Vision},
  location     = {Barcelona, Spain},
  publisher    = {IEEE},
  title        = {{Image segmentation by figure-ground composition into maximal cliques}},
  doi          = {10.1109/ICCV.2011.6126486},
  year         = {2011},
}

@article{3310,
  abstract     = {The theory of persistent homology opens up the possibility to reason about topological features of a space or a function quantitatively and in combinatorial terms. We refer to this new angle at a classical subject within algebraic topology as a point calculus, which we present for the family of interlevel sets of a real-valued function. Our account of the subject is expository, devoid of proofs, and written for non-experts in algebraic topology.},
  author       = {Bendich, Paul and Cabello, Sergio and Edelsbrunner, Herbert},
  journal      = {Pattern Recognition Letters},
  number       = {11},
  pages        = {1436 -- 1444},
  publisher    = {Elsevier},
  title        = {{A point calculus for interlevel set homology}},
  doi          = {10.1016/j.patrec.2011.10.007},
  volume       = {33},
  year         = {2012},
}

@article{3331,
  abstract     = {Computing the topology of an algebraic plane curve C means computing a combinatorial graph that is isotopic to C and thus represents its topology in R^2. We prove that, for a polynomial of degree n with integer coefficients bounded by 2^ρ, the topology of the induced curve can be computed with Õ(...) bit operations (Õ indicates that we omit logarithmic factors). Our analysis improves the previous best known complexity bounds by a factor of n^2. The improvement is based on new techniques to compute and refine isolating intervals for the real roots of polynomials, and on the consequent amortized analysis of the critical fibers of the algebraic curve.},
  author       = {Kerber, Michael and Sagraloff, Michael},
  journal      = {Journal of Symbolic Computation},
  number       = {3},
  pages        = {239 -- 258},
  publisher    = {Elsevier},
  title        = {{A worst case bound for topology computation of algebraic curves}},
  doi          = {10.1016/j.jsc.2011.11.001},
  volume       = {47},
  year         = {2012},
}

@article{6588,
  abstract     = {First we note that the best polynomial approximation to |x| on the set, which consists of an interval on the positive half-axis and a point on the negative half-axis, can be given by means of the classical Chebyshev polynomials. Then we explore the cases when a solution of the related problem on two intervals can be given in elementary functions.},
  author       = {Pausinger, Florian},
  issn         = {1812-9471},
  journal      = {Journal of Mathematical Physics, Analysis, Geometry},
  number       = {1},
  pages        = {63--78},
  publisher    = {B. Verkin Institute for Low Temperature Physics and Engineering},
  title        = {{Elementary solutions of the Bernstein problem on two intervals}},
  volume       = {8},
  year         = {2012},
}

@inproceedings{3266,
  abstract     = {We present a joint image segmentation and labeling model (JSL) which, given a bag of figure-ground segment hypotheses extracted at multiple image locations and scales, constructs a joint probability distribution over both the compatible image interpretations (tilings or image segmentations) composed from those segments, and over their labeling into categories. The process of drawing samples from the joint distribution can be interpreted as first sampling tilings, modeled as maximal cliques, from a graph connecting spatially non-overlapping segments in the bag [1], followed by sampling labels for those segments, conditioned on the choice of a particular tiling. We learn the segmentation and labeling parameters jointly, based on Maximum Likelihood with a novel Incremental Saddle Point estimation procedure. The partition function over tilings and labelings is increasingly more accurately approximated by including incorrect configurations that a not-yet-competent model rates probable during learning. We show that the proposed methodology matches the current state of the art in the Stanford dataset [2], as well as in VOC2010, where 41.7% accuracy on the test set is achieved.},
  author       = {Ion, Adrian and Carreira, Joao and Sminchisescu, Cristian},
  booktitle    = {NIPS Proceedings},
  location     = {Granada, Spain},
  pages        = {1827 -- 1835},
  publisher    = {Neural Information Processing Systems Foundation},
  title        = {{Probabilistic joint image segmentation and labeling}},
  volume       = {24},
  year         = {2011},
}

@article{3267,
  abstract     = {We address the problem of localizing homology classes, namely, finding the cycle representing a given class with the most concise geometric measure. We study the problem with different measures: volume, diameter and radius. For volume, that is, the 1-norm of a cycle, two main results are presented. First, we prove that the problem is NP-hard to approximate within any constant factor. Second, we prove that for homology of dimension two or higher, the problem is NP-hard to approximate even when the Betti number is O(1). The latter result leads to the inapproximability of the problem of computing the nonbounding cycle with the smallest volume and computing cycles representing a homology basis with the minimal total volume. As for the other two measures defined by pairwise geodesic distance, diameter and radius, we show that the localization problem is NP-hard for diameter but is polynomial for radius. Our work is restricted to homology over the ℤ2 field.},
  author       = {Chen, Chao and Freedman, Daniel},
  journal      = {Discrete \& Computational Geometry},
  number       = {3},
  pages        = {425 -- 448},
  publisher    = {Springer},
  title        = {{Hardness results for homology localization}},
  doi          = {10.1007/s00454-010-9322-8},
  volume       = {45},
  year         = {2011},
}

@article{3269,
  abstract     = {The unintentional scattering of light between neighboring surfaces in complex projection environments increases the brightness and decreases the contrast, disrupting the appearance of the desired imagery. To achieve satisfactory projection results, the inverse problem of global illumination must be solved to cancel this secondary scattering. In this paper, we propose a global illumination cancellation method that minimizes the perceptual difference between the desired imagery and the actual total illumination in the resulting physical environment. Using Gauss-Newton and active set methods, we design a fast solver for the bound constrained nonlinear least squares problem raised by the perceptual error metrics. Our solver is further accelerated with a CUDA implementation and multi-resolution method to achieve 1–2 fps for problems with approximately 3000 variables. We demonstrate the global illumination cancellation algorithm with our multi-projector system. Results show that our method preserves the color fidelity of the desired imagery significantly better than previous methods.},
  author       = {Sheng, Yu and Cutler, Barbara and Chen, Chao and Nasman, Joshua},
  journal      = {Computer Graphics Forum},
  number       = {4},
  pages        = {1261 -- 1268},
  publisher    = {Wiley-Blackwell},
  title        = {{Perceptual global illumination cancellation in complex projection environments}},
  doi          = {10.1111/j.1467-8659.2011.01985.x},
  volume       = {30},
  year         = {2011},
}

@inproceedings{3270,
  abstract     = {The persistence diagram of a filtered simplicial complex is usually computed by reducing the boundary matrix of the complex. We introduce a simple optimization technique: by processing the simplices of the complex in decreasing dimension, we can “kill” columns (i.e., set them to zero) without reducing them. This technique completely avoids reduction on roughly half of the columns. We demonstrate that this idea significantly improves the running time of the reduction algorithm in practice. We also give an output-sensitive complexity analysis for the new algorithm which yields sub-cubic asymptotic bounds under certain assumptions.},
  author       = {Chen, Chao and Kerber, Michael},
  location     = {Morschach, Switzerland},
  pages        = {197 -- 200},
  publisher    = {TU Dortmund},
  title        = {{Persistent homology computation with a twist}},
  year         = {2011},
}

@inbook{3271,
  abstract     = {In this paper we present an efficient framework for computation of persistent homology of cubical data in arbitrary dimensions. An existing algorithm using simplicial complexes is adapted to the setting of cubical complexes. The proposed approach enables efficient application of persistent homology in domains where the data is naturally given in a cubical form. By avoiding triangulation of the data, we significantly reduce the size of the complex. We also present a data-structure designed to compactly store and quickly manipulate cubical complexes. By means of numerical experiments, we show high speed and memory efficiency of our approach. We compare our framework to other available implementations, showing its superiority. Finally, we report performance on selected 3D and 4D data-sets.},
  author       = {Wagner, Hubert and Chen, Chao and Vuçini, Erald},
  booktitle    = {Topological Methods in Data Analysis and Visualization II},
  editor       = {Peikert, Ronald and Hauser, Helwig and Carr, Hamish and Fuchs, Raphael},
  pages        = {91 -- 106},
  publisher    = {Springer},
  title        = {{Efficient computation of persistent homology for cubical data}},
  doi          = {10.1007/978-3-642-23175-9_7},
  year         = {2011},
}

@inbook{3311,
  abstract     = {Alpha shapes were conceived in 1981 as an attempt to define the shape of a finite set of points in the plane. Since then, connections to diverse areas in the sciences and engineering have developed, including to pattern recognition, digital shape sampling and processing, and structural molecular biology. This survey begins with a historical account and discusses geometric, algorithmic, topological, and combinatorial aspects of alpha shapes in this sequence.},
  author       = {Edelsbrunner, Herbert},
  booktitle    = {Tessellations in the Sciences: Virtues, Techniques and Applications of Geometric Tilings},
  editor       = {van de Weygaert, R and Vegter, G and Ritzerveld, J and Icke, V},
  publisher    = {Springer},
  title        = {{Alpha shapes - a survey}},
  year         = {2011},
}

@inproceedings{3312,
  abstract     = {We study the 3D reconstruction of plant roots from multiple 2D images. To meet the challenge caused by the delicate nature of thin branches, we make three innovations to cope with the sensitivity to image quality and calibration. First, we model the background as a harmonic function to improve the segmentation of the root in each 2D image. Second, we develop the concept of the regularized visual hull which reduces the effect of jittering and refraction by ensuring consistency with one 2D image. Third, we guarantee connectedness through adjustments to the 3D reconstruction that minimize global error. Our software is part of a biological phenotype/genotype study of agricultural root systems. It has been tested on more than 40 plant roots and results are promising in terms of reconstruction quality and efficiency.},
  author       = {Zheng, Ying and Gu, Steve and Edelsbrunner, Herbert and Tomasi, Carlo and Benfey, Philip},
  booktitle    = {Proceedings of the IEEE International Conference on Computer Vision},
  location     = {Barcelona, Spain},
  publisher    = {IEEE},
  title        = {{Detailed reconstruction of 3D plant root shape}},
  doi          = {10.1109/ICCV.2011.6126475},
  year         = {2011},
}

@inproceedings{3313,
  abstract     = {Interpreting an image as a function on a compact subset of the Euclidean plane, we get its scale-space by diffusion, spreading the image over the entire plane. This generates a 1-parameter family of functions alternatively defined as convolutions with a progressively wider Gaussian kernel. We prove that the corresponding 1-parameter family of persistence diagrams has norms that go rapidly to zero as time goes to infinity. This result rationalizes experimental observations about scale-space. We hope this will lead to targeted improvements of related computer vision methods.},
  author       = {Chen, Chao and Edelsbrunner, Herbert},
  booktitle    = {Proceedings of the IEEE International Conference on Computer Vision},
  location     = {Barcelona, Spain},
  publisher    = {IEEE},
  title        = {{Diffusion runs low on persistence fast}},
  doi          = {10.1109/ICCV.2011.6126271},
  year         = {2011},
}

@inproceedings{3328,
  abstract     = {We report on a generic uni- and bivariate algebraic kernel that is publicly available with CGAL 3.7. It comprises complete, correct, yet efficient state-of-the-art implementations for polynomials, roots of polynomial systems, and the support to analyze algebraic curves defined by bivariate polynomials. The kernel design is generic, that is, various number types and substeps can be exchanged. It is accompanied by a ready-to-use interface to enable arrangements induced by algebraic curves, which have already been used as the basis for various geometric applications, such as arrangements on Dupin cyclides or the triangulation of algebraic surfaces. We present two novel applications: arrangements of rotated algebraic curves and Boolean set operations on polygons bounded by segments of algebraic curves. We also provide experiments showing that our general implementation is competitive and often clearly outperforms existing implementations in CGAL that are explicitly tailored for specific types of non-linear curves.},
  author       = {Berberich, Eric and Hemmer, Michael and Kerber, Michael},
  booktitle    = {Proceedings of the Twenty-Seventh Annual Symposium on Computational Geometry},
  location     = {Paris, France},
  pages        = {179 -- 186},
  publisher    = {ACM},
  title        = {{A generic algebraic kernel for non-linear geometric applications}},
  doi          = {10.1145/1998196.1998224},
  year         = {2011},
}

@inproceedings{3329,
  abstract     = {We consider the offset-deconstruction problem: Given a polygonal shape Q with n vertices, can it be expressed, up to a tolerance ε in Hausdorff distance, as the Minkowski sum of another polygonal shape P with a disk of fixed radius? If it does, we also seek a preferably simple-looking solution shape P; then, P's offset constitutes an accurate, vertex-reduced, and smoothened approximation of Q. We give an O(n log n)-time exact decision algorithm that handles any polygonal shape, assuming the real-RAM model of computation. An alternative algorithm, based purely on rational arithmetic, answers the same deconstruction problem, up to an uncertainty parameter δ, and its running time depends on δ (in addition to the other input parameters: n, ε, and the radius of the disk). If the input shape is found to be approximable, the rational-arithmetic algorithm also computes an approximate solution shape for the problem. For convex shapes, the complexity of the exact decision algorithm drops to O(n), which is also the time required to compute a solution shape P with at most one more vertex than a vertex-minimal one. Our study is motivated by applications from two different domains. However, since the offset operation has numerous uses, we anticipate that the reverse question that we study here will be still more broadly applicable. We present results obtained with our implementation of the rational-arithmetic algorithm.},
  author       = {Berberich, Eric and Halperin, Dan and Kerber, Michael and Pogalnikova, Roza},
  booktitle    = {Proceedings of the Twenty-Seventh Annual Symposium on Computational Geometry},
  location     = {Paris, France},
  pages        = {187 -- 196},
  publisher    = {ACM},
  title        = {{Deconstructing approximate offsets}},
  doi          = {10.1145/1998196.1998225},
  year         = {2011},
}

@inproceedings{3330,
  abstract     = {We consider the problem of approximating all real roots of a square-free polynomial f. Given isolating intervals, our algorithm refines each of them to a width of at most 2^{-L}, that is, each of the roots is approximated to L bits after the binary point. Our method provides a certified answer for arbitrary real polynomials, only requiring finite approximations of the polynomial coefficients and choosing a suitable working precision adaptively. In this way, we get a correct algorithm that is simple to implement and practically efficient. Our algorithm uses the quadratic interval refinement method; we adapt that method to be able to cope with inaccuracies when evaluating f, without sacrificing its quadratic convergence behavior. We prove a bound on the bit complexity of our algorithm in terms of degree, coefficient size and discriminant. Our bound improves previous work on integer polynomials by a factor of deg f and essentially matches best known theoretical bounds on root approximation which are obtained by very sophisticated algorithms.},
  author       = {Kerber, Michael and Sagraloff, Michael},
  location     = {California, USA},
  pages        = {209 -- 216},
  publisher    = {ACM},
  title        = {{Root refinement for real polynomials}},
  doi          = {10.1145/1993886.1993920},
  year         = {2011},
}

@article{3332,
  abstract     = {Given an algebraic hypersurface O in ℝ^d, how many simplices are necessary for a simplicial complex isotopic to O? We address this problem and the variant where all vertices of the complex must lie on O. We give asymptotically tight worst-case bounds for algebraic plane curves. Our results gradually improve known bounds in higher dimensions; however, the question for tight bounds remains unsolved for d ≥ 3.},
  author       = {Kerber, Michael and Sagraloff, Michael},
  journal      = {Graphs and Combinatorics},
  number       = {3},
  pages        = {419 -- 430},
  publisher    = {Springer},
  title        = {{A note on the complexity of real algebraic hypersurfaces}},
  doi          = {10.1007/s00373-011-1020-7},
  volume       = {27},
  year         = {2011},
}

@article{3334,
  author       = {Edelsbrunner, Herbert and Pach, János and Ziegler, Günter},
  journal      = {Discrete \& Computational Geometry},
  number       = {1},
  pages        = {1 -- 2},
  publisher    = {Springer},
  title        = {{Letter from the new editors-in-chief}},
  doi          = {10.1007/s00454-010-9313-9},
  volume       = {45},
  year         = {2011},
}

@inbook{3335,
  abstract     = {We study the topology of the Megaparsec Cosmic Web in terms of the scale-dependent Betti numbers, which formalize the topological information content of the cosmic mass distribution. While the Betti numbers do not fully quantify topology, they extend the information beyond conventional cosmological studies of topology in terms of genus and Euler characteristic. The richer information content of Betti numbers goes along with the availability of fast algorithms to compute them. For continuous density fields, we determine the scale-dependence of Betti numbers by invoking the cosmologically familiar filtration of sublevel or superlevel sets defined by density thresholds. For the discrete galaxy distribution, however, the analysis is based on the alpha shapes of the particles. These simplicial complexes constitute an ordered sequence of nested subsets of the Delaunay tessellation, a filtration defined by the scale parameter, α. As they are homotopy equivalent to the sublevel sets of the distance field, they are an excellent tool for assessing the topological structure of a discrete point distribution. In order to develop an intuitive understanding for the behavior of Betti numbers as a function of α, and their relation to the morphological patterns in the Cosmic Web, we first study them within the context of simple heuristic Voronoi clustering models. These can be tuned to consist of specific morphological elements of the Cosmic Web, i.e. clusters, filaments, or sheets. To elucidate the relative prominence of the various Betti numbers in different stages of morphological evolution, we introduce the concept of alpha tracks. Subsequently, we address the topology of structures emerging in the standard LCDM scenario and in cosmological scenarios with alternative dark energy content. The evolution of the Betti numbers is shown to reflect the hierarchical evolution of the Cosmic Web. We also demonstrate that the scale-dependence of the Betti numbers yields a promising measure of cosmological parameters, with a potential to help in determining the nature of dark energy and to probe primordial non-Gaussianities. We also discuss the expected Betti numbers as a function of the density threshold for superlevel sets of a Gaussian random field. Finally, we introduce the concept of persistent homology. It measures scale levels of the mass distribution and allows us to separate small from large scale features. Within the context of the hierarchical cosmic structure formation, persistence provides a natural formalism for a multiscale topology study of the Cosmic Web.},
  author       = {Van De Weygaert, Rien and Vegter, Gert and Edelsbrunner, Herbert and Jones, Bernard and Pranav, Pratyush and Park, Changbom and Hellwing, Wojciech and Eldering, Bob and Kruithof, Nico and Bos, Patrick and Hidding, Johan and Feldbrugge, Job and Ten Have, Eline and Van Engelen, Matti and Caroli, Manuel and Teillaud, Monique},
  booktitle    = {Transactions on Computational Science XIV},
  editor       = {Gavrilova, Marina and Tan, Kenneth and Mostafavi, Mir},
  pages        = {60 -- 101},
  publisher    = {Springer},
  title        = {{Alpha, Betti and the Megaparsec Universe: On the topology of the Cosmic Web}},
  doi          = {10.1007/978-3-642-25249-5_3},
  volume       = {6970},
  year         = {2011},
}

@inproceedings{3336,
  abstract     = {We introduce TopoCut: a new way to integrate knowledge about topological properties (TPs) into a random field image segmentation model. Instead of including TPs as additional constraints during minimization of the energy function, we devise an efficient algorithm for modifying the unary potentials such that the resulting segmentation is guaranteed to have the desired properties. Our method is more flexible in the sense that it handles more topology constraints than previous methods, which were only able to enforce pairwise or global connectivity. In particular, our method is very fast, making it for the first time possible to enforce global topological properties in practical image segmentation tasks.},
  author       = {Chen, Chao and Freedman, Daniel and Lampert, Christoph},
  booktitle    = {CVPR: Computer Vision and Pattern Recognition},
  isbn         = {978-1-4577-0394-2},
  location     = {Colorado Springs, CO, United States},
  pages        = {2089 -- 2096},
  publisher    = {IEEE},
  title        = {{Enforcing topological constraints in random field image segmentation}},
  doi          = {10.1109/CVPR.2011.5995503},
  year         = {2011},
}

