@phdthesis{Kosytsina2012,
  author        = {Kosytsina, Nadiya},
  title         = {Jump Penalized {L1}-Regression},
  school        = {Universit{\"a}t Greifswald},
  year          = {2012},
  url           = {https://nbn-resolving.org/urn:nbn:de:gbv:9-001279-6},
  abstract      = {In this thesis we have considered a regression model of one dimensional noisy data with the regression function taken from the class L1 ([0, 1)) of absolutely integrable on the interval [0, 1) functions. In particular, we focused on the piecewise constant jump penalized estimators, which minimize the L1 version of the so called Potts functional (see e.g.[8]). Originally, the Potts functional is constructed as the jump penalized least square estimator of a function from the class L2 ([0, 1)) of square integrable on the interval [0, 1) functions. More precisely, in this work, the measure of fidelity of the estimator to the given data is assumed to be given by the average absolute deviation of the data points from the estimator. Additionally, the roughness of the estimator is controlled by the total number of its jumps. The underlying objective of this work is to develop a robust regression model, capable of dealing with data, which contains outliers. Following this, the reason behind the particular choice of the class of the estimator functions is dictated by the well known fact that the median is much less sensitive to the outliers than the average. Our main result concerns consistency of the minimizers of the L1-Potts functional that means the convergence of the corresponding minimizers to the original function f, which is unknown a priori. The only information available about this function is contained in the given noisy data.},
  language      = {de},
  internal-note = {Removed bogus 'journal' field (duplicated title; invalid for phdthesis). 'school' inferred from URN namespace urn:nbn:de:gbv:9 (Universit{\"a}t Greifswald) -- verify against the thesis record.},
}