@phdthesis{Le2012,
  author   = {Trung Kien Le},
  title    = {Information Dependency and Its Applications},
  note     = {German title: Informationsabh{\"a}ngigkeit und ihre Anwendungen},
  url      = {https://nbn-resolving.org/urn:nbn:de:gbv:9-001354-1},
  year     = {2012},
  abstract = {Independence is a basic concept of probability theory and statistics. In many fields of science, the dependency of different variables has attracted much attention from scientists. A measure, called information dependency, is proposed to express the dependency of a group of random variables. This measure is defined as the Kullback-Leibler divergence of the joint distribution of these random variables with respect to their product-marginal distribution. In the bivariate case, this measure is known as the mutual information of two random variables; the information dependency therefore has a strong relationship with information theory. The thesis aims to give a thorough study of the information dependency from both mathematical and practical viewpoints. Concretely, we study the following three problems: 1. Proving that the information dependency is a useful tool for expressing the dependency of a group of random variables, by comparing it with other measures of dependency. 2. Studying methods to estimate the information dependency from samples of a group of random variables. 3. Investigating how the Independent Component Analysis problem, an interesting problem in statistics, can be solved using the information dependency.},
  language = {en}
}
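
A minimal LaTeX sketch of the measure described in the abstract: the information dependency as the Kullback-Leibler divergence of the joint law from the product of its marginals, reducing to mutual information in the bivariate case. The symbol \Delta and the Radon-Nikodym notation are assumptions for illustration, not necessarily the thesis's own notation.

% Sketch of the definition from the abstract (notation assumed, not taken from the thesis):
% the information dependency of random variables X_1, ..., X_n is the KL divergence
% of their joint distribution from the product of their marginal distributions.
\[
  \Delta(X_1,\dots,X_n)
    \;=\; D_{\mathrm{KL}}\bigl(P_{X_1,\dots,X_n} \,\big\|\, P_{X_1}\otimes\cdots\otimes P_{X_n}\bigr)
    \;=\; \int \log \frac{\mathrm{d}P_{X_1,\dots,X_n}}{\mathrm{d}\,(P_{X_1}\otimes\cdots\otimes P_{X_n})}
          \,\mathrm{d}P_{X_1,\dots,X_n}.
\]
% For n = 2 this coincides with the mutual information of the two variables:
% \Delta(X, Y) = I(X; Y).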