Skip to content

Commit

Permalink
Update papers.bib
Browse files Browse the repository at this point in the history
  • Loading branch information
zhouqp631 authored Sep 14, 2024
1 parent 74a61ed commit 06682f7
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions _bibliography/papers.bib
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,10 @@ @article{ke2024gibbsbps
preview={GibbsBPS.jpg},
title={Fused L<sub>1/2</sub> prior for large scale linear inverse problem with Gibbs bouncy particle sampler},
author={Xiongwen Ke and Yanan Fan and Qingping Zhou},
abstract={In this paper, we study Bayesian approach for solving large scale linear inverse problems arising in various scientific and engineering fields. We propose a fused $L_{1/2}$ prior with edge-preserving and sparsity-promoting properties and show that it can be formulated as a Gaussian mixture Markov random field. Since the density function of this family of prior is neither log-concave nor Lipschitz, gradient-based Markov chain Monte Carlo methods can not be applied to sample the posterior. Thus, we present a Gibbs sampler in which all the conditional posteriors involved have closed form expressions. The Gibbs sampler works well for small size problems but it is computationally intractable for large scale problems due to the need for sample high dimensional Gaussian distribution. To reduce the computation burden, we construct a Gibbs bouncy particle sampler (Gibbs-BPS) based on a piecewise deterministic Markov process. This new sampler combines elements of Gibbs sampler with bouncy particle sampler and its computation complexity is an order of magnitude smaller. We show that the new sampler converges to the target distribution. With computed tomography examples, we demonstrate that the proposed method shows competitive performance with existing popular Bayesian methods and is highly efficient in large scale problems.},
abstract={In this paper, we study a Bayesian approach for solving large scale linear inverse problems arising in various scientific and engineering fields. We propose a fused L<sub>1/2</sub> prior with edge-preserving and sparsity-promoting properties and show that it can be formulated as a Gaussian mixture Markov random field. Since the density function of this family of priors is neither log-concave nor Lipschitz, gradient-based Markov chain Monte Carlo methods cannot be applied to sample the posterior. Thus, we present a Gibbs sampler in which all the conditional posteriors involved have closed form expressions. The Gibbs sampler works well for small size problems but it is computationally intractable for large scale problems due to the need to sample a high dimensional Gaussian distribution. To reduce the computational burden, we construct a Gibbs bouncy particle sampler (Gibbs-BPS) based on a piecewise deterministic Markov process. This new sampler combines elements of the Gibbs sampler with the bouncy particle sampler and its computational complexity is an order of magnitude smaller. We show that the new sampler converges to the target distribution. With computed tomography examples, we demonstrate that the proposed method shows competitive performance with existing popular Bayesian methods and is highly efficient in large scale problems.},
year={2024},
arxiv = {2409.07874}
arxiv = {2409.07874},
selected={true}
}

@article{zhou2023deep,
Expand All @@ -35,7 +36,8 @@ @article{wang2024comparative
year={2024},
publisher={De Gruyter},
html={https://doi.org/10.1515/jiip-2023-0037},
pdf = {https://arxiv.org/abs/2310.15831}
pdf = {https://arxiv.org/abs/2310.15831},
selected={true}
}

@article{hu2024surrogate,
Expand All @@ -62,8 +64,7 @@ @article{xu2024enhancing
year={2024},
publisher={Springer},
html={https://doi.org/10.1007/s10915-023-02439-4},
pdf = {https://arxiv.org/abs/2304.14491},
selected={true}
pdf = {https://arxiv.org/abs/2304.14491}
}


Expand Down

0 comments on commit 06682f7

Please sign in to comment.