@article{jiao_li_liu_lu_yang_2021,
  title = {Just Least Squares: Binary Compressive Sampling with Low Generative Intrinsic Dimension},
  DOI = {10.48550/arXiv.2111.14486},
  abstractNote = {In this paper, we consider recovering $n$-dimensional signals from $m$ binary measurements corrupted by noise and sign flips, under the assumption that the target signals have low generative intrinsic dimension, i.e., the target signals can be approximately generated via an $L$-Lipschitz generator $G: \mathbb{R}^k \rightarrow \mathbb{R}^{n}$, $k \ll n$. Although the binary measurement model is highly nonlinear, we propose a least squares decoder and prove that, up to a constant $c$, with high probability, the least squares decoder achieves a sharp estimation error $\mathcal{O}(\sqrt{\frac{k\log(Ln)}{m}})$ as long as $m \geq \mathcal{O}(k\log(Ln))$. Extensive numerical simulations and comparisons with state-of-the-art methods demonstrate that the least squares decoder is robust to noise and sign flips, as indicated by our theory. By constructing a ReLU network with properly chosen depth and width, we verify the (approximate) deep generative prior, which is of independent interest.},
  publisher = {arXiv},
  author = {Jiao, Yuling and Li, Dingwei and Liu, Min and Lu, Xiangliang and Yang, Yuanyuan},
  year = {2021},
  month = {Nov}
}