@inproceedings{yan_li_guo_li_li_lin_2022,
  title     = {ARCANE: An Efficient Architecture for Exact Machine Unlearning},
  author    = {Yan, Haonan and Li, Xiaoguang and Guo, Ziyao and Li, Hui and Li, Fenghua and Lin, Xiaodong},
  booktitle = {Proceedings of the Thirty-First International Joint Conference on Artificial Intelligence (IJCAI-22)},
  publisher = {International Joint Conferences on Artificial Intelligence Organization},
  year      = {2022},
  doi       = {10.24963/ijcai.2022/553},
  abstract  = {Users' right to be forgotten has recently been stipulated by many laws and regulations. However, merely removing data from a dataset is not enough: machine learning models memorize training data once it is used in training, increasing the risk of exposing users' privacy. The straightforward solution, naive retraining, discards the requested data and retrains the model from scratch; it is reliable but incurs heavy computational and time overhead. In this paper, we propose an exact unlearning architecture called ARCANE. Based on ensemble learning, we transform naive retraining into multiple one-class classification tasks to reduce retraining cost while preserving model performance, especially under a large number of unlearning requests, a case not considered by previous works. We further introduce data preprocessing methods that reduce retraining overhead and speed up unlearning: representative data selection to remove redundancy, training state saving to reuse previous computation results, and data sorting to cope with unlearning requests of different distributions. We extensively evaluate ARCANE on three typical datasets with three common model architectures. Experimental results show the effectiveness and superiority of ARCANE over both naive retraining and the state-of-the-art method in terms of model performance and unlearning speed.}
}