OLMoTrace: Tracing Language Model Outputs Back to Trillions of Training Tokens
Authors
Venue
arXiv preprint arXiv:2504.07096
Tags
Links
BibTeX
Local Entry
@article{liu2025olmotrace,
title = {OLMoTrace: Tracing Language Model Outputs Back to Trillions of Training Tokens},
author = {Liu, Jiacheng and Blanton, Taylor and Elazar, Yanai and Min, Sewon and Chen, YenSung and Chheda-Kothary, Arnavi and Tran, Huy and Bischoff, Byron and Marsh, Eric and Schmitz, Michael and others},
year = {2025},
journal = {arXiv preprint arXiv:2504.07096},
url = {https://arxiv.org/abs/2504.07096}
}
From OPENALEX
@article{liu2025olmotrace,
title = {OLMoTrace: Tracing Language Model Outputs Back to Trillions of Training Tokens},
author = {Jiacheng Liu and Thomas N. Blanton and Sewon Min and Arnavi Chheda-Kothary and Huy Tran and Eric Marsh and Cassidy Trier and John T. James and Jon Borchardt and Evie Yu-Yen Cheng and Karen Farley and Sruthi Sreeram and Thomas Anderson and David Albright and Pang Wei Koh and Yejin Choi and Hannaneh Hajishirzi and Ali Farhadi},
year = {2025},
journal = {arXiv (Cornell University)},
doi = {10.48550/arxiv.2504.07096}
}