@inproceedings{iqbal_transfer_2019,
  address = {Santa Clara, CA},
  title = {Transfer {Learning} for {Performance} {Modeling} of {Deep} {Neural} {Network} {Systems}},
  url = {https://www.usenix.org/conference/opml19/presentation/iqbal},
  abstract = {Modern deep neural network (DNN) systems are highly configurable, with a large number of options that significantly affect their non-functional behavior, for example inference time and energy consumption. Performance models make it possible to understand and predict the effects of such configuration options on system behavior, but are costly to build because of large configuration spaces. Performance models from one environment cannot be transferred directly to another; usually, models are rebuilt from scratch for different environments, for example different hardware. Recently, transfer learning methods have been applied to reuse knowledge from performance models trained in one environment in another. In this paper, we perform an empirical study to understand the effectiveness of different transfer learning strategies for building performance models of DNN systems. Our results show that transferring information on the most influential configuration options and their interactions is an effective way of reducing the cost of building performance models in new environments.},
  booktitle = {{USENIX} {Conference} on {Operational} {Machine} {Learning}},
  publisher = {USENIX Association},
  author = {Iqbal, Md Shahriar and Kotthoff, Lars and Jamshidi, Pooyan},
  year = {2019}
}