@article{924,
  title    = {Assessing the Cost of Redistribution followed by a Computational Kernel: Complexity and Performance Results},
  journal  = {Parallel Computing},
  volume   = {52},
  year     = {2016},
  month    = {2016-02},
  pages    = {22-41},
  abstract = {The classical redistribution problem aims at optimally scheduling communications when reshuffling from an initial data distribution to a target data distribution. This target data distribution is usually chosen to optimize some objective for the algorithmic kernel under study (good computational balance or low communication volume or cost), and therefore to provide high efficiency for that kernel. However, the choice of a distribution minimizing the target objective is not unique. This leads to generalizing the redistribution problem as follows: find a re-mapping of data items onto processors such that the data redistribution cost is minimal and the operation remains as efficient as before. This paper studies the complexity of this generalized problem. We compute optimal solutions and evaluate, through simulations, their gain over classical redistribution. We also show the NP-hardness of finding the optimal data partition and processor permutation (defined by new subsets) that minimize the cost of redistribution followed by a simple computational kernel. Finally, experimental validation of the new redistribution algorithms is conducted on a multicore cluster, for both a 1D-stencil kernel and a more compute-intensive dense linear algebra routine.},
  keywords = {data partition, linear algebra, PaRSEC, QR factorization, redistribution, stencil},
  doi      = {10.1016/j.parco.2015.09.005},
  author   = {Julien Herrmann and George Bosilca and Thomas Herault and Loris Marchal and Yves Robert and Jack Dongarra}
}